ngram
listlengths
0
67.8k
[ "required=True) def clean_room_name(self): \"\"\"Clean room name.\"\"\" if Room.objects.filter(name=self.cleaned_data.get(\"room_name\")).exists(): self.add_error(\"room_name\", \"This room already exists\")", "clean_room_name(self): \"\"\"Clean room name.\"\"\" if Room.objects.filter(name=self.cleaned_data.get(\"room_name\")).exists(): self.add_error(\"room_name\", \"This room already exists\") raise forms.ValidationError(\"This", "room already exists\") raise forms.ValidationError(\"This room already exists\") return self.cleaned_data.get(\"room_name\") class PlayerCreationForm(forms.Form): \"\"\"Player", "\"\"\"Room Creation form.\"\"\" room_name = forms.CharField(max_length=128, required=True) def clean_room_name(self): \"\"\"Clean room name.\"\"\" if", "from .models import Room class RoomCreationForm(forms.Form): \"\"\"Room Creation form.\"\"\" room_name = forms.CharField(max_length=128, required=True)", "room_name = forms.CharField(max_length=128, required=True) def clean_room_name(self): \"\"\"Clean room name.\"\"\" if Room.objects.filter(name=self.cleaned_data.get(\"room_name\")).exists(): self.add_error(\"room_name\", \"This", "exists\") raise forms.ValidationError(\"This room already exists\") return self.cleaned_data.get(\"room_name\") class PlayerCreationForm(forms.Form): \"\"\"Player Creation form.\"\"\"", "already exists\") return self.cleaned_data.get(\"room_name\") class PlayerCreationForm(forms.Form): \"\"\"Player Creation form.\"\"\" player_name = forms.CharField(max_length=128, required=True)", "Room class RoomCreationForm(forms.Form): \"\"\"Room Creation form.\"\"\" room_name = forms.CharField(max_length=128, required=True) def clean_room_name(self): \"\"\"Clean", "= forms.CharField(max_length=128, required=True) def clean_room_name(self): \"\"\"Clean room name.\"\"\" if Room.objects.filter(name=self.cleaned_data.get(\"room_name\")).exists(): self.add_error(\"room_name\", \"This room", ".models import Room class RoomCreationForm(forms.Form): 
\"\"\"Room Creation form.\"\"\" room_name = forms.CharField(max_length=128, required=True) def", "django import forms from .models import Room class RoomCreationForm(forms.Form): \"\"\"Room Creation form.\"\"\" room_name", "room already exists\") return self.cleaned_data.get(\"room_name\") class PlayerCreationForm(forms.Form): \"\"\"Player Creation form.\"\"\" player_name = forms.CharField(max_length=128,", "room name.\"\"\" if Room.objects.filter(name=self.cleaned_data.get(\"room_name\")).exists(): self.add_error(\"room_name\", \"This room already exists\") raise forms.ValidationError(\"This room already", "class RoomCreationForm(forms.Form): \"\"\"Room Creation form.\"\"\" room_name = forms.CharField(max_length=128, required=True) def clean_room_name(self): \"\"\"Clean room", "forms from .models import Room class RoomCreationForm(forms.Form): \"\"\"Room Creation form.\"\"\" room_name = forms.CharField(max_length=128,", "self.add_error(\"room_name\", \"This room already exists\") raise forms.ValidationError(\"This room already exists\") return self.cleaned_data.get(\"room_name\") class", "raise forms.ValidationError(\"This room already exists\") return self.cleaned_data.get(\"room_name\") class PlayerCreationForm(forms.Form): \"\"\"Player Creation form.\"\"\" player_name", "Creation form.\"\"\" room_name = forms.CharField(max_length=128, required=True) def clean_room_name(self): \"\"\"Clean room name.\"\"\" if Room.objects.filter(name=self.cleaned_data.get(\"room_name\")).exists():", "<filename>website/rooms/forms.py from django import forms from .models import Room class RoomCreationForm(forms.Form): \"\"\"Room Creation", "RoomCreationForm(forms.Form): \"\"\"Room Creation form.\"\"\" room_name = forms.CharField(max_length=128, required=True) def clean_room_name(self): \"\"\"Clean room name.\"\"\"", "name.\"\"\" if Room.objects.filter(name=self.cleaned_data.get(\"room_name\")).exists(): self.add_error(\"room_name\", \"This room already exists\") raise 
forms.ValidationError(\"This room already exists\")", "\"\"\"Clean room name.\"\"\" if Room.objects.filter(name=self.cleaned_data.get(\"room_name\")).exists(): self.add_error(\"room_name\", \"This room already exists\") raise forms.ValidationError(\"This room", "form.\"\"\" room_name = forms.CharField(max_length=128, required=True) def clean_room_name(self): \"\"\"Clean room name.\"\"\" if Room.objects.filter(name=self.cleaned_data.get(\"room_name\")).exists(): self.add_error(\"room_name\",", "import Room class RoomCreationForm(forms.Form): \"\"\"Room Creation form.\"\"\" room_name = forms.CharField(max_length=128, required=True) def clean_room_name(self):", "from django import forms from .models import Room class RoomCreationForm(forms.Form): \"\"\"Room Creation form.\"\"\"", "forms.CharField(max_length=128, required=True) def clean_room_name(self): \"\"\"Clean room name.\"\"\" if Room.objects.filter(name=self.cleaned_data.get(\"room_name\")).exists(): self.add_error(\"room_name\", \"This room already", "\"This room already exists\") raise forms.ValidationError(\"This room already exists\") return self.cleaned_data.get(\"room_name\") class PlayerCreationForm(forms.Form):", "forms.ValidationError(\"This room already exists\") return self.cleaned_data.get(\"room_name\") class PlayerCreationForm(forms.Form): \"\"\"Player Creation form.\"\"\" player_name =", "import forms from .models import Room class RoomCreationForm(forms.Form): \"\"\"Room Creation form.\"\"\" room_name =", "def clean_room_name(self): \"\"\"Clean room name.\"\"\" if Room.objects.filter(name=self.cleaned_data.get(\"room_name\")).exists(): self.add_error(\"room_name\", \"This room already exists\") raise", "already exists\") raise forms.ValidationError(\"This room already exists\") return self.cleaned_data.get(\"room_name\") class PlayerCreationForm(forms.Form): \"\"\"Player Creation", "if Room.objects.filter(name=self.cleaned_data.get(\"room_name\")).exists(): self.add_error(\"room_name\", \"This 
room already exists\") raise forms.ValidationError(\"This room already exists\") return", "Room.objects.filter(name=self.cleaned_data.get(\"room_name\")).exists(): self.add_error(\"room_name\", \"This room already exists\") raise forms.ValidationError(\"This room already exists\") return self.cleaned_data.get(\"room_name\")" ]
[ "sample){ unsigned int current_node = 0; int feature_idx = FEATURE_IDX_NODE[0]; while(feature_idx >= 0){", "class_aux = list(map(lambda x : x[0], self.model.tree_.value)) self.classes = np.argmax(class_aux, axis = 1)", "else{ current_node = RIGHT_CHILDS[current_node]; } feature_idx = FEATURE_IDX_NODE[current_node]; } return classes[current_node]; } \"\"\"", "NO_NODES %s unsigned char classes[NO_NODES] = {%s}; int FEATURE_IDX_NODE[NO_NODES] = {%s}; int RIGHT_CHILDS[NO_NODES]", "/* The following code was generated using Clara.Transpiler. For more information please visit:", "predict(double * sample){ unsigned int current_node = 0; int feature_idx = FEATURE_IDX_NODE[0]; while(feature_idx", "RIGHT_CHILDS[current_node]; } feature_idx = FEATURE_IDX_NODE[current_node]; } return classes[current_node]; } \"\"\" % (self.model.tree_.node_count, self.classes,", "self.right_nodes = ','.join(self.model.tree_.children_right.astype(str)).replace('-1', '0') def build_thresholds(self): self.thresholds = ','.join(self.model.tree_.threshold.astype(str)) def generate_code(self): return \"\"\"", "The following code was generated using Clara.Transpiler. 
For more information please visit: https://github.com/asergiobranco/clara", "RIGHT_CHILDS[NO_NODES] = {%s}; float THRESHOLDS[NO_NODES] = {%s}; int predict(double * sample){ unsigned int", "self.build_feature_idx() self.build_right_nodes() self.build_thresholds() def build_feature_idx(self): self.features_idx = ','.join(self.model.tree_.feature.astype(str)) def build_classes(self): class_aux = list(map(lambda", "model): self.model = model self.build_classes() self.build_feature_idx() self.build_right_nodes() self.build_thresholds() def build_feature_idx(self): self.features_idx = ','.join(self.model.tree_.feature.astype(str))", "self.build_right_nodes() self.build_thresholds() def build_feature_idx(self): self.features_idx = ','.join(self.model.tree_.feature.astype(str)) def build_classes(self): class_aux = list(map(lambda x", "build_thresholds(self): self.thresholds = ','.join(self.model.tree_.threshold.astype(str)) def generate_code(self): return \"\"\" /* The following code was", "{%s}; int predict(double * sample){ unsigned int current_node = 0; int feature_idx =", "following code was generated using Clara.Transpiler. For more information please visit: https://github.com/asergiobranco/clara */", "code was generated using Clara.Transpiler. 
For more information please visit: https://github.com/asergiobranco/clara */ #define", "= FEATURE_IDX_NODE[0]; while(feature_idx >= 0){ if(sample[feature_idx] <= THRESHOLDS[current_node]){ current_node++; } else{ current_node =", "self.thresholds = ','.join(self.model.tree_.threshold.astype(str)) def generate_code(self): return \"\"\" /* The following code was generated", "current_node++; } else{ current_node = RIGHT_CHILDS[current_node]; } feature_idx = FEATURE_IDX_NODE[current_node]; } return classes[current_node];", "information please visit: https://github.com/asergiobranco/clara */ #define NO_NODES %s unsigned char classes[NO_NODES] = {%s};", "numpy as np class DecisionTreeClassifierTranspiler(object): def __init__(self, model): self.model = model self.build_classes() self.build_feature_idx()", "= np.argmax(class_aux, axis = 1) self.classes = ','.join(self.classes.astype(str)) def build_right_nodes(self): self.right_nodes = ','.join(self.model.tree_.children_right.astype(str)).replace('-1',", "','.join(self.classes.astype(str)) def build_right_nodes(self): self.right_nodes = ','.join(self.model.tree_.children_right.astype(str)).replace('-1', '0') def build_thresholds(self): self.thresholds = ','.join(self.model.tree_.threshold.astype(str)) def", "int current_node = 0; int feature_idx = FEATURE_IDX_NODE[0]; while(feature_idx >= 0){ if(sample[feature_idx] <=", "int feature_idx = FEATURE_IDX_NODE[0]; while(feature_idx >= 0){ if(sample[feature_idx] <= THRESHOLDS[current_node]){ current_node++; } else{", "model self.build_classes() self.build_feature_idx() self.build_right_nodes() self.build_thresholds() def build_feature_idx(self): self.features_idx = ','.join(self.model.tree_.feature.astype(str)) def build_classes(self): class_aux", "= {%s}; int FEATURE_IDX_NODE[NO_NODES] = {%s}; int RIGHT_CHILDS[NO_NODES] = {%s}; float THRESHOLDS[NO_NODES] =", "self.features_idx = ','.join(self.model.tree_.feature.astype(str)) def build_classes(self): class_aux = list(map(lambda 
x : x[0], self.model.tree_.value)) self.classes", "DecisionTreeClassifierTranspiler(object): def __init__(self, model): self.model = model self.build_classes() self.build_feature_idx() self.build_right_nodes() self.build_thresholds() def build_feature_idx(self):", ": x[0], self.model.tree_.value)) self.classes = np.argmax(class_aux, axis = 1) self.classes = ','.join(self.classes.astype(str)) def", "For more information please visit: https://github.com/asergiobranco/clara */ #define NO_NODES %s unsigned char classes[NO_NODES]", "float THRESHOLDS[NO_NODES] = {%s}; int predict(double * sample){ unsigned int current_node = 0;", "= {%s}; int RIGHT_CHILDS[NO_NODES] = {%s}; float THRESHOLDS[NO_NODES] = {%s}; int predict(double *", "unsigned int current_node = 0; int feature_idx = FEATURE_IDX_NODE[0]; while(feature_idx >= 0){ if(sample[feature_idx]", "build_classes(self): class_aux = list(map(lambda x : x[0], self.model.tree_.value)) self.classes = np.argmax(class_aux, axis =", "classes[NO_NODES] = {%s}; int FEATURE_IDX_NODE[NO_NODES] = {%s}; int RIGHT_CHILDS[NO_NODES] = {%s}; float THRESHOLDS[NO_NODES]", "self.classes = ','.join(self.classes.astype(str)) def build_right_nodes(self): self.right_nodes = ','.join(self.model.tree_.children_right.astype(str)).replace('-1', '0') def build_thresholds(self): self.thresholds =", "more information please visit: https://github.com/asergiobranco/clara */ #define NO_NODES %s unsigned char classes[NO_NODES] =", "axis = 1) self.classes = ','.join(self.classes.astype(str)) def build_right_nodes(self): self.right_nodes = ','.join(self.model.tree_.children_right.astype(str)).replace('-1', '0') def", "self.build_thresholds() def build_feature_idx(self): self.features_idx = ','.join(self.model.tree_.feature.astype(str)) def build_classes(self): class_aux = list(map(lambda x :", "class DecisionTreeClassifierTranspiler(object): def __init__(self, model): self.model = model self.build_classes() self.build_feature_idx() self.build_right_nodes() 
self.build_thresholds() def", "as np class DecisionTreeClassifierTranspiler(object): def __init__(self, model): self.model = model self.build_classes() self.build_feature_idx() self.build_right_nodes()", "= model self.build_classes() self.build_feature_idx() self.build_right_nodes() self.build_thresholds() def build_feature_idx(self): self.features_idx = ','.join(self.model.tree_.feature.astype(str)) def build_classes(self):", "def build_thresholds(self): self.thresholds = ','.join(self.model.tree_.threshold.astype(str)) def generate_code(self): return \"\"\" /* The following code", "0){ if(sample[feature_idx] <= THRESHOLDS[current_node]){ current_node++; } else{ current_node = RIGHT_CHILDS[current_node]; } feature_idx =", "= RIGHT_CHILDS[current_node]; } feature_idx = FEATURE_IDX_NODE[current_node]; } return classes[current_node]; } \"\"\" % (self.model.tree_.node_count,", "THRESHOLDS[current_node]){ current_node++; } else{ current_node = RIGHT_CHILDS[current_node]; } feature_idx = FEATURE_IDX_NODE[current_node]; } return", "list(map(lambda x : x[0], self.model.tree_.value)) self.classes = np.argmax(class_aux, axis = 1) self.classes =", "def generate_code(self): return \"\"\" /* The following code was generated using Clara.Transpiler. 
For", "feature_idx = FEATURE_IDX_NODE[current_node]; } return classes[current_node]; } \"\"\" % (self.model.tree_.node_count, self.classes, self.features_idx, self.right_nodes,", "%s unsigned char classes[NO_NODES] = {%s}; int FEATURE_IDX_NODE[NO_NODES] = {%s}; int RIGHT_CHILDS[NO_NODES] =", "= ','.join(self.classes.astype(str)) def build_right_nodes(self): self.right_nodes = ','.join(self.model.tree_.children_right.astype(str)).replace('-1', '0') def build_thresholds(self): self.thresholds = ','.join(self.model.tree_.threshold.astype(str))", "= ','.join(self.model.tree_.threshold.astype(str)) def generate_code(self): return \"\"\" /* The following code was generated using", "} else{ current_node = RIGHT_CHILDS[current_node]; } feature_idx = FEATURE_IDX_NODE[current_node]; } return classes[current_node]; }", "} feature_idx = FEATURE_IDX_NODE[current_node]; } return classes[current_node]; } \"\"\" % (self.model.tree_.node_count, self.classes, self.features_idx,", "def __init__(self, model): self.model = model self.build_classes() self.build_feature_idx() self.build_right_nodes() self.build_thresholds() def build_feature_idx(self): self.features_idx", "*/ #define NO_NODES %s unsigned char classes[NO_NODES] = {%s}; int FEATURE_IDX_NODE[NO_NODES] = {%s};", "Clara.Transpiler. 
For more information please visit: https://github.com/asergiobranco/clara */ #define NO_NODES %s unsigned char", "'0') def build_thresholds(self): self.thresholds = ','.join(self.model.tree_.threshold.astype(str)) def generate_code(self): return \"\"\" /* The following", "np class DecisionTreeClassifierTranspiler(object): def __init__(self, model): self.model = model self.build_classes() self.build_feature_idx() self.build_right_nodes() self.build_thresholds()", "{%s}; float THRESHOLDS[NO_NODES] = {%s}; int predict(double * sample){ unsigned int current_node =", "= {%s}; int predict(double * sample){ unsigned int current_node = 0; int feature_idx", "def build_feature_idx(self): self.features_idx = ','.join(self.model.tree_.feature.astype(str)) def build_classes(self): class_aux = list(map(lambda x : x[0],", "{%s}; int FEATURE_IDX_NODE[NO_NODES] = {%s}; int RIGHT_CHILDS[NO_NODES] = {%s}; float THRESHOLDS[NO_NODES] = {%s};", "visit: https://github.com/asergiobranco/clara */ #define NO_NODES %s unsigned char classes[NO_NODES] = {%s}; int FEATURE_IDX_NODE[NO_NODES]", "def build_right_nodes(self): self.right_nodes = ','.join(self.model.tree_.children_right.astype(str)).replace('-1', '0') def build_thresholds(self): self.thresholds = ','.join(self.model.tree_.threshold.astype(str)) def generate_code(self):", "int RIGHT_CHILDS[NO_NODES] = {%s}; float THRESHOLDS[NO_NODES] = {%s}; int predict(double * sample){ unsigned", "= ','.join(self.model.tree_.feature.astype(str)) def build_classes(self): class_aux = list(map(lambda x : x[0], self.model.tree_.value)) self.classes =", "#define NO_NODES %s unsigned char classes[NO_NODES] = {%s}; int FEATURE_IDX_NODE[NO_NODES] = {%s}; int", "{%s}; int RIGHT_CHILDS[NO_NODES] = {%s}; float THRESHOLDS[NO_NODES] = {%s}; int predict(double * sample){", "https://github.com/asergiobranco/clara */ #define NO_NODES %s unsigned char classes[NO_NODES] = {%s}; int FEATURE_IDX_NODE[NO_NODES] =", "if(sample[feature_idx] <= THRESHOLDS[current_node]){ 
current_node++; } else{ current_node = RIGHT_CHILDS[current_node]; } feature_idx = FEATURE_IDX_NODE[current_node];", "build_feature_idx(self): self.features_idx = ','.join(self.model.tree_.feature.astype(str)) def build_classes(self): class_aux = list(map(lambda x : x[0], self.model.tree_.value))", "= 0; int feature_idx = FEATURE_IDX_NODE[0]; while(feature_idx >= 0){ if(sample[feature_idx] <= THRESHOLDS[current_node]){ current_node++;", "build_right_nodes(self): self.right_nodes = ','.join(self.model.tree_.children_right.astype(str)).replace('-1', '0') def build_thresholds(self): self.thresholds = ','.join(self.model.tree_.threshold.astype(str)) def generate_code(self): return", "FEATURE_IDX_NODE[0]; while(feature_idx >= 0){ if(sample[feature_idx] <= THRESHOLDS[current_node]){ current_node++; } else{ current_node = RIGHT_CHILDS[current_node];", "was generated using Clara.Transpiler. For more information please visit: https://github.com/asergiobranco/clara */ #define NO_NODES", "generated using Clara.Transpiler. 
For more information please visit: https://github.com/asergiobranco/clara */ #define NO_NODES %s", "THRESHOLDS[NO_NODES] = {%s}; int predict(double * sample){ unsigned int current_node = 0; int", "current_node = RIGHT_CHILDS[current_node]; } feature_idx = FEATURE_IDX_NODE[current_node]; } return classes[current_node]; } \"\"\" %", "char classes[NO_NODES] = {%s}; int FEATURE_IDX_NODE[NO_NODES] = {%s}; int RIGHT_CHILDS[NO_NODES] = {%s}; float", "* sample){ unsigned int current_node = 0; int feature_idx = FEATURE_IDX_NODE[0]; while(feature_idx >=", "def build_classes(self): class_aux = list(map(lambda x : x[0], self.model.tree_.value)) self.classes = np.argmax(class_aux, axis", "self.build_classes() self.build_feature_idx() self.build_right_nodes() self.build_thresholds() def build_feature_idx(self): self.features_idx = ','.join(self.model.tree_.feature.astype(str)) def build_classes(self): class_aux =", "__init__(self, model): self.model = model self.build_classes() self.build_feature_idx() self.build_right_nodes() self.build_thresholds() def build_feature_idx(self): self.features_idx =", "self.model = model self.build_classes() self.build_feature_idx() self.build_right_nodes() self.build_thresholds() def build_feature_idx(self): self.features_idx = ','.join(self.model.tree_.feature.astype(str)) def", "x : x[0], self.model.tree_.value)) self.classes = np.argmax(class_aux, axis = 1) self.classes = ','.join(self.classes.astype(str))", "while(feature_idx >= 0){ if(sample[feature_idx] <= THRESHOLDS[current_node]){ current_node++; } else{ current_node = RIGHT_CHILDS[current_node]; }", "x[0], self.model.tree_.value)) self.classes = np.argmax(class_aux, axis = 1) self.classes = ','.join(self.classes.astype(str)) def build_right_nodes(self):", "','.join(self.model.tree_.feature.astype(str)) def build_classes(self): class_aux = list(map(lambda x : x[0], self.model.tree_.value)) self.classes = np.argmax(class_aux,", "= {%s}; float THRESHOLDS[NO_NODES] = {%s}; int 
predict(double * sample){ unsigned int current_node", "current_node = 0; int feature_idx = FEATURE_IDX_NODE[0]; while(feature_idx >= 0){ if(sample[feature_idx] <= THRESHOLDS[current_node]){", "please visit: https://github.com/asergiobranco/clara */ #define NO_NODES %s unsigned char classes[NO_NODES] = {%s}; int", "self.model.tree_.value)) self.classes = np.argmax(class_aux, axis = 1) self.classes = ','.join(self.classes.astype(str)) def build_right_nodes(self): self.right_nodes", "\"\"\" /* The following code was generated using Clara.Transpiler. For more information please", "feature_idx = FEATURE_IDX_NODE[0]; while(feature_idx >= 0){ if(sample[feature_idx] <= THRESHOLDS[current_node]){ current_node++; } else{ current_node", "1) self.classes = ','.join(self.classes.astype(str)) def build_right_nodes(self): self.right_nodes = ','.join(self.model.tree_.children_right.astype(str)).replace('-1', '0') def build_thresholds(self): self.thresholds", "<= THRESHOLDS[current_node]){ current_node++; } else{ current_node = RIGHT_CHILDS[current_node]; } feature_idx = FEATURE_IDX_NODE[current_node]; }", "','.join(self.model.tree_.children_right.astype(str)).replace('-1', '0') def build_thresholds(self): self.thresholds = ','.join(self.model.tree_.threshold.astype(str)) def generate_code(self): return \"\"\" /* The", "= ','.join(self.model.tree_.children_right.astype(str)).replace('-1', '0') def build_thresholds(self): self.thresholds = ','.join(self.model.tree_.threshold.astype(str)) def generate_code(self): return \"\"\" /*", "= list(map(lambda x : x[0], self.model.tree_.value)) self.classes = np.argmax(class_aux, axis = 1) self.classes", "using Clara.Transpiler. 
For more information please visit: https://github.com/asergiobranco/clara */ #define NO_NODES %s unsigned", "','.join(self.model.tree_.threshold.astype(str)) def generate_code(self): return \"\"\" /* The following code was generated using Clara.Transpiler.", "FEATURE_IDX_NODE[NO_NODES] = {%s}; int RIGHT_CHILDS[NO_NODES] = {%s}; float THRESHOLDS[NO_NODES] = {%s}; int predict(double", ">= 0){ if(sample[feature_idx] <= THRESHOLDS[current_node]){ current_node++; } else{ current_node = RIGHT_CHILDS[current_node]; } feature_idx", "int predict(double * sample){ unsigned int current_node = 0; int feature_idx = FEATURE_IDX_NODE[0];", "= FEATURE_IDX_NODE[current_node]; } return classes[current_node]; } \"\"\" % (self.model.tree_.node_count, self.classes, self.features_idx, self.right_nodes, self.thresholds)", "= 1) self.classes = ','.join(self.classes.astype(str)) def build_right_nodes(self): self.right_nodes = ','.join(self.model.tree_.children_right.astype(str)).replace('-1', '0') def build_thresholds(self):", "self.classes = np.argmax(class_aux, axis = 1) self.classes = ','.join(self.classes.astype(str)) def build_right_nodes(self): self.right_nodes =", "return \"\"\" /* The following code was generated using Clara.Transpiler. For more information", "generate_code(self): return \"\"\" /* The following code was generated using Clara.Transpiler. 
For more", "0; int feature_idx = FEATURE_IDX_NODE[0]; while(feature_idx >= 0){ if(sample[feature_idx] <= THRESHOLDS[current_node]){ current_node++; }", "import numpy as np class DecisionTreeClassifierTranspiler(object): def __init__(self, model): self.model = model self.build_classes()", "unsigned char classes[NO_NODES] = {%s}; int FEATURE_IDX_NODE[NO_NODES] = {%s}; int RIGHT_CHILDS[NO_NODES] = {%s};", "int FEATURE_IDX_NODE[NO_NODES] = {%s}; int RIGHT_CHILDS[NO_NODES] = {%s}; float THRESHOLDS[NO_NODES] = {%s}; int", "np.argmax(class_aux, axis = 1) self.classes = ','.join(self.classes.astype(str)) def build_right_nodes(self): self.right_nodes = ','.join(self.model.tree_.children_right.astype(str)).replace('-1', '0')" ]
[]
[ "+ \"request\") self.assertEquals(expected_response, response.text) self.assertEquals(expected_response_code, response.status_code) @classmethod def tearDownClass(cls): try: cls.SERVER.stop_server() except: pass", "404 self.server.set_expected_response(expected_response, expected_response_code) response = requests.get(self.server.get_url() + \"request\") self.assertEquals(expected_response, response.text) self.assertEquals(expected_response_code, response.status_code) @classmethod", "requests.get(url) received_request = open(FakeServer.REQUEST_FILE).read() assert request in received_request[1:] # skip first character which", "\"request2\") self.assertTrue(self.server.is_ready_to_process()) def test_server_overlapped_listeners(self): self.assertTrue(self.server.is_ready_to_process()) self.assertRaises(FakeServer.ServerStateException, self.server.serve_once) self.assertRaises(FakeServer.ServerStateException, self.server.serve_forever) def test_server_start_overlapped_instances(self): self.assertRaises(FakeServer.ServerStateException, self.server.start_server)", "from helpers.fake_http_server import FakeServer class FakeServerTest(unittest.TestCase): SERVER = None @classmethod def setUpClass(cls): cls.SERVER", "\"Expected Response\" expected_response_code = 404 self.server.set_expected_response(expected_response, expected_response_code) response = requests.get(self.server.get_url() + \"request\") self.assertEquals(expected_response,", "except: pass def send_and_check_request(url, request): url = url + request response = requests.get(url)", "def test_server_process_forever(self): self.assertTrue(self.server.is_ready_to_process()) send_and_check_request(self.server.get_url(), \"request1\") self.assertTrue(self.server.is_ready_to_process()) send_and_check_request(self.server.get_url(), \"request2\") self.assertTrue(self.server.is_ready_to_process()) def test_server_overlapped_listeners(self): self.assertTrue(self.server.is_ready_to_process()) 
self.assertRaises(FakeServer.ServerStateException,", "requests.get(self.server.get_url(), timeout=timeout) def test_server_stop_multiple_times(self): self.server.stop_server() self.assertRaises(FakeServer.ServerStateException, self.server.stop_server) self.server.start_server() self.server.serve_forever() def test_set_custom_response(self): expected_response =", "test_server_start_overlapped_instances(self): self.assertRaises(FakeServer.ServerStateException, self.server.start_server) def test_timeout_triggers_only_once_per_call(self): timeout = 0.3 self.server.set_timeout_delay(timeout) with self.assertRaises(requests.exceptions.ReadTimeout): requests.get(self.server.get_url(), timeout=timeout)", "skip first character which always is '/' assert response.status_code == FakeServer.DEFAULT_RESPONSE_CODE assert response.text", "helpers.fake_http_server import FakeServer class FakeServerTest(unittest.TestCase): SERVER = None @classmethod def setUpClass(cls): cls.SERVER =", "= 0.3 self.server.set_timeout_delay(timeout) with self.assertRaises(requests.exceptions.ReadTimeout): requests.get(self.server.get_url(), timeout=timeout) requests.get(self.server.get_url(), timeout=timeout) def test_server_stop_multiple_times(self): self.server.stop_server() self.assertRaises(FakeServer.ServerStateException,", "def setUpClass(cls): cls.SERVER = FakeServer() cls.SERVER.start_server() cls.SERVER.serve_forever() def setUp(self): self.server = FakeServerTest.SERVER def", "expected_response_code) response = requests.get(self.server.get_url() + \"request\") self.assertEquals(expected_response, response.text) self.assertEquals(expected_response_code, response.status_code) @classmethod def tearDownClass(cls):", "def test_timeout_triggers_only_once_per_call(self): timeout = 0.3 self.server.set_timeout_delay(timeout) with self.assertRaises(requests.exceptions.ReadTimeout): requests.get(self.server.get_url(), timeout=timeout) requests.get(self.server.get_url(), timeout=timeout) def", "def 
test_is_server_alive(self): self.assertTrue(self.server.is_alive()) self.assertTrue(self.server.is_ready_to_process()) def test_server_process_forever(self): self.assertTrue(self.server.is_ready_to_process()) send_and_check_request(self.server.get_url(), \"request1\") self.assertTrue(self.server.is_ready_to_process()) send_and_check_request(self.server.get_url(), \"request2\") self.assertTrue(self.server.is_ready_to_process())", "def test_server_stop_multiple_times(self): self.server.stop_server() self.assertRaises(FakeServer.ServerStateException, self.server.stop_server) self.server.start_server() self.server.serve_forever() def test_set_custom_response(self): expected_response = \"Expected Response\"", "self.assertTrue(self.server.is_ready_to_process()) self.assertRaises(FakeServer.ServerStateException, self.server.serve_once) self.assertRaises(FakeServer.ServerStateException, self.server.serve_forever) def test_server_start_overlapped_instances(self): self.assertRaises(FakeServer.ServerStateException, self.server.start_server) def test_timeout_triggers_only_once_per_call(self): timeout =", "requests from helpers.fake_http_server import FakeServer class FakeServerTest(unittest.TestCase): SERVER = None @classmethod def setUpClass(cls):", "= requests.get(url) received_request = open(FakeServer.REQUEST_FILE).read() assert request in received_request[1:] # skip first character", "response = requests.get(self.server.get_url() + \"request\") self.assertEquals(expected_response, response.text) self.assertEquals(expected_response_code, response.status_code) @classmethod def tearDownClass(cls): try:", "FakeServerTest(unittest.TestCase): SERVER = None @classmethod def setUpClass(cls): cls.SERVER = FakeServer() cls.SERVER.start_server() cls.SERVER.serve_forever() def", "timeout=timeout) def test_server_stop_multiple_times(self): self.server.stop_server() self.assertRaises(FakeServer.ServerStateException, self.server.stop_server) self.server.start_server() 
self.server.serve_forever() def test_set_custom_response(self): expected_response = \"Expected", "FakeServer class FakeServerTest(unittest.TestCase): SERVER = None @classmethod def setUpClass(cls): cls.SERVER = FakeServer() cls.SERVER.start_server()", "self.assertTrue(self.server.is_ready_to_process()) send_and_check_request(self.server.get_url(), \"request1\") self.assertTrue(self.server.is_ready_to_process()) send_and_check_request(self.server.get_url(), \"request2\") self.assertTrue(self.server.is_ready_to_process()) def test_server_overlapped_listeners(self): self.assertTrue(self.server.is_ready_to_process()) self.assertRaises(FakeServer.ServerStateException, self.server.serve_once) self.assertRaises(FakeServer.ServerStateException,", "character which always is '/' assert response.status_code == FakeServer.DEFAULT_RESPONSE_CODE assert response.text == FakeServer.DEFAULT_RESPONSE", "self.server.stop_server() self.assertRaises(FakeServer.ServerStateException, self.server.stop_server) self.server.start_server() self.server.serve_forever() def test_set_custom_response(self): expected_response = \"Expected Response\" expected_response_code =", "def tearDownClass(cls): try: cls.SERVER.stop_server() except: pass def send_and_check_request(url, request): url = url +", "import unittest import requests from helpers.fake_http_server import FakeServer class FakeServerTest(unittest.TestCase): SERVER = None", "= FakeServerTest.SERVER def test_is_server_alive(self): self.assertTrue(self.server.is_alive()) self.assertTrue(self.server.is_ready_to_process()) def test_server_process_forever(self): self.assertTrue(self.server.is_ready_to_process()) send_and_check_request(self.server.get_url(), \"request1\") self.assertTrue(self.server.is_ready_to_process()) send_and_check_request(self.server.get_url(),", "self.server = FakeServerTest.SERVER def test_is_server_alive(self): self.assertTrue(self.server.is_alive()) self.assertTrue(self.server.is_ready_to_process()) def 
test_server_process_forever(self): self.assertTrue(self.server.is_ready_to_process()) send_and_check_request(self.server.get_url(), \"request1\") self.assertTrue(self.server.is_ready_to_process())", "test_server_process_forever(self): self.assertTrue(self.server.is_ready_to_process()) send_and_check_request(self.server.get_url(), \"request1\") self.assertTrue(self.server.is_ready_to_process()) send_and_check_request(self.server.get_url(), \"request2\") self.assertTrue(self.server.is_ready_to_process()) def test_server_overlapped_listeners(self): self.assertTrue(self.server.is_ready_to_process()) self.assertRaises(FakeServer.ServerStateException, self.server.serve_once)", "assert request in received_request[1:] # skip first character which always is '/' assert", "in received_request[1:] # skip first character which always is '/' assert response.status_code ==", "+ request response = requests.get(url) received_request = open(FakeServer.REQUEST_FILE).read() assert request in received_request[1:] #", "response.text) self.assertEquals(expected_response_code, response.status_code) @classmethod def tearDownClass(cls): try: cls.SERVER.stop_server() except: pass def send_and_check_request(url, request):", "self.assertRaises(FakeServer.ServerStateException, self.server.stop_server) self.server.start_server() self.server.serve_forever() def test_set_custom_response(self): expected_response = \"Expected Response\" expected_response_code = 404", "= 404 self.server.set_expected_response(expected_response, expected_response_code) response = requests.get(self.server.get_url() + \"request\") self.assertEquals(expected_response, response.text) self.assertEquals(expected_response_code, response.status_code)", "self.assertEquals(expected_response_code, response.status_code) @classmethod def tearDownClass(cls): try: cls.SERVER.stop_server() except: pass def send_and_check_request(url, request): url", "response.status_code) @classmethod def tearDownClass(cls): try: 
cls.SERVER.stop_server() except: pass def send_and_check_request(url, request): url =", "import requests from helpers.fake_http_server import FakeServer class FakeServerTest(unittest.TestCase): SERVER = None @classmethod def", "self.server.start_server) def test_timeout_triggers_only_once_per_call(self): timeout = 0.3 self.server.set_timeout_delay(timeout) with self.assertRaises(requests.exceptions.ReadTimeout): requests.get(self.server.get_url(), timeout=timeout) requests.get(self.server.get_url(), timeout=timeout)", "cls.SERVER.start_server() cls.SERVER.serve_forever() def setUp(self): self.server = FakeServerTest.SERVER def test_is_server_alive(self): self.assertTrue(self.server.is_alive()) self.assertTrue(self.server.is_ready_to_process()) def test_server_process_forever(self):", "send_and_check_request(url, request): url = url + request response = requests.get(url) received_request = open(FakeServer.REQUEST_FILE).read()", "self.assertRaises(FakeServer.ServerStateException, self.server.serve_forever) def test_server_start_overlapped_instances(self): self.assertRaises(FakeServer.ServerStateException, self.server.start_server) def test_timeout_triggers_only_once_per_call(self): timeout = 0.3 self.server.set_timeout_delay(timeout) with", "timeout = 0.3 self.server.set_timeout_delay(timeout) with self.assertRaises(requests.exceptions.ReadTimeout): requests.get(self.server.get_url(), timeout=timeout) requests.get(self.server.get_url(), timeout=timeout) def test_server_stop_multiple_times(self): self.server.stop_server()", "cls.SERVER = FakeServer() cls.SERVER.start_server() cls.SERVER.serve_forever() def setUp(self): self.server = FakeServerTest.SERVER def test_is_server_alive(self): self.assertTrue(self.server.is_alive())", "self.assertRaises(FakeServer.ServerStateException, self.server.serve_once) self.assertRaises(FakeServer.ServerStateException, self.server.serve_forever) def test_server_start_overlapped_instances(self): 
self.assertRaises(FakeServer.ServerStateException, self.server.start_server) def test_timeout_triggers_only_once_per_call(self): timeout = 0.3", "self.server.start_server() self.server.serve_forever() def test_set_custom_response(self): expected_response = \"Expected Response\" expected_response_code = 404 self.server.set_expected_response(expected_response, expected_response_code)", "test_set_custom_response(self): expected_response = \"Expected Response\" expected_response_code = 404 self.server.set_expected_response(expected_response, expected_response_code) response = requests.get(self.server.get_url()", "expected_response_code = 404 self.server.set_expected_response(expected_response, expected_response_code) response = requests.get(self.server.get_url() + \"request\") self.assertEquals(expected_response, response.text) self.assertEquals(expected_response_code,", "def test_server_overlapped_listeners(self): self.assertTrue(self.server.is_ready_to_process()) self.assertRaises(FakeServer.ServerStateException, self.server.serve_once) self.assertRaises(FakeServer.ServerStateException, self.server.serve_forever) def test_server_start_overlapped_instances(self): self.assertRaises(FakeServer.ServerStateException, self.server.start_server) def test_timeout_triggers_only_once_per_call(self):", "def test_set_custom_response(self): expected_response = \"Expected Response\" expected_response_code = 404 self.server.set_expected_response(expected_response, expected_response_code) response =", "def test_server_start_overlapped_instances(self): self.assertRaises(FakeServer.ServerStateException, self.server.start_server) def test_timeout_triggers_only_once_per_call(self): timeout = 0.3 self.server.set_timeout_delay(timeout) with self.assertRaises(requests.exceptions.ReadTimeout): requests.get(self.server.get_url(),", "self.server.set_timeout_delay(timeout) with self.assertRaises(requests.exceptions.ReadTimeout): requests.get(self.server.get_url(), timeout=timeout) 
requests.get(self.server.get_url(), timeout=timeout) def test_server_stop_multiple_times(self): self.server.stop_server() self.assertRaises(FakeServer.ServerStateException, self.server.stop_server) self.server.start_server()", "with self.assertRaises(requests.exceptions.ReadTimeout): requests.get(self.server.get_url(), timeout=timeout) requests.get(self.server.get_url(), timeout=timeout) def test_server_stop_multiple_times(self): self.server.stop_server() self.assertRaises(FakeServer.ServerStateException, self.server.stop_server) self.server.start_server() self.server.serve_forever()", "setUp(self): self.server = FakeServerTest.SERVER def test_is_server_alive(self): self.assertTrue(self.server.is_alive()) self.assertTrue(self.server.is_ready_to_process()) def test_server_process_forever(self): self.assertTrue(self.server.is_ready_to_process()) send_and_check_request(self.server.get_url(), \"request1\")", "self.server.stop_server) self.server.start_server() self.server.serve_forever() def test_set_custom_response(self): expected_response = \"Expected Response\" expected_response_code = 404 self.server.set_expected_response(expected_response,", "@classmethod def tearDownClass(cls): try: cls.SERVER.stop_server() except: pass def send_and_check_request(url, request): url = url", "open(FakeServer.REQUEST_FILE).read() assert request in received_request[1:] # skip first character which always is '/'", "received_request[1:] # skip first character which always is '/' assert response.status_code == FakeServer.DEFAULT_RESPONSE_CODE", "def setUp(self): self.server = FakeServerTest.SERVER def test_is_server_alive(self): self.assertTrue(self.server.is_alive()) self.assertTrue(self.server.is_ready_to_process()) def test_server_process_forever(self): self.assertTrue(self.server.is_ready_to_process()) send_and_check_request(self.server.get_url(),", "unittest import requests from helpers.fake_http_server import FakeServer class FakeServerTest(unittest.TestCase): SERVER = None 
@classmethod", "self.assertTrue(self.server.is_ready_to_process()) def test_server_process_forever(self): self.assertTrue(self.server.is_ready_to_process()) send_and_check_request(self.server.get_url(), \"request1\") self.assertTrue(self.server.is_ready_to_process()) send_and_check_request(self.server.get_url(), \"request2\") self.assertTrue(self.server.is_ready_to_process()) def test_server_overlapped_listeners(self): self.assertTrue(self.server.is_ready_to_process())", "self.assertEquals(expected_response, response.text) self.assertEquals(expected_response_code, response.status_code) @classmethod def tearDownClass(cls): try: cls.SERVER.stop_server() except: pass def send_and_check_request(url,", "test_is_server_alive(self): self.assertTrue(self.server.is_alive()) self.assertTrue(self.server.is_ready_to_process()) def test_server_process_forever(self): self.assertTrue(self.server.is_ready_to_process()) send_and_check_request(self.server.get_url(), \"request1\") self.assertTrue(self.server.is_ready_to_process()) send_and_check_request(self.server.get_url(), \"request2\") self.assertTrue(self.server.is_ready_to_process()) def", "self.assertRaises(requests.exceptions.ReadTimeout): requests.get(self.server.get_url(), timeout=timeout) requests.get(self.server.get_url(), timeout=timeout) def test_server_stop_multiple_times(self): self.server.stop_server() self.assertRaises(FakeServer.ServerStateException, self.server.stop_server) self.server.start_server() self.server.serve_forever() def", "try: cls.SERVER.stop_server() except: pass def send_and_check_request(url, request): url = url + request response", "test_server_overlapped_listeners(self): self.assertTrue(self.server.is_ready_to_process()) self.assertRaises(FakeServer.ServerStateException, self.server.serve_once) self.assertRaises(FakeServer.ServerStateException, self.server.serve_forever) def test_server_start_overlapped_instances(self): self.assertRaises(FakeServer.ServerStateException, self.server.start_server) 
def test_timeout_triggers_only_once_per_call(self): timeout", "requests.get(self.server.get_url() + \"request\") self.assertEquals(expected_response, response.text) self.assertEquals(expected_response_code, response.status_code) @classmethod def tearDownClass(cls): try: cls.SERVER.stop_server() except:", "@classmethod def setUpClass(cls): cls.SERVER = FakeServer() cls.SERVER.start_server() cls.SERVER.serve_forever() def setUp(self): self.server = FakeServerTest.SERVER", "url = url + request response = requests.get(url) received_request = open(FakeServer.REQUEST_FILE).read() assert request", "url + request response = requests.get(url) received_request = open(FakeServer.REQUEST_FILE).read() assert request in received_request[1:]", "class FakeServerTest(unittest.TestCase): SERVER = None @classmethod def setUpClass(cls): cls.SERVER = FakeServer() cls.SERVER.start_server() cls.SERVER.serve_forever()", "0.3 self.server.set_timeout_delay(timeout) with self.assertRaises(requests.exceptions.ReadTimeout): requests.get(self.server.get_url(), timeout=timeout) requests.get(self.server.get_url(), timeout=timeout) def test_server_stop_multiple_times(self): self.server.stop_server() self.assertRaises(FakeServer.ServerStateException, self.server.stop_server)", "\"request1\") self.assertTrue(self.server.is_ready_to_process()) send_and_check_request(self.server.get_url(), \"request2\") self.assertTrue(self.server.is_ready_to_process()) def test_server_overlapped_listeners(self): self.assertTrue(self.server.is_ready_to_process()) self.assertRaises(FakeServer.ServerStateException, self.server.serve_once) self.assertRaises(FakeServer.ServerStateException, self.server.serve_forever) def", "# skip first character which always is '/' assert response.status_code == FakeServer.DEFAULT_RESPONSE_CODE assert", "= requests.get(self.server.get_url() + \"request\") self.assertEquals(expected_response, response.text) self.assertEquals(expected_response_code, response.status_code) @classmethod def 
tearDownClass(cls): try: cls.SERVER.stop_server()", "setUpClass(cls): cls.SERVER = FakeServer() cls.SERVER.start_server() cls.SERVER.serve_forever() def setUp(self): self.server = FakeServerTest.SERVER def test_is_server_alive(self):", "self.assertTrue(self.server.is_ready_to_process()) send_and_check_request(self.server.get_url(), \"request2\") self.assertTrue(self.server.is_ready_to_process()) def test_server_overlapped_listeners(self): self.assertTrue(self.server.is_ready_to_process()) self.assertRaises(FakeServer.ServerStateException, self.server.serve_once) self.assertRaises(FakeServer.ServerStateException, self.server.serve_forever) def test_server_start_overlapped_instances(self):", "self.assertRaises(FakeServer.ServerStateException, self.server.start_server) def test_timeout_triggers_only_once_per_call(self): timeout = 0.3 self.server.set_timeout_delay(timeout) with self.assertRaises(requests.exceptions.ReadTimeout): requests.get(self.server.get_url(), timeout=timeout) requests.get(self.server.get_url(),", "expected_response = \"Expected Response\" expected_response_code = 404 self.server.set_expected_response(expected_response, expected_response_code) response = requests.get(self.server.get_url() +", "received_request = open(FakeServer.REQUEST_FILE).read() assert request in received_request[1:] # skip first character which always", "= url + request response = requests.get(url) received_request = open(FakeServer.REQUEST_FILE).read() assert request in", "self.server.serve_forever() def test_set_custom_response(self): expected_response = \"Expected Response\" expected_response_code = 404 self.server.set_expected_response(expected_response, expected_response_code) response", "self.assertTrue(self.server.is_ready_to_process()) def test_server_overlapped_listeners(self): self.assertTrue(self.server.is_ready_to_process()) self.assertRaises(FakeServer.ServerStateException, self.server.serve_once) self.assertRaises(FakeServer.ServerStateException, 
self.server.serve_forever) def test_server_start_overlapped_instances(self): self.assertRaises(FakeServer.ServerStateException, self.server.start_server) def", "request): url = url + request response = requests.get(url) received_request = open(FakeServer.REQUEST_FILE).read() assert", "= open(FakeServer.REQUEST_FILE).read() assert request in received_request[1:] # skip first character which always is", "cls.SERVER.stop_server() except: pass def send_and_check_request(url, request): url = url + request response =", "request in received_request[1:] # skip first character which always is '/' assert response.status_code", "first character which always is '/' assert response.status_code == FakeServer.DEFAULT_RESPONSE_CODE assert response.text ==", "= FakeServer() cls.SERVER.start_server() cls.SERVER.serve_forever() def setUp(self): self.server = FakeServerTest.SERVER def test_is_server_alive(self): self.assertTrue(self.server.is_alive()) self.assertTrue(self.server.is_ready_to_process())", "pass def send_and_check_request(url, request): url = url + request response = requests.get(url) received_request", "self.server.serve_once) self.assertRaises(FakeServer.ServerStateException, self.server.serve_forever) def test_server_start_overlapped_instances(self): self.assertRaises(FakeServer.ServerStateException, self.server.start_server) def test_timeout_triggers_only_once_per_call(self): timeout = 0.3 self.server.set_timeout_delay(timeout)", "cls.SERVER.serve_forever() def setUp(self): self.server = FakeServerTest.SERVER def test_is_server_alive(self): self.assertTrue(self.server.is_alive()) self.assertTrue(self.server.is_ready_to_process()) def test_server_process_forever(self): self.assertTrue(self.server.is_ready_to_process())", "send_and_check_request(self.server.get_url(), \"request2\") self.assertTrue(self.server.is_ready_to_process()) def test_server_overlapped_listeners(self): self.assertTrue(self.server.is_ready_to_process()) 
self.assertRaises(FakeServer.ServerStateException, self.server.serve_once) self.assertRaises(FakeServer.ServerStateException, self.server.serve_forever) def test_server_start_overlapped_instances(self): self.assertRaises(FakeServer.ServerStateException,", "test_timeout_triggers_only_once_per_call(self): timeout = 0.3 self.server.set_timeout_delay(timeout) with self.assertRaises(requests.exceptions.ReadTimeout): requests.get(self.server.get_url(), timeout=timeout) requests.get(self.server.get_url(), timeout=timeout) def test_server_stop_multiple_times(self):", "Response\" expected_response_code = 404 self.server.set_expected_response(expected_response, expected_response_code) response = requests.get(self.server.get_url() + \"request\") self.assertEquals(expected_response, response.text)", "self.server.set_expected_response(expected_response, expected_response_code) response = requests.get(self.server.get_url() + \"request\") self.assertEquals(expected_response, response.text) self.assertEquals(expected_response_code, response.status_code) @classmethod def", "= None @classmethod def setUpClass(cls): cls.SERVER = FakeServer() cls.SERVER.start_server() cls.SERVER.serve_forever() def setUp(self): self.server", "timeout=timeout) requests.get(self.server.get_url(), timeout=timeout) def test_server_stop_multiple_times(self): self.server.stop_server() self.assertRaises(FakeServer.ServerStateException, self.server.stop_server) self.server.start_server() self.server.serve_forever() def test_set_custom_response(self): expected_response", "SERVER = None @classmethod def setUpClass(cls): cls.SERVER = FakeServer() cls.SERVER.start_server() cls.SERVER.serve_forever() def setUp(self):", "= \"Expected Response\" expected_response_code = 404 self.server.set_expected_response(expected_response, expected_response_code) response = requests.get(self.server.get_url() + \"request\")", "tearDownClass(cls): try: cls.SERVER.stop_server() except: pass def send_and_check_request(url, request): 
url = url + request", "None @classmethod def setUpClass(cls): cls.SERVER = FakeServer() cls.SERVER.start_server() cls.SERVER.serve_forever() def setUp(self): self.server =", "send_and_check_request(self.server.get_url(), \"request1\") self.assertTrue(self.server.is_ready_to_process()) send_and_check_request(self.server.get_url(), \"request2\") self.assertTrue(self.server.is_ready_to_process()) def test_server_overlapped_listeners(self): self.assertTrue(self.server.is_ready_to_process()) self.assertRaises(FakeServer.ServerStateException, self.server.serve_once) self.assertRaises(FakeServer.ServerStateException, self.server.serve_forever)", "self.assertTrue(self.server.is_alive()) self.assertTrue(self.server.is_ready_to_process()) def test_server_process_forever(self): self.assertTrue(self.server.is_ready_to_process()) send_and_check_request(self.server.get_url(), \"request1\") self.assertTrue(self.server.is_ready_to_process()) send_and_check_request(self.server.get_url(), \"request2\") self.assertTrue(self.server.is_ready_to_process()) def test_server_overlapped_listeners(self):", "FakeServer() cls.SERVER.start_server() cls.SERVER.serve_forever() def setUp(self): self.server = FakeServerTest.SERVER def test_is_server_alive(self): self.assertTrue(self.server.is_alive()) self.assertTrue(self.server.is_ready_to_process()) def", "\"request\") self.assertEquals(expected_response, response.text) self.assertEquals(expected_response_code, response.status_code) @classmethod def tearDownClass(cls): try: cls.SERVER.stop_server() except: pass def", "test_server_stop_multiple_times(self): self.server.stop_server() self.assertRaises(FakeServer.ServerStateException, self.server.stop_server) self.server.start_server() self.server.serve_forever() def test_set_custom_response(self): expected_response = \"Expected Response\" expected_response_code", "def send_and_check_request(url, request): url = url + request response = requests.get(url) received_request =", 
"self.server.serve_forever) def test_server_start_overlapped_instances(self): self.assertRaises(FakeServer.ServerStateException, self.server.start_server) def test_timeout_triggers_only_once_per_call(self): timeout = 0.3 self.server.set_timeout_delay(timeout) with self.assertRaises(requests.exceptions.ReadTimeout):", "FakeServerTest.SERVER def test_is_server_alive(self): self.assertTrue(self.server.is_alive()) self.assertTrue(self.server.is_ready_to_process()) def test_server_process_forever(self): self.assertTrue(self.server.is_ready_to_process()) send_and_check_request(self.server.get_url(), \"request1\") self.assertTrue(self.server.is_ready_to_process()) send_and_check_request(self.server.get_url(), \"request2\")", "import FakeServer class FakeServerTest(unittest.TestCase): SERVER = None @classmethod def setUpClass(cls): cls.SERVER = FakeServer()", "requests.get(self.server.get_url(), timeout=timeout) requests.get(self.server.get_url(), timeout=timeout) def test_server_stop_multiple_times(self): self.server.stop_server() self.assertRaises(FakeServer.ServerStateException, self.server.stop_server) self.server.start_server() self.server.serve_forever() def test_set_custom_response(self):", "request response = requests.get(url) received_request = open(FakeServer.REQUEST_FILE).read() assert request in received_request[1:] # skip", "response = requests.get(url) received_request = open(FakeServer.REQUEST_FILE).read() assert request in received_request[1:] # skip first" ]
[ "= 800, height = 600) master2.title(\"Makanan tradisional Riau\") canvas = Canvas(master2, width =", "= 4) master0.mainloop() def ntb1(self): master2 = Tk() master2.minsize(width = 800, height =", "img = PhotoImage(master = canvas,file=\"Tempoyak.png\") canvas.create_image(5,5, anchor=NW, image=img) text1 = lst[10] text2 =", "0, column = 2, columnspan = 4) master0.bprov1 = Button(master0, text='Rujak Cingur', command=self.jatim1,", "= 600) master2.title(\"Makanan tradisional Maluku\") canvas = Canvas(master2, width = 300, height =", "= lst[7] text2 = Text(master2, font = \"Arial 12\") text2.insert(INSERT, text1) text2.pack() master2.mainloop()", "master0.bprov2.grid(row = 1, column = 6, columnspan = 4) master0.mainloop() def sulsel1(self): master2", "text1 = lst[36] text2 = Text(master2, font = \"Arial 12\") text2.insert(INSERT, text1) text2.pack()", "height=3) master0.bprov2.grid(row = 1, column = 6, columnspan = 4) master0.mainloop() def sulbar1(self):", "columnspan = 4) self.bprov21 = Button(self.master, text='Kalimantan Barat', command=self.__prov21, width = 25) self.bprov21.grid(row", "master0.mainloop() def kbang1(self): master2 = Tk() master2.minsize(width = 800, height = 600) master2.maxsize(width", "Bugis', command=self.sulbar1, width = 25, height=3) master0.bprov1.grid(row = 1, column = 2, columnspan", "= 4) master0.bprov2 = Button(master0, text='Sate Bulayak', command=self.ntb2, width = 25, height=3) master0.bprov2.grid(row", "lst[7] text2 = Text(master2, font = \"Arial 12\") text2.insert(INSERT, text1) text2.pack() master2.mainloop() def", "= 2, columnspan = 4) master0.bprov1 = Button(master0, text='<NAME>', command=self.riau1, width = 25,", "Canvas(master2, width = 300, height = 300) canvas.pack() img = PhotoImage(master = canvas,file=\"Manday.png\")", "column = 6, columnspan = 4) master0.mainloop() def malu1(self): master2 = Tk() master2.minsize(width", "1, column = 3, columnspan = 4) self.bprov2 = Button(self.master, text='Sumatera Utara', 
command=self.__prov2,", "text2.pack() master2.mainloop() def __prov23(self): master0 = Tk() master0.minsize(width = 450, height = 100)", "master0.bprov1.grid(row = 1, column = 2, columnspan = 4) master0.bprov2 = Button(master0, text='Kasoami',", "text2.insert(INSERT, text1) text2.pack() master2.mainloop() def malut2(self): master2 = Tk() master2.minsize(width = 800, height", "column = 2, columnspan = 4) master0.bprov2 = Button(master0, text='Papeda', command=self.papua2, width =", "12\") text2.insert(INSERT, text1) text2.pack() master2.mainloop() def __prov18(self): master0 = Tk() master0.minsize(width = 450,", "provinsi yang ingin anda ketahui\", font = \"Arial 16 bold\") self.master.judul.grid(row = 0,", "riau1(self): master2 = Tk() master2.minsize(width = 800, height = 600) master2.maxsize(width = 800,", "= 2, columnspan = 4) master0.bprov1 = Button(master0, text='<NAME>', command=self.kriau1, width = 25,", "= canvas,file=\"Gatang Kenari.png\") canvas.create_image(5,5, anchor=NW, image=img) text1 = lst[60] text2 = Text(master2, font", "height = 600) master2.maxsize(width = 800, height = 600) master2.title(\"Makanan tradisional Jawa Timur\")", "Button(self.master, text='Papua', command=self.__prov34, width = 25) self.bprov34.grid(row = 17, column = 7, columnspan", "master0.judul.grid(row = 0, column = 2, columnspan = 4) master0.bprov1 = Button(master0, text='Binte", "= 1, column = 2, columnspan = 4) master0.bprov2 = Button(master0, text='Gulai Taboh',", "= 2, columnspan = 4) master0.bprov1 = Button(master0, text='Kalumpe', command=self.kalteng1, width = 25,", "text2.pack() master2.mainloop() def aceh2(self): master2 = Tk() master2.minsize(width = 800, height = 600)", "text2.insert(INSERT, text1) text2.pack() master2.mainloop() def __prov6(self): master0 = Tk() master0.minsize(width = 450, height", "600) master2.maxsize(width = 800, height = 600) master2.title(\"Makanan tradisional Riau\") canvas = Canvas(master2,", "img = PhotoImage(master = canvas,file=\"Lepek 
Binti.png\") canvas.create_image(5,5, anchor=NW, image=img) text1 = lst[12] text2", "while('<deskripsi>' not in line): line = f.readline() cmp = '' txt = ''", "600) master2.title(\"Makanan tradisional BAnten\") canvas = Canvas(master2, width = 300, height = 300)", "\"Arial 12\") text2.insert(INSERT, text1) text2.pack() master2.mainloop() def jateng2(self): master2 = Tk() master2.minsize(width =", "tradisional Sulawesi Tenggara\") canvas = Canvas(master2, width = 300, height = 300) canvas.pack()", "width = 300, height = 300) canvas.pack() img = PhotoImage(master = canvas,file=\"Ikan Bakar", "0, column = 2, columnspan = 4) master0.bprov1 = Button(master0, text='Rendang', command=self.sumbar1, width", "Timpan.png\") canvas.create_image(5,5, anchor=NW, image=img) text1 = lst[1] text2 = Text(master2, font = \"Arial", "columnspan = 4) master0.bprov1 = Button(master0, text='Rendang', command=self.sumbar1, width = 25, height=3) master0.bprov1.grid(row", "width = 25) self.bprov8.grid(row = 8, column = 3, columnspan = 4) self.bprov9", "master2.mainloop() def riau2(self): master2 = Tk() master2.minsize(width = 800, height = 600) master2.maxsize(width", "0, column = 2, columnspan = 4) master0.bprov1 = Button(master0, text='<NAME>', command=self.kriau1, width", "= 1, column = 6, columnspan = 4) master0.mainloop() def jambi1(self): master2 =", "= \"Arial 12\") text2.insert(INSERT, text1) text2.pack() master2.mainloop() def lamp2(self): master2 = Tk() master2.minsize(width", "4) master0.bprov2 = Button(master0, text='<NAME>', command=self.ban2, width = 25, height=3) master0.bprov2.grid(row = 1,", "= 1, column = 6, columnspan = 4) master0.mainloop() def diy1(self): master2 =", "= PhotoImage(master = canvas,file=\"Gangan Asam Banjar.png\") canvas.create_image(5,5, anchor=NW, image=img) text1 = lst[45] text2", "cmp = '' txt = '' while('<end>' not in cmp): txt += cmp", "text='Pempek Palembang', command=self.sumsel1, width = 25, height=3) master0.bprov1.grid(row = 1, column = 2,", 
"lst[16] text2 = Text(master2, font = \"Arial 12\") text2.insert(INSERT, text1) text2.pack() master2.mainloop() def", "= Button(master0, text='Tiwul', command=self.diy1, width = 25, height=3) master0.bprov1.grid(row = 1, column =", "text2.insert(INSERT, text1) text2.pack() master2.mainloop() def sulut2(self): master2 = Tk() master2.minsize(width = 800, height", "= 3, columnspan = 4) self.bprov10 = Button(self.master, text='Lampung', command=self.__prov10, width = 25)", "height = 600) master2.maxsize(width = 800, height = 600) master2.title(\"Makanan tradisional Maluku\") canvas", "columnspan = 4) master0.mainloop() def beng1(self): master2 = Tk() master2.minsize(width = 800, height", "600) master2.title(\"Makanan tradisional Lampung\") canvas = Canvas(master2, width = 300, height = 300)", "canvas.create_image(5,5, anchor=NW, image=img) text1 = lst[47] text2 = Text(master2, font = \"Arial 12\")", "master0.bprov1 = Button(master0, text='Pempek Palembang', command=self.sumsel1, width = 25, height=3) master0.bprov1.grid(row = 1,", "tradisional Kalimantan Timur\") canvas = Canvas(master2, width = 300, height = 300) canvas.pack()", "Text(master2, font = \"Arial 12\") text2.insert(INSERT, text1) text2.pack() master2.mainloop() def malu2(self): master2 =", "= lst[17] text2 = Text(master2, font = \"Arial 12\") text2.insert(INSERT, text1) text2.pack() master2.mainloop()", "width = 25) self.bprov19.grid(row = 2, column = 7, columnspan = 4) self.bprov20", "width = 25) self.bprov1.grid(row = 1, column = 3, columnspan = 4) self.bprov2", "300, height = 300) canvas.pack() img = PhotoImage(master = canvas,file=\"Ayam Taliwang.png\") canvas.create_image(5,5, anchor=NW,", "6, columnspan = 4) master0.mainloop() def bali1(self): master2 = Tk() master2.minsize(width = 800,", "anchor=NW, image=img) text1 = lst[9] text2 = Text(master2, font = \"Arial 12\") text2.insert(INSERT,", "def __prov22(self): master0 = Tk() master0.minsize(width = 450, height = 100) master0.maxsize(width =", "= 
lst[45] text2 = Text(master2, font = \"Arial 12\") text2.insert(INSERT, text1) text2.pack() master2.mainloop()", "width = 25) self.bprov23.grid(row = 6, column = 7, columnspan = 4) self.bprov24", "600) master2.maxsize(width = 800, height = 600) master2.title(\"Makanan tradisional Sulawesi Barat \") canvas", "3, columnspan = 4) self.bprov12 = Button(self.master, text='Jawa Barat', command=self.__prov12, width = 25)", "= 4) master0.bprov1 = Button(master0, text='Manday', command=self.kalsel1, width = 25, height=3) master0.bprov1.grid(row =", "anchor=NW, image=img) text1 = lst[11] text2 = Text(master2, font = \"Arial 12\") text2.insert(INSERT,", "command=self.kalbar1, width = 25, height=3) master0.bprov1.grid(row = 1, column = 2, columnspan =", "image=img) text1 = lst[64] text2 = Text(master2, font = \"Arial 12\") text2.insert(INSERT, text1)", "= Button(master0, text='<NAME>', command=self.jambi2, width = 25, height=3) master0.bprov2.grid(row = 1, column =", "master0.judul.grid(row = 0, column = 2, columnspan = 4) master0.bprov1 = Button(master0, text='Lepek", "Button(master0, text='Tekwan Palembang', command=self.sumsel2, width = 25, height=3) master0.bprov2.grid(row = 1, column =", "master2.title(\"Makanan tradisional NTB\") canvas = Canvas(master2, width = 300, height = 300) canvas.pack()", "= 6, columnspan = 4) master0.mainloop() def sulut1(self): master2 = Tk() master2.minsize(width =", "master2.mainloop() def __prov18(self): master0 = Tk() master0.minsize(width = 450, height = 100) master0.maxsize(width", "master2.mainloop() def __prov9(self): master0 = Tk() master0.minsize(width = 450, height = 100) master0.maxsize(width", "Button(self.master, text='Sumatera Barat', command=self.__prov3, width = 25) self.bprov3.grid(row = 3, column = 3,", "Timpan', command=self.aceh2, width = 25, height=3) master0.bprov2.grid(row = 1, column = 6, columnspan", "Text(master2, font = \"Arial 12\") text2.insert(INSERT, text1) text2.pack() master2.mainloop() def kriau2(self): 
master2 =", "anchor=NW, image=img) text1 = lst[36] text2 = Text(master2, font = \"Arial 12\") text2.insert(INSERT,", "lst[38] text2 = Text(master2, font = \"Arial 12\") text2.insert(INSERT, text1) text2.pack() master2.mainloop() def", "PhotoImage(master = canvas,file=\"Coto Makassar.png\") canvas.create_image(5,5, anchor=NW, image=img) text1 = lst[57] text2 = Text(master2,", "= PhotoImage(master = canvas,file=\"Nasi Bekepor.png\") canvas.create_image(5,5, anchor=NW, image=img) text1 = lst[47] text2 =", "text1 = lst[37] text2 = Text(master2, font = \"Arial 12\") text2.insert(INSERT, text1) text2.pack()", "master0.bprov1 = Button(master0, text='Kalumpe', command=self.kalteng1, width = 25, height=3) master0.bprov1.grid(row = 1, column", "300) canvas.pack() img = PhotoImage(master = canvas,file=\"Sop Konro.png\") canvas.create_image(5,5, anchor=NW, image=img) text1 =", "master0.bprov2 = Button(master0, text='<NAME>u', command=self.bali2, width = 25, height=3) master0.bprov2.grid(row = 1, column", "= 2, columnspan = 4) master0.bprov2 = Button(master0, text='Gangan Asam Banjar', command=self.kalsel2, width", "3, columnspan = 4) self.bprov5 = Button(self.master, text='Kepulauan Riau', command=self.__prov5, width = 25)", "= PhotoImage(master = canvas,file=\"Tinutuan.png\") canvas.create_image(5,5, anchor=NW, image=img) text1 = lst[51] text2 = Text(master2,", "= Text(master2, font = \"Arial 12\") text2.insert(INSERT, text1) text2.pack() master2.mainloop() def sulbar2(self): master2", "font = \"Arial 12\") text2.insert(INSERT, text1) text2.pack() master2.mainloop() def sulsel2(self): master2 = Tk()", "def sulgar1(self): master2 = Tk() master2.minsize(width = 800, height = 600) master2.maxsize(width =", "font = \"Arial 12\") text2.insert(INSERT, text1) text2.pack() master2.mainloop() def __prov30(self): master0 = Tk()", "master0.bprov2.grid(row = 1, column = 6, columnspan = 4) master0.mainloop() def jatim1(self): master2", "Button(master0, text='Apang Bugis', 
command=self.sulbar1, width = 25, height=3) master0.bprov1.grid(row = 1, column =", "= 4) master0.bprov1 = Button(master0, text='Sambal Colo-colo', command=self.malu1, width = 25, height=3) master0.bprov1.grid(row", "column = 2, columnspan = 4) master0.bprov2 = Button(master0, text='<NAME>', command=self.kalteng2, width =", "text='<NAME>', command=self.riau1, width = 25, height=3) master0.bprov1.grid(row = 1, column = 2, columnspan", "600) master2.title(\"Makanan tradisional Bengkulu\") canvas = Canvas(master2, width = 300, height = 300)", "master0.bprov2.grid(row = 1, column = 6, columnspan = 4) master0.mainloop() def sumsel1(self): master2", "text1) text2.pack() master2.mainloop() def malu2(self): master2 = Tk() master2.minsize(width = 800, height =", "__prov4(self): master0 = Tk() master0.minsize(width = 450, height = 100) master0.maxsize(width = 450,", "width = 300, height = 300) canvas.pack() img = PhotoImage(master = canvas,file=\"Udang Selingkuh.png\")", "Button(master0, text='Gangan Asam Banjar', command=self.kalsel2, width = 25, height=3) master0.bprov2.grid(row = 1, column", "600) master2.maxsize(width = 800, height = 600) master2.title(\"Makanan tradisional Aceh\") canvas = Canvas(master2,", "Button(master0, text='<NAME>', command=self.kriau1, width = 25, height=3) master0.bprov1.grid(row = 1, column = 2,", "4) master0.bprov1 = Button(master0, text='Soto Betawi', command=self.dki1, width = 25, height=3) master0.bprov1.grid(row =", "command=self.diy1, width = 25, height=3) master0.bprov1.grid(row = 1, column = 2, columnspan =", "= lst[39] text2 = Text(master2, font = \"Arial 12\") text2.insert(INSERT, text1) text2.pack() master2.mainloop()", "= 7, columnspan = 4) self.bprov29 = Button(self.master, text='Sulawesi Selatan', command=self.__prov29, width =", "PhotoImage(master = canvas,file=\"Rujak Cingur.png\") canvas.create_image(5,5, anchor=NW, image=img) text1 = lst[30] text2 = Text(master2,", "columnspan = 4) self.bprov23 = Button(self.master, 
text='Kalimantan Selatan', command=self.__prov23, width = 25) self.bprov23.grid(row", "canvas.create_image(5,5, anchor=NW, image=img) text1 = lst[40] text2 = Text(master2, font = \"Arial 12\")", "Button(self.master, text='Aceh', command=self.__prov1, width = 25) self.bprov1.grid(row = 1, column = 3, columnspan", "img = PhotoImage(master = canvas,file=\"Kohu Kohu.png\") canvas.create_image(5,5, anchor=NW, image=img) text1 = lst[63] text2", "height = 600) master2.title(\"Makanan tradisional Kalimantan Selatan\") canvas = Canvas(master2, width = 300,", "text2 = Text(master2, font = \"Arial 12\") text2.insert(INSERT, text1) text2.pack() master2.mainloop() def sumsel2(self):", "Canvas(master2, width = 300, height = 300) canvas.pack() img = PhotoImage(master = canvas,file=\"Luti", "text2 = Text(master2, font = \"Arial 12\") text2.insert(INSERT, text1) text2.pack() master2.mainloop() def __prov29(self):", "= Text(master2, font = \"Arial 12\") text2.insert(INSERT, text1) text2.pack() master2.mainloop() def lamp2(self): master2", "Bangka Belitung\") canvas = Canvas(master2, width = 300, height = 300) canvas.pack() img", "= 2, columnspan = 4) master0.bprov1 = Button(master0, text='<NAME>', command=self.papua1, width = 25,", "text2.insert(INSERT, text1) text2.pack() master2.mainloop() def sumbar2(self): master2 = Tk() master2.minsize(width = 800, height", "= lst[29] text2 = Text(master2, font = \"Arial 12\") text2.insert(INSERT, text1) text2.pack() master2.mainloop()", "canvas.create_image(5,5, anchor=NW, image=img) text1 = lst[13] text2 = Text(master2, font = \"Arial 12\")", "def __prov13(self): master0 = Tk() master0.minsize(width = 450, height = 100) master0.maxsize(width =", "Button(master0, text='Sate Ulat Sagu', command=self.pabar2, width = 25, height=3) master0.bprov2.grid(row = 1, column", "text='Bali', command=self.__prov17, width = 25) self.bprov17.grid(row = 17, column = 3, columnspan =", "master2.maxsize(width = 800, height = 600) master2.title(\"Makanan 
tradisional Kepulauan Riau\") canvas = Canvas(master2,", "command=self.dki2, width = 25, height=3) master0.bprov2.grid(row = 1, column = 6, columnspan =", "canvas.create_image(5,5, anchor=NW, image=img) text1 = lst[49] text2 = Text(master2, font = \"Arial 12\")", "lst[26] text2 = Text(master2, font = \"Arial 12\") text2.insert(INSERT, text1) text2.pack() master2.mainloop() def", "Button(master0, text='Coto Makassar', command=self.sulsel2, width = 25, height=3) master0.bprov2.grid(row = 1, column =", "= 4) master0.mainloop() def sulsel1(self): master2 = Tk() master2.minsize(width = 800, height =", "canvas.create_image(5,5, anchor=NW, image=img) text1 = lst[2] text2 = Text(master2, font = \"Arial 12\")", "= 300) canvas.pack() img = PhotoImage(master = canvas,file=\"Kepiting Soka.png\") canvas.create_image(5,5, anchor=NW, image=img) text1", "master0.bprov2.grid(row = 1, column = 6, columnspan = 4) master0.mainloop() def sumut1(self): master2", "= 7, columnspan = 4) self.bprov26 = Button(self.master, text='Sulawesi Utara', command=self.__prov26, width =", "Gendang.png\") canvas.create_image(5,5, anchor=NW, image=img) text1 = lst[8] text2 = Text(master2, font = \"Arial", "font = \"Arial 12\") text2.insert(INSERT, text1) text2.pack() master2.mainloop() def __prov24(self): master0 = Tk()", "PhotoImage(master = canvas,file=\"Ikan Bakar Manokwari.png\") canvas.create_image(5,5, anchor=NW, image=img) text1 = lst[64] text2 =", "1, column = 2, columnspan = 4) master0.bprov2 = Button(master0, text='Coto Makassar', command=self.sulsel2,", "master0.mainloop() def ntb1(self): master2 = Tk() master2.minsize(width = 800, height = 600) master2.maxsize(width", "command=self.sumut2, width = 25, height=3) master0.bprov2.grid(row = 1, column = 6, columnspan =", "def sumsel1(self): master2 = Tk() master2.minsize(width = 800, height = 600) master2.maxsize(width =", "text2.pack() master2.mainloop() def sumsel2(self): master2 = Tk() master2.minsize(width = 800, height = 600)", "text1) 
text2.pack() master2.mainloop() def ban2(self): master2 = Tk() master2.minsize(width = 800, height =", "lst[50] text2 = Text(master2, font = \"Arial 12\") text2.insert(INSERT, text1) text2.pack() master2.mainloop() def", "= 600) master2.maxsize(width = 800, height = 600) master2.title(\"Makanan tradisional Jawa Barat\") canvas", "def ntt2(self): master2 = Tk() master2.minsize(width = 800, height = 600) master2.maxsize(width =", "= 300, height = 300) canvas.pack() img = PhotoImage(master = canvas,file=\"Sei.png\") canvas.create_image(5,5, anchor=NW,", "= PhotoImage(master = canvas,file=\"Lawa.png\") canvas.create_image(5,5, anchor=NW, image=img) text1 = lst[38] text2 = Text(master2,", "Button(master0, text='Tempoyak', command=self.jambi1, width = 25, height=3) master0.bprov1.grid(row = 1, column = 2,", "master2.mainloop() def __prov33(self): master0 = Tk() master0.minsize(width = 450, height = 100) master0.maxsize(width", "lst = [] def readf(): with open('all.txt', 'r') as f: line = ''", "= 4) self.bprov12 = Button(self.master, text='Jawa Barat', command=self.__prov12, width = 25) self.bprov12.grid(row =", "column = 2, columnspan = 4) master0.bprov1 = Button(master0, text='Soto Betawi', command=self.dki1, width", "\"Arial 12\") text2.insert(INSERT, text1) text2.pack() master2.mainloop() def sulut2(self): master2 = Tk() master2.minsize(width =", "= 300, height = 300) canvas.pack() img = PhotoImage(master = canvas,file=\"Papeda.png\") canvas.create_image(5,5, anchor=NW,", "text2.insert(INSERT, text1) text2.pack() master2.mainloop() def __prov26(self): master0 = Tk() master0.minsize(width = 450, height", "300, height = 300) canvas.pack() img = PhotoImage(master = canvas,file=\"Lawa.png\") canvas.create_image(5,5, anchor=NW, image=img)", "text2.pack() master2.mainloop() def kalsel2(self): master2 = Tk() master2.minsize(width = 800, height = 600)", "columnspan = 4) master0.bprov2 = Button(master0, text='Belacan Belitung', command=self.kbang2, width = 25, height=3)", "def 
__prov14(self): master0 = Tk() master0.minsize(width = 450, height = 100) master0.maxsize(width =", "column = 3, columnspan = 4) self.bprov8 = Button(self.master, text='Sumatera Selatan', command=self.__prov8, width", "= 8, column = 3, columnspan = 4) self.bprov9 = Button(self.master, text='Kepulauan Bangka", "= 600) master2.title(\"Makanan tradisional Kepulauan Riau\") canvas = Canvas(master2, width = 300, height", "font = \"Arial 12\") text2.insert(INSERT, text1) text2.pack() master2.mainloop() def __prov33(self): master0 = Tk()", "img = PhotoImage(master = canvas,file=\"W<NAME>in.png\") canvas.create_image(5,5, anchor=NW, image=img) text1 = lst[43] text2 =", "columnspan = 8) self.bprov1 = Button(self.master, text='Aceh', command=self.__prov1, width = 25) self.bprov1.grid(row =", "4, column = 7, columnspan = 4) self.bprov22 = Button(self.master, text='Kalimantan Tengah', command=self.__prov22,", "i in range(68): while('<deskripsi>' not in line): line = f.readline() cmp = ''", "column = 7, columnspan = 4) self.master.mainloop() def __prov1(self): master0 = Tk() master0.minsize(width", "columnspan = 4) self.bprov27 = Button(self.master, text='Sulawesi Barat', command=self.__prov27, width = 25) self.bprov27.grid(row", "= \"Arial 12\") text2.insert(INSERT, text1) text2.pack() master2.mainloop() def sumbar2(self): master2 = Tk() master2.minsize(width", "text1) text2.pack() master2.mainloop() def sumsel2(self): master2 = Tk() master2.minsize(width = 800, height =", "columnspan = 4) master0.bprov1 = Button(master0, text='Kelapertaar', command=self.sulut1, width = 25, height=3) master0.bprov1.grid(row", "= 2, columnspan = 4) master0.bprov1 = Button(master0, text='<NAME>', command=self.bali1, width = 25,", "height = 300) canvas.pack() img = PhotoImage(master = canvas,file=\"Sop Konro.png\") canvas.create_image(5,5, anchor=NW, image=img)", "width = 300, height = 300) canvas.pack() img = PhotoImage(master = canvas,file=\"Gatang Kenari.png\")", "columnspan = 4) 
master0.bprov2 = Button(master0, text='Gulai Taboh', command=self.lamp2, width = 25, height=3)", "self.bprov14 = Button(self.master, text='Jawa Tengah', command=self.__prov14, width = 25) self.bprov14.grid(row = 14, column", "height = 300) canvas.pack() img = PhotoImage(master = canvas,file=\"Kaledo.png\") canvas.create_image(5,5, anchor=NW, image=img) text1", "columnspan = 4) master0.bprov1 = Button(master0, text='Soto Kudus', command=self.jateng1, width = 25, height=3)", "canvas,file=\"Ayam Cincane.png\") canvas.create_image(5,5, anchor=NW, image=img) text1 = lst[46] text2 = Text(master2, font =", "width = 25) self.bprov20.grid(row = 3, column = 7, columnspan = 4) self.bprov21", "text2.insert(INSERT, text1) text2.pack() master2.mainloop() def kriau2(self): master2 = Tk() master2.minsize(width = 800, height", "PhotoImage(master = canvas,file=\"Rendang.png\") canvas.create_image(5,5, anchor=NW, image=img) text1 = lst[4] text2 = Text(master2, font", "master0.mainloop() def diy1(self): master2 = Tk() master2.minsize(width = 800, height = 600) master2.maxsize(width", "= Button(self.master, text='Banten', command=self.__prov11, width = 25) self.bprov11.grid(row = 11, column = 3,", "text1 = lst[15] text2 = Text(master2, font = \"Arial 12\") text2.insert(INSERT, text1) text2.pack()", "text2.insert(INSERT, text1) text2.pack() master2.mainloop() def __prov23(self): master0 = Tk() master0.minsize(width = 450, height", "25) self.bprov32.grid(row = 15, column = 7, columnspan = 4) self.bprov33 = Button(self.master,", "\"Arial 12\") text2.insert(INSERT, text1) text2.pack() master2.mainloop() def __prov18(self): master0 = Tk() master0.minsize(width =", "master2.mainloop() def __prov16(self): master0 = Tk() master0.minsize(width = 450, height = 100) master0.maxsize(width", "1, column = 6, columnspan = 4) master0.mainloop() def malu1(self): master2 = Tk()", "2, columnspan = 4) master0.bprov2 = Button(master0, text='Bika Ambon', command=self.sumut2, width = 25,", 
"canvas.create_image(5,5, anchor=NW, image=img) text1 = lst[35] text2 = Text(master2, font = \"Arial 12\")", "= 0, column = 2, columnspan = 4) master0.bprov1 = Button(master0, text='Uta Kelo',", "7, columnspan = 4) self.master.mainloop() def __prov1(self): master0 = Tk() master0.minsize(width = 450,", "= canvas,file=\"Bagar Hiu.png\") canvas.create_image(5,5, anchor=NW, image=img) text1 = lst[13] text2 = Text(master2, font", "0, column = 2, columnspan = 4) master0.bprov1 = Button(master0, text='Sambal Colo-colo', command=self.malu1,", "1, column = 2, columnspan = 4) master0.bprov2 = Button(master0, text='Kohu-kohu', command=self.malu2, width", "= 300, height = 300) canvas.pack() img = PhotoImage(master = canvas,file=\"Tiwul.png\") canvas.create_image(5,5, anchor=NW,", "= lst[26] text2 = Text(master2, font = \"Arial 12\") text2.insert(INSERT, text1) text2.pack() master2.mainloop()", "column = 2, columnspan = 4) master0.bprov1 = Button(master0, text='<NAME>', command=self.kaltim1, width =", "800, height = 600) master2.title(\"Makanan tradisional Gorontalo\") canvas = Canvas(master2, width = 300,", "1, column = 6, columnspan = 4) master0.mainloop() def sumsel1(self): master2 = Tk()", "font = \"Arial 12\") text2.insert(INSERT, text1) text2.pack() master2.mainloop() def __prov26(self): master0 = Tk()", "width = 300, height = 300) canvas.pack() img = PhotoImage(master = canvas,file=\"Sate Lilit.png\")", "Tenggara', command=self.__prov30, width = 25) self.bprov30.grid(row = 13, column = 7, columnspan =", "image=img) text1 = lst[4] text2 = Text(master2, font = \"Arial 12\") text2.insert(INSERT, text1)", "2, columnspan = 4) master0.bprov2 = Button(master0, text='Jadah Tempe', command=self.diy2, width = 25,", "text=\"Se'i\", command=self.ntt1, width = 25, height=3) master0.bprov1.grid(row = 1, column = 2, columnspan", "master0.bprov1 = Button(master0, text='Bubur Paddas Sambas', command=self.kalbar1, width = 25, height=3) master0.bprov1.grid(row =", "6, columnspan = 4) 
master0.mainloop() def sulut1(self): master2 = Tk() master2.minsize(width = 800,", "master2.maxsize(width = 800, height = 600) master2.title(\"Makanan tradisional Kalimnantan Timur\") canvas = Canvas(master2,", "image=img) text1 = lst[66] text2 = Text(master2, font = \"Arial 12\") text2.insert(INSERT, text1)", "= 12, column = 3, columnspan = 4) self.bprov13 = Button(self.master, text='DKI Jakarta',", "columnspan = 4) self.bprov12 = Button(self.master, text='Jawa Barat', command=self.__prov12, width = 25) self.bprov12.grid(row", "text2.pack() master2.mainloop() def __prov17(self): master0 = Tk() master0.minsize(width = 450, height = 100)", "= 4) master0.bprov2 = Button(master0, text='Nasi Bekekpor', command=self.kaltim2, width = 25, height=3) master0.bprov2.grid(row", "def __init__(self, master=Tk()): self.master = master master.minsize(width = 500, height = 600) master.maxsize(width", "text2.pack() master2.mainloop() def bali2(self): master2 = Tk() master2.minsize(width = 800, height = 600)", "= Button(master0, text='Sate Bulayak', command=self.ntb2, width = 25, height=3) master0.bprov2.grid(row = 1, column", "\"Arial 12\") text2.insert(INSERT, text1) text2.pack() master2.mainloop() def __prov16(self): master0 = Tk() master0.minsize(width =", "img = PhotoImage(master = canvas,file=\"Seruit Lampung.png\") canvas.create_image(5,5, anchor=NW, image=img) text1 = lst[18] text2", "Button(master0, text='Lawa', command=self.kalut1, width = 25, height=3) master0.bprov1.grid(row = 1, column = 2,", "text2 = Text(master2, font = \"Arial 12\") text2.insert(INSERT, text1) text2.pack() master2.mainloop() def sulteng2(self):", "master2.mainloop() def sulteng2(self): master2 = Tk() master2.minsize(width = 800, height = 600) master2.maxsize(width", "columnspan = 4) self.bprov18 = Button(self.master, text='NTB', command=self.__prov18, width = 25) self.bprov18.grid(row =", "= \"Arial 12\") text2.insert(INSERT, text1) text2.pack() master2.mainloop() def __prov16(self): master0 = Tk() 
master0.minsize(width", "text2.pack() master2.mainloop() def kriau2(self): master2 = Tk() master2.minsize(width = 800, height = 600)", "image=img) text1 = lst[36] text2 = Text(master2, font = \"Arial 12\") text2.insert(INSERT, text1)", "anchor=NW, image=img) text1 = lst[26] text2 = Text(master2, font = \"Arial 12\") text2.insert(INSERT,", "text1 = lst[28] text2 = Text(master2, font = \"Arial 12\") text2.insert(INSERT, text1) text2.pack()", "= 3, columnspan = 8) self.bprov1 = Button(self.master, text='Aceh', command=self.__prov1, width = 25)", "master0.bprov2 = Button(master0, text='Jadah Tempe', command=self.diy2, width = 25, height=3) master0.bprov2.grid(row = 1,", "300, height = 300) canvas.pack() img = PhotoImage(master = canvas,file=\"Bilenthango.png\") canvas.create_image(5,5, anchor=NW, image=img)", "text2.insert(INSERT, text1) text2.pack() master2.mainloop() def sulbar2(self): master2 = Tk() master2.minsize(width = 800, height", "Utara', command=self.__prov31, width = 25) self.bprov31.grid(row = 14, column = 7, columnspan =", "__prov6(self): master0 = Tk() master0.minsize(width = 450, height = 100) master0.maxsize(width = 450,", "text2.insert(INSERT, text1) text2.pack() master2.mainloop() def __prov27(self): master0 = Tk() master0.minsize(width = 450, height", "text='Kalimantan Timur', command=self.__prov24, width = 25) self.bprov24.grid(row = 7, column = 7, columnspan", "master2.title(\"Makanan tradisional Sulawesi Tengah\") canvas = Canvas(master2, width = 300, height = 300)", "anchor=NW, image=img) text1 = lst[2] text2 = Text(master2, font = \"Arial 12\") text2.insert(INSERT,", "Canvas(master2, width = 300, height = 300) canvas.pack() img = PhotoImage(master = canvas,file=\"Gatang", "master0.mainloop() def sulsel1(self): master2 = Tk() master2.minsize(width = 800, height = 600) master2.maxsize(width", "master0.judul.grid(row = 0, column = 2, columnspan = 4) master0.bprov1 = Button(master0, text='Mie", "= lst[66] text2 = Text(master2, font = \"Arial 12\") 
text2.insert(INSERT, text1) text2.pack() master2.mainloop()", "= Button(master0, text='<NAME>', command=self.kbang1, width = 25, height=3) master0.bprov1.grid(row = 1, column =", "2, columnspan = 4) master0.bprov2 = Button(master0, text='Belacan Belitung', command=self.kbang2, width = 25,", "image=img) text1 = lst[35] text2 = Text(master2, font = \"Arial 12\") text2.insert(INSERT, text1)", "= 1, column = 6, columnspan = 4) master0.mainloop() def papua1(self): master2 =", "master0.judul.grid(row = 0, column = 2, columnspan = 4) master0.bprov1 = Button(master0, text='Tiwul',", "2, columnspan = 4) master0.bprov2 = Button(master0, text='Bilenthango', command=self.goron2, width = 25, height=3)", "img = PhotoImage(master = canvas,file=\"Sop Konro.png\") canvas.create_image(5,5, anchor=NW, image=img) text1 = lst[56] text2", "Button(self.master, text='Jawa Barat', command=self.__prov12, width = 25) self.bprov12.grid(row = 12, column = 3,", "text2 = Text(master2, font = \"Arial 12\") text2.insert(INSERT, text1) text2.pack() master2.mainloop() def beng2(self):", "4) master0.bprov2 = Button(master0, text='Dorokdok', command=self.jabar2, width = 25, height=3) master0.bprov2.grid(row = 1,", "= 1, column = 6, columnspan = 4) master0.mainloop() def ban1(self): master2 =", "tkinter import * lst = [] def readf(): with open('all.txt', 'r') as f:", "= \"Arial 12\") text2.insert(INSERT, text1) text2.pack() master2.mainloop() def __prov27(self): master0 = Tk() master0.minsize(width", "master0.bprov1.grid(row = 1, column = 2, columnspan = 4) master0.bprov2 = Button(master0, text='Asidah',", "= 2, columnspan = 4) master0.bprov1 = Button(master0, text='Pempek Palembang', command=self.sumsel1, width =", "= 600) master2.title(\"Makanan tradisional Sumatera Barat\") canvas = Canvas(master2, width = 300, height", "__prov17(self): master0 = Tk() master0.minsize(width = 450, height = 100) master0.maxsize(width = 450,", "= Text(master2, font = \"Arial 12\") text2.insert(INSERT, text1) 
text2.pack() master2.mainloop() def __prov32(self): master0", "tradisional Papua\") canvas = Canvas(master2, width = 300, height = 300) canvas.pack() img", "command=self.lamp2, width = 25, height=3) master0.bprov2.grid(row = 1, column = 6, columnspan =", "master0.mainloop() def kalbar1(self): master2 = Tk() master2.minsize(width = 800, height = 600) master2.maxsize(width", "columnspan = 4) master0.mainloop() def jabar1(self): master2 = Tk() master2.minsize(width = 800, height", "master0.bprov2 = Button(master0, text='Papeda', command=self.papua2, width = 25, height=3) master0.bprov2.grid(row = 1, column", "image=img) text1 = lst[13] text2 = Text(master2, font = \"Arial 12\") text2.insert(INSERT, text1)", "command=self.sulut1, width = 25, height=3) master0.bprov1.grid(row = 1, column = 2, columnspan =", "= 2, columnspan = 4) master0.bprov2 = Button(master0, text='Kue Timpan', command=self.aceh2, width =", "= 800, height = 600) master2.title(\"Makanan tradisional BAnten\") canvas = Canvas(master2, width =", "4) master0.bprov1 = Button(master0, text='<NAME>', command=self.malut1, width = 25, height=3) master0.bprov1.grid(row = 1,", "= \"Arial 12\") text2.insert(INSERT, text1) text2.pack() master2.mainloop() if __name__ == \"__main__\": readf() DirektoriMakanan()", "text1) text2.pack() master2.mainloop() def __prov18(self): master0 = Tk() master0.minsize(width = 450, height =", "300, height = 300) canvas.pack() img = PhotoImage(master = canvas,file=\"Sop Konro.png\") canvas.create_image(5,5, anchor=NW,", "= PhotoImage(master = canvas,file=\"Bolu Kemojo.png\") canvas.create_image(5,5, anchor=NW, image=img) text1 = lst[6] text2 =", "Palembang', command=self.sumsel2, width = 25, height=3) master0.bprov2.grid(row = 1, column = 6, columnspan", "columnspan = 4) master0.bprov1 = Button(master0, text='Uta Kelo', command=self.sulteng1, width = 25, height=3)", "= f.readline() class DirektoriMakanan(): def __init__(self, master=Tk()): self.master = master master.minsize(width = 
500,", "= Button(master0, text='<NAME>u', command=self.bali2, width = 25, height=3) master0.bprov2.grid(row = 1, column =", "= lst[63] text2 = Text(master2, font = \"Arial 12\") text2.insert(INSERT, text1) text2.pack() master2.mainloop()", "14, column = 3, columnspan = 4) self.bprov15 = Button(self.master, text='DI Yogyakarta', command=self.__prov15,", "height = 600) master2.title(\"Makanan tradisional Jawa Timur\") canvas = Canvas(master2, width = 300,", "300) canvas.pack() img = PhotoImage(master = canvas,file=\"Sate Bandeng.png\") canvas.create_image(5,5, anchor=NW, image=img) text1 =", "tradisional Kalimantan Tengha\") canvas = Canvas(master2, width = 300, height = 300) canvas.pack()", "canvas,file=\"Kalumpe.png\") canvas.create_image(5,5, anchor=NW, image=img) text1 = lst[42] text2 = Text(master2, font = \"Arial", "columnspan = 4) master0.mainloop() def riau1(self): master2 = Tk() master2.minsize(width = 800, height", "600) master2.title(\"Makanan tradisional Sumatera Selatan\") canvas = Canvas(master2, width = 300, height =", "= 300, height = 300) canvas.pack() img = PhotoImage(master = canvas,file=\"Bilenthango.png\") canvas.create_image(5,5, anchor=NW,", "canvas.create_image(5,5, anchor=NW, image=img) text1 = lst[59] text2 = Text(master2, font = \"Arial 12\")", "text2 = Text(master2, font = \"Arial 12\") text2.insert(INSERT, text1) text2.pack() master2.mainloop() def sulbar2(self):", "anchor=NW, image=img) text1 = lst[59] text2 = Text(master2, font = \"Arial 12\") text2.insert(INSERT,", "font = \"Arial 12\") text2.insert(INSERT, text1) text2.pack() master2.mainloop() def kaltim2(self): master2 = Tk()", "10, column = 7, columnspan = 4) self.bprov28 = Button(self.master, text='Sulawesi Tengah', command=self.__prov28,", "image=img) text1 = lst[51] text2 = Text(master2, font = \"Arial 12\") text2.insert(INSERT, text1)", "= 4) master0.bprov1 = Button(master0, text='<NAME>', command=self.kriau1, width = 25, height=3) master0.bprov1.grid(row =", "def kriau1(self): 
master2 = Tk() master2.minsize(width = 800, height = 600) master2.maxsize(width =", "master0.bprov2 = Button(master0, text='<NAME>', command=self.malut2, width = 25, height=3) master0.bprov2.grid(row = 1, column", "self.bprov31.grid(row = 14, column = 7, columnspan = 4) self.bprov32 = Button(self.master, text='Maluku',", "master0.judul.grid(row = 0, column = 2, columnspan = 4) master0.bprov1 = Button(master0, text='Apang", "4) master0.mainloop() def goron1(self): master2 = Tk() master2.minsize(width = 800, height = 600)", "height = 600) master.maxsize(width = 500, height = 600) self.master.title(\"Selamat Datang di McDones", "= PhotoImage(master = canvas,file=\"Bubur Paddas Sambas.png\") canvas.create_image(5,5, anchor=NW, image=img) text1 = lst[40] text2", "= \"Arial 12\") text2.insert(INSERT, text1) text2.pack() master2.mainloop() def sulbar2(self): master2 = Tk() master2.minsize(width", "= 300) canvas.pack() img = PhotoImage(master = canvas,file=\"Nasi Sumsum.png\") canvas.create_image(5,5, anchor=NW, image=img) text1", "PhotoImage(master = canvas,file=\"Seruit Lampung.png\") canvas.create_image(5,5, anchor=NW, image=img) text1 = lst[18] text2 = Text(master2,", "Bengkulu\") canvas = Canvas(master2, width = 300, height = 300) canvas.pack() img =", "text2.pack() master2.mainloop() def __prov34(self): master0 = Tk() master0.minsize(width = 450, height = 100)", "= 300, height = 300) canvas.pack() img = PhotoImage(master = canvas,file=\"Rendang.png\") canvas.create_image(5,5, anchor=NW,", "800, height = 600) master2.maxsize(width = 800, height = 600) master2.title(\"Makanan tradisional Jawa", "4) master0.bprov1 = Button(master0, text='<NAME>', command=self.ntb1, width = 25, height=3) master0.bprov1.grid(row = 1,", "font = \"Arial 12\") text2.insert(INSERT, text1) text2.pack() master2.mainloop() def ntt2(self): master2 = Tk()", "height = 300) canvas.pack() img = PhotoImage(master = canvas,file=\"Asidah.png\") canvas.create_image(5,5, anchor=NW, image=img) text1", "= 
1, column = 6, columnspan = 4) master0.mainloop() def kriau1(self): master2 =", "= 300, height = 300) canvas.pack() img = PhotoImage(master = canvas,file=\"Seruit Lampung.png\") canvas.create_image(5,5,", "column = 2, columnspan = 4) master0.bprov1 = Button(master0, text='<NAME>', command=self.bali1, width =", "= 800, height = 600) master2.title(\"Makanan tradisional Sulawesi Tenggara\") canvas = Canvas(master2, width", "column = 2, columnspan = 4) master0.bprov1 = Button(master0, text='<NAME>', command=self.papua1, width =", "25) self.bprov30.grid(row = 13, column = 7, columnspan = 4) self.bprov31 = Button(self.master,", "lst[57] text2 = Text(master2, font = \"Arial 12\") text2.insert(INSERT, text1) text2.pack() master2.mainloop() def", "= 300, height = 300) canvas.pack() img = PhotoImage(master = canvas,file=\"Sate Ulat Sagu.png\")", "Sumsum', command=self.ban1, width = 25, height=3) master0.bprov1.grid(row = 1, column = 2, columnspan", "master0.bprov1.grid(row = 1, column = 2, columnspan = 4) master0.bprov2 = Button(master0, text='Tapa", "= 4) master0.mainloop() def jateng1(self): master2 = Tk() master2.minsize(width = 800, height =", "master0.mainloop() def lamp1(self): master2 = Tk() master2.minsize(width = 800, height = 600) master2.maxsize(width", "8, column = 3, columnspan = 4) self.bprov9 = Button(self.master, text='Kepulauan Bangka Belitung',", "text2.insert(INSERT, text1) text2.pack() master2.mainloop() def jatim2(self): master2 = Tk() master2.minsize(width = 800, height", "tradisional Sulawesi Tengah\") canvas = Canvas(master2, width = 300, height = 300) canvas.pack()", "25) self.bprov3.grid(row = 3, column = 3, columnspan = 4) self.bprov4 = Button(self.master,", "image=img) text1 = lst[25] text2 = Text(master2, font = \"Arial 12\") text2.insert(INSERT, text1)", "= canvas,file=\"Coto Makassar.png\") canvas.create_image(5,5, anchor=NW, image=img) text1 = lst[57] text2 = Text(master2, font", "anchor=NW, image=img) text1 = lst[49] text2 = Text(master2, 
font = \"Arial 12\") text2.insert(INSERT,", "Kudus', command=self.jateng1, width = 25, height=3) master0.bprov1.grid(row = 1, column = 2, columnspan", "command=self.ntt2, width = 25, height=3) master0.bprov2.grid(row = 1, column = 6, columnspan =", "800, height = 600) master2.title(\"Makanan tradisional Sumatera Barat\") canvas = Canvas(master2, width =", "__prov8(self): master0 = Tk() master0.minsize(width = 450, height = 100) master0.maxsize(width = 450,", "column = 2, columnspan = 4) master0.bprov2 = Button(master0, text='<NAME>', command=self.malut2, width =", "= 6, columnspan = 4) master0.mainloop() def malut1(self): master2 = Tk() master2.minsize(width =", "lst[17] text2 = Text(master2, font = \"Arial 12\") text2.insert(INSERT, text1) text2.pack() master2.mainloop() def", "2, columnspan = 4) master0.bprov2 = Button(master0, text='Gong gong', command=self.kriau2, width = 25,", "font = \"Arial 12\") text2.insert(INSERT, text1) text2.pack() master2.mainloop() def __prov20(self): master0 = Tk()", "= \"Arial 12\") text2.insert(INSERT, text1) text2.pack() master2.mainloop() def kalteng2(self): master2 = Tk() master2.minsize(width", "2, columnspan = 4) master0.bprov1 = Button(master0, text='<NAME>', command=self.malut1, width = 25, height=3)", "= Text(master2, font = \"Arial 12\") text2.insert(INSERT, text1) text2.pack() master2.mainloop() def pabar2(self): master2", "width = 300, height = 300) canvas.pack() img = PhotoImage(master = canvas,file=\"Ayam Cincane.png\")", "text='Kalimantan Utara', command=self.__prov20, width = 25) self.bprov20.grid(row = 3, column = 7, columnspan", "anchor=NW, image=img) text1 = lst[41] text2 = Text(master2, font = \"Arial 12\") text2.insert(INSERT,", "image=img) text1 = lst[55] text2 = Text(master2, font = \"Arial 12\") text2.insert(INSERT, text1)", "600) master2.maxsize(width = 800, height = 600) master2.title(\"Makanan tradisional Kalimantan Timur\") canvas =", "= 600) master.maxsize(width = 500, height = 600) 
self.master.title(\"Selamat Datang di McDones (Direktori", "= Text(master2, font = \"Arial 12\") text2.insert(INSERT, text1) text2.pack() master2.mainloop() def beng2(self): master2", "column = 2, columnspan = 4) master0.bprov1 = Button(master0, text='Ikan Bakar Manokwari', command=self.pabar1,", "600) master2.maxsize(width = 800, height = 600) master2.title(\"Makanan tradisional Papua\") canvas = Canvas(master2,", "column = 3, columnspan = 4) self.bprov4 = Button(self.master, text='Riau', command=self.__prov4, width =", "canvas.pack() img = PhotoImage(master = canvas,file=\"Sate Bulayak.png\") canvas.create_image(5,5, anchor=NW, image=img) text1 = lst[35]", "Belitung\") canvas = Canvas(master2, width = 300, height = 300) canvas.pack() img =", "600) master2.maxsize(width = 800, height = 600) master2.title(\"Makanan tradisional Bali\") canvas = Canvas(master2,", "width = 300, height = 300) canvas.pack() img = PhotoImage(master = canvas,file=\"Gulai Taboh.png\")", "= 2, columnspan = 4) master0.bprov2 = Button(master0, text='<NAME>', command=self.kalut2, width = 25,", "= 600) master2.title(\"Makanan tradisional Sulawesi Barat\") canvas = Canvas(master2, width = 300, height", "font = \"Arial 12\") text2.insert(INSERT, text1) text2.pack() master2.mainloop() def pabar2(self): master2 = Tk()", "canvas,file=\"Ayam Betutu.png\") canvas.create_image(5,5, anchor=NW, image=img) text1 = lst[33] text2 = Text(master2, font =", "column = 3, columnspan = 4) self.bprov7 = Button(self.master, text='Bengukulu', command=self.__prov7, width =", "= lst[8] text2 = Text(master2, font = \"Arial 12\") text2.insert(INSERT, text1) text2.pack() master2.mainloop()", "jateng1(self): master2 = Tk() master2.minsize(width = 800, height = 600) master2.maxsize(width = 800,", "text1) text2.pack() master2.mainloop() def sulut2(self): master2 = Tk() master2.minsize(width = 800, height =", "command=self.sumbar1, width = 25, height=3) master0.bprov1.grid(row = 1, column = 2, columnspan =", "= 4) 
master0.bprov2 = Button(master0, text='<NAME>', command=self.dki2, width = 25, height=3) master0.bprov2.grid(row =", "PhotoImage(master = canvas,file=\"Sambal Colo Colo.png\") canvas.create_image(5,5, anchor=NW, image=img) text1 = lst[62] text2 =", "command=self.__prov3, width = 25) self.bprov3.grid(row = 3, column = 3, columnspan = 4)", "1, column = 6, columnspan = 4) master0.mainloop() def goron1(self): master2 = Tk()", "__prov12(self): master0 = Tk() master0.minsize(width = 450, height = 100) master0.maxsize(width = 450,", "column = 3, columnspan = 4) self.bprov16 = Button(self.master, text='Jawa Timur', command=self.__prov16, width", "300, height = 300) canvas.pack() img = PhotoImage(master = canvas,file=\"Kaledo.png\") canvas.create_image(5,5, anchor=NW, image=img)", "self.bprov29 = Button(self.master, text='Sulawesi Selatan', command=self.__prov29, width = 25) self.bprov29.grid(row = 12, column", "300, height = 300) canvas.pack() img = PhotoImage(master = canvas,file=\"Coto Makassar.png\") canvas.create_image(5,5, anchor=NW,", "25) self.bprov26.grid(row = 9, column = 7, columnspan = 4) self.bprov27 = Button(self.master,", "= 800, height = 600) master2.title(\"Makanan tradisional Papua\") canvas = Canvas(master2, width =", "text='Kelapertaar', command=self.sulut1, width = 25, height=3) master0.bprov1.grid(row = 1, column = 2, columnspan", "canvas.pack() img = PhotoImage(master = canvas,file=\"Manday.png\") canvas.create_image(5,5, anchor=NW, image=img) text1 = lst[44] text2", "text='DKI Jakarta', command=self.__prov13, width = 25) self.bprov13.grid(row = 13, column = 3, columnspan", "= 2, columnspan = 4) master0.bprov1 = Button(master0, text='Binte Biluhuta', command=self.goron1, width =", "Canvas(master2, width = 300, height = 300) canvas.pack() img = PhotoImage(master = canvas,file=\"Udang", "300) canvas.pack() img = PhotoImage(master = canvas,file=\"Lawa.png\") canvas.create_image(5,5, anchor=NW, image=img) text1 = lst[38]", "text='Sumatera Utara', 
command=self.__prov2, width = 25) self.bprov2.grid(row = 2, column = 3, columnspan", "2, columnspan = 4) master0.bprov2 = Button(master0, text='Rawon', command=self.jatim2, width = 25, height=3)", "PhotoImage(master = canvas,file=\"Nasi Lapola.png\") canvas.create_image(5,5, anchor=NW, image=img) text1 = lst[61] text2 = Text(master2,", "text2.pack() master2.mainloop() def __prov19(self): master0 = Tk() master0.minsize(width = 450, height = 100)", "width = 300, height = 300) canvas.pack() img = PhotoImage(master = canvas,file=\"Pempek Palembang.png\")", "= canvas,file=\"Dorokdok.png\") canvas.create_image(5,5, anchor=NW, image=img) text1 = lst[25] text2 = Text(master2, font =", "master2.mainloop() def jabar2(self): master2 = Tk() master2.minsize(width = 800, height = 600) master2.maxsize(width", "text='Manday', command=self.kalsel1, width = 25, height=3) master0.bprov1.grid(row = 1, column = 2, columnspan", "text1 = lst[41] text2 = Text(master2, font = \"Arial 12\") text2.insert(INSERT, text1) text2.pack()", "columnspan = 4) master0.mainloop() def kalsel1(self): master2 = Tk() master2.minsize(width = 800, height", "text='Lawa', command=self.kalut1, width = 25, height=3) master0.bprov1.grid(row = 1, column = 2, columnspan", "Sagu.png\") canvas.create_image(5,5, anchor=NW, image=img) text1 = lst[65] text2 = Text(master2, font = \"Arial", "__prov3(self): master0 = Tk() master0.minsize(width = 450, height = 100) master0.maxsize(width = 450,", "800, height = 600) master2.maxsize(width = 800, height = 600) master2.title(\"Makanan tradisional DKI", "= 6, columnspan = 4) master0.mainloop() def bali1(self): master2 = Tk() master2.minsize(width =", "300, height = 300) canvas.pack() img = PhotoImage(master = canvas,file=\"Gangan Asam Banjar.png\") canvas.create_image(5,5,", "height=3) master0.bprov2.grid(row = 1, column = 6, columnspan = 4) master0.mainloop() def jabar1(self):", "600) master2.maxsize(width = 800, height = 600) master2.title(\"Makanan tradisional Bengkulu\") 
canvas = Canvas(master2,", "= canvas,file=\"Sate Bandeng.png\") canvas.create_image(5,5, anchor=NW, image=img) text1 = lst[23] text2 = Text(master2, font", "column = 7, columnspan = 4) self.bprov27 = Button(self.master, text='Sulawesi Barat', command=self.__prov27, width", "lst[21] text2 = Text(master2, font = \"Arial 12\") text2.insert(INSERT, text1) text2.pack() master2.mainloop() def", "columnspan = 4) master0.bprov1 = Button(master0, text='<NAME>', command=self.bali1, width = 25, height=3) master0.bprov1.grid(row", "text1) text2.pack() master2.mainloop() def kalbar2(self): master2 = Tk() master2.minsize(width = 800, height =", "canvas.pack() img = PhotoImage(master = canvas,file=\"Apang Bugis.png\") canvas.create_image(5,5, anchor=NW, image=img) text1 = lst[52]", "= 4, column = 3, columnspan = 4) self.bprov5 = Button(self.master, text='Kepulauan Riau',", "master2.mainloop() def __prov13(self): master0 = Tk() master0.minsize(width = 450, height = 100) master0.maxsize(width", "= 4) master0.bprov2 = Button(master0, text='Asidah', command=self.riau2, width = 25, height=3) master0.bprov2.grid(row =", "BAnten\") canvas = Canvas(master2, width = 300, height = 300) canvas.pack() img =", "master2.mainloop() def __prov14(self): master0 = Tk() master0.minsize(width = 450, height = 100) master0.maxsize(width", "= 1, column = 6, columnspan = 4) master0.mainloop() def kalbar1(self): master2 =", "text2 = Text(master2, font = \"Arial 12\") text2.insert(INSERT, text1) text2.pack() master2.mainloop() def __prov2(self):", "PhotoImage(master = canvas,file=\"Apang Bugis.png\") canvas.create_image(5,5, anchor=NW, image=img) text1 = lst[52] text2 = Text(master2,", "canvas.create_image(5,5, anchor=NW, image=img) text1 = lst[57] text2 = Text(master2, font = \"Arial 12\")", "12\") text2.insert(INSERT, text1) text2.pack() master2.mainloop() def __prov31(self): master0 = Tk() master0.minsize(width = 450,", "self.bprov11.grid(row = 11, column = 3, columnspan = 4) self.bprov12 = 
Button(self.master, text='Jawa", "master2.mainloop() def __prov3(self): master0 = Tk() master0.minsize(width = 450, height = 100) master0.maxsize(width", "7, column = 7, columnspan = 4) self.bprov25 = Button(self.master, text='Gorontalo', command=self.__prov25, width", "600) master2.title(\"Makanan tradisional Maluku Utara\") canvas = Canvas(master2, width = 300, height =", "height=3) master0.bprov2.grid(row = 1, column = 6, columnspan = 4) master0.mainloop() def sulgar1(self):", "pabar2(self): master2 = Tk() master2.minsize(width = 800, height = 600) master2.maxsize(width = 800,", "command=self.__prov30, width = 25) self.bprov30.grid(row = 13, column = 7, columnspan = 4)", "img = PhotoImage(master = canvas,file=\"Udang Selingkuh.png\") canvas.create_image(5,5, anchor=NW, image=img) text1 = lst[66] text2", "Canvas(master2, width = 300, height = 300) canvas.pack() img = PhotoImage(master = canvas,file=\"Binte", "text2.pack() master2.mainloop() def jatim2(self): master2 = Tk() master2.minsize(width = 800, height = 600)", "text2.pack() master2.mainloop() def __prov31(self): master0 = Tk() master0.minsize(width = 450, height = 100)", "= 6, columnspan = 4) master0.mainloop() def sulsel1(self): master2 = Tk() master2.minsize(width =", "anchor=NW, image=img) text1 = lst[35] text2 = Text(master2, font = \"Arial 12\") text2.insert(INSERT,", "columnspan = 4) master0.bprov1 = Button(master0, text='<NAME>', command=self.malut1, width = 25, height=3) master0.bprov1.grid(row", "= 600) master2.title(\"Makanan tradisional DKI Jakarta\") canvas = Canvas(master2, width = 300, height", "= 4) master0.mainloop() def goron1(self): master2 = Tk() master2.minsize(width = 800, height =", "text='Sulawesi Tenggara', command=self.__prov30, width = 25) self.bprov30.grid(row = 13, column = 7, columnspan", "column = 3, columnspan = 4) self.bprov17 = Button(self.master, text='Bali', command=self.__prov17, width =", "column = 6, columnspan = 4) master0.mainloop() def sulsel1(self): master2 = Tk() 
master2.minsize(width", "Barat', command=self.__prov3, width = 25) self.bprov3.grid(row = 3, column = 3, columnspan =", "width = 300, height = 300) canvas.pack() img = PhotoImage(master = canvas,file=\"Tekwan Palembang.png\")", "columnspan = 4) master0.bprov1 = Button(master0, text='Binte Biluhuta', command=self.goron1, width = 25, height=3)", "= \"Arial 12\") text2.insert(INSERT, text1) text2.pack() master2.mainloop() def __prov3(self): master0 = Tk() master0.minsize(width", "= 600) master2.title(\"Makanan tradisional Gorontalo\") canvas = Canvas(master2, width = 300, height =", "800, height = 600) master2.title(\"Makanan tradisional Bengkulu\") canvas = Canvas(master2, width = 300,", "master.minsize(width = 500, height = 600) master.maxsize(width = 500, height = 600) self.master.title(\"Selamat", "2, columnspan = 4) master0.bprov1 = Button(master0, text='Lawa', command=self.kalut1, width = 25, height=3)", "img = PhotoImage(master = canvas,file=\"Apang Bugis.png\") canvas.create_image(5,5, anchor=NW, image=img) text1 = lst[52] text2", "master0.bprov2.grid(row = 1, column = 6, columnspan = 4) master0.mainloop() def sulgar1(self): master2", "canvas.create_image(5,5, anchor=NW, image=img) text1 = lst[58] text2 = Text(master2, font = \"Arial 12\")", "6, columnspan = 4) master0.mainloop() def riau1(self): master2 = Tk() master2.minsize(width = 800,", "anchor=NW, image=img) text1 = lst[15] text2 = Text(master2, font = \"Arial 12\") text2.insert(INSERT,", "canvas.pack() img = PhotoImage(master = canvas,file=\"Nasi Grombyang.png\") canvas.create_image(5,5, anchor=NW, image=img) text1 = lst[29]", "anchor=NW, image=img) text1 = lst[58] text2 = Text(master2, font = \"Arial 12\") text2.insert(INSERT,", "12\") text2.insert(INSERT, text1) text2.pack() master2.mainloop() def __prov11(self): master0 = Tk() master0.minsize(width = 450,", "1, column = 2, columnspan = 4) master0.bprov2 = Button(master0, text='Tapa Kolo', command=self.ntt2,", "\") canvas = Canvas(master2, width = 
300, height = 300) canvas.pack() img =", "= Text(master2, font = \"Arial 12\") text2.insert(INSERT, text1) text2.pack() master2.mainloop() def __prov28(self): master0", "= PhotoImage(master = canvas,file=\"Sop Konro.png\") canvas.create_image(5,5, anchor=NW, image=img) text1 = lst[56] text2 =", "6, columnspan = 4) master0.mainloop() def sulgar1(self): master2 = Tk() master2.minsize(width = 800,", "= 7, columnspan = 4) self.bprov34 = Button(self.master, text='Papua', command=self.__prov34, width = 25)", "text2.insert(INSERT, text1) text2.pack() master2.mainloop() def __prov18(self): master0 = Tk() master0.minsize(width = 450, height", "12\") text2.insert(INSERT, text1) text2.pack() master2.mainloop() def ban2(self): master2 = Tk() master2.minsize(width = 800,", "canvas,file=\"Soto Kudus.png\") canvas.create_image(5,5, anchor=NW, image=img) text1 = lst[28] text2 = Text(master2, font =", "= 300, height = 300) canvas.pack() img = PhotoImage(master = canvas,file=\"Nasi Bekepor.png\") canvas.create_image(5,5,", "12\") text2.insert(INSERT, text1) text2.pack() master2.mainloop() def __prov15(self): master0 = Tk() master0.minsize(width = 450,", "= 1, column = 6, columnspan = 4) master0.mainloop() def sumbar1(self): master2 =", "= canvas,file=\"Kalumpe.png\") canvas.create_image(5,5, anchor=NW, image=img) text1 = lst[42] text2 = Text(master2, font =", "25) self.bprov16.grid(row = 16, column = 3, columnspan = 4) self.bprov17 = Button(self.master,", "master2.title(\"Makanan tradisional Jambi\") canvas = Canvas(master2, width = 300, height = 300) canvas.pack()", "= 14, column = 7, columnspan = 4) self.bprov32 = Button(self.master, text='Maluku', command=self.__prov32,", "1, column = 6, columnspan = 4) master0.mainloop() def jabar1(self): master2 = Tk()", "height = 300) canvas.pack() img = PhotoImage(master = canvas,file=\"Kasoami.png\") canvas.create_image(5,5, anchor=NW, image=img) text1", "2, columnspan = 4) master0.bprov2 = Button(master0, text='Papeda', command=self.papua2, 
width = 25, height=3)", "tradisional Bali\") canvas = Canvas(master2, width = 300, height = 300) canvas.pack() img", "height = 300) canvas.pack() img = PhotoImage(master = canvas,file=\"Bagar Hiu.png\") canvas.create_image(5,5, anchor=NW, image=img)", "Button(self.master, text='NTT', command=self.__prov19, width = 25) self.bprov19.grid(row = 2, column = 7, columnspan", "4) master0.mainloop() def jateng1(self): master2 = Tk() master2.minsize(width = 800, height = 600)", "master0.mainloop() def goron1(self): master2 = Tk() master2.minsize(width = 800, height = 600) master2.maxsize(width", "= 4) self.bprov5 = Button(self.master, text='Kepulauan Riau', command=self.__prov5, width = 25) self.bprov5.grid(row =", "columnspan = 4) master0.bprov2 = Button(master0, text='Nasi Gerombyang', command=self.jateng2, width = 25, height=3)", "= 6, columnspan = 4) master0.mainloop() def goron1(self): master2 = Tk() master2.minsize(width =", "= canvas,file=\"Martabak Bangka.png\") canvas.create_image(5,5, anchor=NW, image=img) text1 = lst[16] text2 = Text(master2, font", "width = 300, height = 300) canvas.pack() img = PhotoImage(master = canvas,file=\"Klapertaart.png\") canvas.create_image(5,5,", "height = 600) master2.maxsize(width = 800, height = 600) master2.title(\"Makanan tradisional Kalimantan Utara\")", "anchor=NW, image=img) text1 = lst[6] text2 = Text(master2, font = \"Arial 12\") text2.insert(INSERT,", "= 800, height = 600) master2.title(\"Makanan tradisional Sumatera Barat\") canvas = Canvas(master2, width", "= 2, columnspan = 4) master0.bprov2 = Button(master0, text='Jepa', command=self.sulbar2, width = 25,", "columnspan = 4) self.bprov31 = Button(self.master, text='Maluku Utara', command=self.__prov31, width = 25) self.bprov31.grid(row", "image=img) text1 = lst[56] text2 = Text(master2, font = \"Arial 12\") text2.insert(INSERT, text1)", "= canvas,file=\"Sambal Colo Colo.png\") canvas.create_image(5,5, anchor=NW, image=img) text1 = lst[62] text2 = Text(master2,", 
"canvas.create_image(5,5, anchor=NW, image=img) text1 = lst[65] text2 = Text(master2, font = \"Arial 12\")", "= 1, column = 2, columnspan = 4) master0.bprov2 = Button(master0, text='<NAME>', command=self.kalteng2,", "1, column = 2, columnspan = 4) master0.bprov2 = Button(master0, text='Rawon', command=self.jatim2, width", "= 6, columnspan = 4) master0.mainloop() def sulteng1(self): master2 = Tk() master2.minsize(width =", "tradisional Jawa Timur\") canvas = Canvas(master2, width = 300, height = 300) canvas.pack()", "= lst[30] text2 = Text(master2, font = \"Arial 12\") text2.insert(INSERT, text1) text2.pack() master2.mainloop()", "def kalut2(self): master2 = Tk() master2.minsize(width = 800, height = 600) master2.maxsize(width =", "master0.bprov1.grid(row = 1, column = 2, columnspan = 4) master0.bprov2 = Button(master0, text='Tekwan", "= Text(master2, font = \"Arial 12\") text2.insert(INSERT, text1) text2.pack() master2.mainloop() def aceh2(self): master2", "text2.insert(INSERT, text1) text2.pack() master2.mainloop() def beng2(self): master2 = Tk() master2.minsize(width = 800, height", "height=3) master0.bprov2.grid(row = 1, column = 6, columnspan = 4) master0.mainloop() def sumsel1(self):", "anchor=NW, image=img) text1 = lst[54] text2 = Text(master2, font = \"Arial 12\") text2.insert(INSERT,", "command=self.__prov1, width = 25) self.bprov1.grid(row = 1, column = 3, columnspan = 4)", "= Button(master0, text='Asidah', command=self.riau2, width = 25, height=3) master0.bprov2.grid(row = 1, column =", "text2.insert(INSERT, text1) text2.pack() master2.mainloop() def __prov5(self): master0 = Tk() master0.minsize(width = 450, height", "height = 600) master2.maxsize(width = 800, height = 600) master2.title(\"Makanan tradisional Jawa Barat\")", "4) master0.bprov1 = Button(master0, text='Manday', command=self.kalsel1, width = 25, height=3) master0.bprov1.grid(row = 1,", "def __prov25(self): master0 = Tk() master0.minsize(width = 450, height = 100) master0.maxsize(width =", 
"300) canvas.pack() img = PhotoImage(master = canvas,file=\"Klapertaart.png\") canvas.create_image(5,5, anchor=NW, image=img) text1 = lst[50]", "PhotoImage(master = canvas,file=\"Bubur Paddas Sambas.png\") canvas.create_image(5,5, anchor=NW, image=img) text1 = lst[40] text2 =", "text2.pack() master2.mainloop() def __prov14(self): master0 = Tk() master0.minsize(width = 450, height = 100)", "Bekepor.png\") canvas.create_image(5,5, anchor=NW, image=img) text1 = lst[47] text2 = Text(master2, font = \"Arial", "800, height = 600) master2.maxsize(width = 800, height = 600) master2.title(\"Makanan tradisional Bengkulu\")", "text1 = lst[14] text2 = Text(master2, font = \"Arial 12\") text2.insert(INSERT, text1) text2.pack()", "canvas.pack() img = PhotoImage(master = canvas,file=\"Padamaran.png\") canvas.create_image(5,5, anchor=NW, image=img) text1 = lst[11] text2", "column = 2, columnspan = 4) master0.bprov2 = Button(master0, text='<NAME>', command=self.kalbar2, width =", "column = 6, columnspan = 4) master0.mainloop() def sulteng1(self): master2 = Tk() master2.minsize(width", "command=self.lamp1, width = 25, height=3) master0.bprov1.grid(row = 1, column = 2, columnspan =", "lst[4] text2 = Text(master2, font = \"Arial 12\") text2.insert(INSERT, text1) text2.pack() master2.mainloop() def", "master0.mainloop() def kalut1(self): master2 = Tk() master2.minsize(width = 800, height = 600) master2.maxsize(width", "= canvas,file=\"Binte Biluhuta.png\") canvas.create_image(5,5, anchor=NW, image=img) text1 = lst[48] text2 = Text(master2, font", "Button(master0, text='<NAME>', command=self.papua1, width = 25, height=3) master0.bprov1.grid(row = 1, column = 2,", "columnspan = 4) master0.bprov2 = Button(master0, text='Nasi Bekekpor', command=self.kaltim2, width = 25, height=3)", "Canvas(master2, width = 300, height = 300) canvas.pack() img = PhotoImage(master = canvas,file=\"Uta", "text='Tempoyak', command=self.jambi1, width = 25, height=3) master0.bprov1.grid(row = 1, column = 2, 
columnspan", "master0.judul.grid(row = 0, column = 2, columnspan = 4) master0.bprov1 = Button(master0, text='Sop", "canvas.pack() img = PhotoImage(master = canvas,file=\"Papeda.png\") canvas.create_image(5,5, anchor=NW, image=img) text1 = lst[67] text2", "def sumut2(self): master2 = Tk() master2.minsize(width = 800, height = 600) master2.maxsize(width =", "font = \"Arial 12\") text2.insert(INSERT, text1) text2.pack() master2.mainloop() def __prov32(self): master0 = Tk()", "= 300) canvas.pack() img = PhotoImage(master = canvas,file=\"Tapa Kolo.png\") canvas.create_image(5,5, anchor=NW, image=img) text1", "= 7, columnspan = 4) self.master.mainloop() def __prov1(self): master0 = Tk() master0.minsize(width =", "text1 = lst[17] text2 = Text(master2, font = \"Arial 12\") text2.insert(INSERT, text1) text2.pack()", "text1 = lst[51] text2 = Text(master2, font = \"Arial 12\") text2.insert(INSERT, text1) text2.pack()", "= 0, column = 2, columnspan = 4) master0.bprov1 = Button(master0, text='Binte Biluhuta',", "= Button(master0, text='<NAME>', command=self.dki2, width = 25, height=3) master0.bprov2.grid(row = 1, column =", "column = 7, columnspan = 4) self.bprov34 = Button(self.master, text='Papua', command=self.__prov34, width =", "= 600) master2.maxsize(width = 800, height = 600) master2.title(\"Makanan tradisional Gorontalo\") canvas =", "def __prov20(self): master0 = Tk() master0.minsize(width = 450, height = 100) master0.maxsize(width =", "= 2, columnspan = 4) master0.bprov2 = Button(master0, text='Dorokdok', command=self.jabar2, width = 25,", "\"Arial 16 bold\") self.master.judul.grid(row = 0, column = 3, columnspan = 8) self.bprov1", "img = PhotoImage(master = canvas,file=\"Gatang Kenari.png\") canvas.create_image(5,5, anchor=NW, image=img) text1 = lst[60] text2", "height = 300) canvas.pack() img = PhotoImage(master = canvas,file=\"Sate Bulayak.png\") canvas.create_image(5,5, anchor=NW, image=img)", "Kelo', command=self.sulteng1, width = 25, height=3) 
master0.bprov1.grid(row = 1, column = 2, columnspan", "6, columnspan = 4) master0.mainloop() def kriau1(self): master2 = Tk() master2.minsize(width = 800,", "12\") text2.insert(INSERT, text1) text2.pack() master2.mainloop() def __prov28(self): master0 = Tk() master0.minsize(width = 450,", "300) canvas.pack() img = PhotoImage(master = canvas,file=\"Rujak Cingur.png\") canvas.create_image(5,5, anchor=NW, image=img) text1 =", "canvas,file=\"Kasoami.png\") canvas.create_image(5,5, anchor=NW, image=img) text1 = lst[59] text2 = Text(master2, font = \"Arial", "text1) text2.pack() master2.mainloop() def sulgar2(self): master2 = Tk() master2.minsize(width = 800, height =", "text='Banten', command=self.__prov11, width = 25) self.bprov11.grid(row = 11, column = 3, columnspan =", "master0.mainloop() def sulgar1(self): master2 = Tk() master2.minsize(width = 800, height = 600) master2.maxsize(width", "Text(master2, font = \"Arial 12\") text2.insert(INSERT, text1) text2.pack() master2.mainloop() def __prov33(self): master0 =", "= 1, column = 2, columnspan = 4) master0.bprov2 = Button(master0, text='Tekwan Palembang',", "= canvas,file=\"Gangan Asam Banjar.png\") canvas.create_image(5,5, anchor=NW, image=img) text1 = lst[45] text2 = Text(master2,", "= \"Arial 12\") text2.insert(INSERT, text1) text2.pack() master2.mainloop() def sulgar2(self): master2 = Tk() master2.minsize(width", "text1) text2.pack() master2.mainloop() def __prov6(self): master0 = Tk() master0.minsize(width = 450, height =", "= 4) master0.bprov1 = Button(master0, text='Sop Konro', command=self.sulsel1, width = 25, height=3) master0.bprov1.grid(row", "= 2, columnspan = 4) master0.bprov2 = Button(master0, text='<NAME>', command=self.dki2, width = 25,", "img = PhotoImage(master = canvas,file=\"Jepa.png\") canvas.create_image(5,5, anchor=NW, image=img) text1 = lst[53] text2 =", "7, columnspan = 4) self.bprov34 = Button(self.master, text='Papua', command=self.__prov34, width = 25) self.bprov34.grid(row", "text1) 
text2.pack() master2.mainloop() def jabar2(self): master2 = Tk() master2.minsize(width = 800, height =", "= 4) master0.bprov1 = Button(master0, text='<NAME>', command=self.bali1, width = 25, height=3) master0.bprov1.grid(row =", "columnspan = 4) master0.mainloop() def sulsel1(self): master2 = Tk() master2.minsize(width = 800, height", "width = 300, height = 300) canvas.pack() img = PhotoImage(master = canvas,file=\"Tempoyak.png\") canvas.create_image(5,5,", "kalsel1(self): master2 = Tk() master2.minsize(width = 800, height = 600) master2.maxsize(width = 800,", "yang ingin anda ketahui\", font = \"Arial 16 bold\") self.master.judul.grid(row = 0, column", "= Button(self.master, text='NTT', command=self.__prov19, width = 25) self.bprov19.grid(row = 2, column = 7,", "font = \"Arial 12\") text2.insert(INSERT, text1) text2.pack() master2.mainloop() def sulgar2(self): master2 = Tk()", "= 2, columnspan = 4) master0.bprov2 = Button(master0, text='Bagar Hiu', command=self.beng2, width =", "lst[11] text2 = Text(master2, font = \"Arial 12\") text2.insert(INSERT, text1) text2.pack() master2.mainloop() def", "= Tk() master0.minsize(width = 450, height = 100) master0.maxsize(width = 450, height =", "12\") text2.insert(INSERT, text1) text2.pack() master2.mainloop() def __prov9(self): master0 = Tk() master0.minsize(width = 450,", "Button(master0, text='Nasi Sumsum', command=self.ban1, width = 25, height=3) master0.bprov1.grid(row = 1, column =", "= Text(master2, font = \"Arial 12\") text2.insert(INSERT, text1) text2.pack() master2.mainloop() def kriau2(self): master2", "600) master2.title(\"Makanan tradisional Banten\") canvas = Canvas(master2, width = 300, height = 300)", "= 6, columnspan = 4) master0.mainloop() def kalsel1(self): master2 = Tk() master2.minsize(width =", "text1 = lst[29] text2 = Text(master2, font = \"Arial 12\") text2.insert(INSERT, text1) text2.pack()", "height = 600) master2.title(\"Makanan tradisional Gorontalo\") canvas = Canvas(master2, width = 300, height", 
"Canvas(master2, width = 300, height = 300) canvas.pack() img = PhotoImage(master = canvas,file=\"Gangan", "master2.maxsize(width = 800, height = 600) master2.title(\"Makanan tradisional BAnten\") canvas = Canvas(master2, width", "text2.pack() master2.mainloop() def __prov16(self): master0 = Tk() master0.minsize(width = 450, height = 100)", "canvas,file=\"Kohu Kohu.png\") canvas.create_image(5,5, anchor=NW, image=img) text1 = lst[63] text2 = Text(master2, font =", "height = 600) master2.title(\"Makanan tradisional Maluku\") canvas = Canvas(master2, width = 300, height", "= 600) master2.maxsize(width = 800, height = 600) master2.title(\"Makanan tradisional Lampung\") canvas =", "papua1(self): master2 = Tk() master2.minsize(width = 800, height = 600) master2.maxsize(width = 800,", "master0.mainloop() def aceh1(self): master2 = Tk() master2.minsize(width = 800, height = 600) master2.maxsize(width", "\"Arial 12\") text2.insert(INSERT, text1) text2.pack() master2.mainloop() def sumut2(self): master2 = Tk() master2.minsize(width =", "300) canvas.pack() img = PhotoImage(master = canvas,file=\"Nasi Lapola.png\") canvas.create_image(5,5, anchor=NW, image=img) text1 =", "Button(self.master, text='Sulawesi Tengah', command=self.__prov28, width = 25) self.bprov28.grid(row = 11, column = 7,", "Button(self.master, text='Jawa Timur', command=self.__prov16, width = 25) self.bprov16.grid(row = 16, column = 3,", "master0.bprov2.grid(row = 1, column = 6, columnspan = 4) master0.mainloop() def kriau1(self): master2", "def __prov12(self): master0 = Tk() master0.minsize(width = 450, height = 100) master0.maxsize(width =", "600) master2.maxsize(width = 800, height = 600) master2.title(\"Makanan tradisional NTB\") canvas = Canvas(master2,", "\"Arial 12\") text2.insert(INSERT, text1) text2.pack() master2.mainloop() def dki2(self): master2 = Tk() master2.minsize(width =", "= 25) self.bprov24.grid(row = 7, column = 7, columnspan = 4) self.bprov25 =", "text1 = lst[3] text2 = Text(master2, 
font = \"Arial 12\") text2.insert(INSERT, text1) text2.pack()", "lst[47] text2 = Text(master2, font = \"Arial 12\") text2.insert(INSERT, text1) text2.pack() master2.mainloop() def", "= 2, columnspan = 4) master0.bprov1 = Button(master0, text='Seruit Lampung', command=self.lamp1, width =", "25) self.bprov6.grid(row = 6, column = 3, columnspan = 4) self.bprov7 = Button(self.master,", "= 7, columnspan = 4) self.bprov24 = Button(self.master, text='Kalimantan Timur', command=self.__prov24, width =", "4) self.bprov2 = Button(self.master, text='Sumatera Utara', command=self.__prov2, width = 25) self.bprov2.grid(row = 2,", "4) self.bprov23 = Button(self.master, text='Kalimantan Selatan', command=self.__prov23, width = 25) self.bprov23.grid(row = 6,", "command=self.__prov34, width = 25) self.bprov34.grid(row = 17, column = 7, columnspan = 4)", "Button(self.master, text='Jambi', command=self.__prov6, width = 25) self.bprov6.grid(row = 6, column = 3, columnspan", "master0.bprov1.grid(row = 1, column = 2, columnspan = 4) master0.bprov2 = Button(master0, text='Nasi", "300, height = 300) canvas.pack() img = PhotoImage(master = canvas,file=\"Nasi Bekepor.png\") canvas.create_image(5,5, anchor=NW,", "text2.pack() master2.mainloop() def goron2(self): master2 = Tk() master2.minsize(width = 800, height = 600)", "2, columnspan = 4) master0.bprov2 = Button(master0, text='Tinutuan', command=self.sulut2, width = 25, height=3)", "= \"Arial 12\") text2.insert(INSERT, text1) text2.pack() master2.mainloop() def __prov5(self): master0 = Tk() master0.minsize(width", "height = 300) canvas.pack() img = PhotoImage(master = canvas,file=\"Soto Betawi.png\") canvas.create_image(5,5, anchor=NW, image=img)", "= 4) self.bprov14 = Button(self.master, text='Jawa Tengah', command=self.__prov14, width = 25) self.bprov14.grid(row =", "self.bprov30 = Button(self.master, text='Sulawesi Tenggara', command=self.__prov30, width = 25) self.bprov30.grid(row = 13, column", "command=self.sulbar1, width = 25, 
height=3) master0.bprov1.grid(row = 1, column = 2, columnspan =", "= Button(master0, text='Kohu-kohu', command=self.malu2, width = 25, height=3) master0.bprov2.grid(row = 1, column =", "= 2, columnspan = 4) master0.bprov2 = Button(master0, text='Ampiang Dadiah', command=self.sumbar2, width =", "master0.bprov2 = Button(master0, text='Nasi Bekekpor', command=self.kaltim2, width = 25, height=3) master0.bprov2.grid(row = 1,", "\"Arial 12\") text2.insert(INSERT, text1) text2.pack() master2.mainloop() def __prov25(self): master0 = Tk() master0.minsize(width =", "= lst[54] text2 = Text(master2, font = \"Arial 12\") text2.insert(INSERT, text1) text2.pack() master2.mainloop()", "4) self.bprov26 = Button(self.master, text='Sulawesi Utara', command=self.__prov26, width = 25) self.bprov26.grid(row = 9,", "0, column = 2, columnspan = 4) master0.bprov1 = Button(master0, text='<NAME>', command=self.malut1, width", "\"Arial 12\") text2.insert(INSERT, text1) text2.pack() master2.mainloop() def __prov29(self): master0 = Tk() master0.minsize(width =", "img = PhotoImage(master = canvas,file=\"Dorokdok.png\") canvas.create_image(5,5, anchor=NW, image=img) text1 = lst[25] text2 =", "= Text(master2, font = \"Arial 12\") text2.insert(INSERT, text1) text2.pack() master2.mainloop() def sumut2(self): master2", "text2 = Text(master2, font = \"Arial 12\") text2.insert(INSERT, text1) text2.pack() master2.mainloop() def goron2(self):", "11, column = 3, columnspan = 4) self.bprov12 = Button(self.master, text='Jawa Barat', command=self.__prov12,", "'' while('<end>' not in cmp): txt += cmp cmp = f.readline() lst.append(txt) line", "width = 25) self.bprov34.grid(row = 17, column = 7, columnspan = 4) self.master.mainloop()", "4) self.bprov25 = Button(self.master, text='Gorontalo', command=self.__prov25, width = 25) self.bprov25.grid(row = 8, column", "4) master0.bprov2 = Button(master0, text='Bika Ambon', command=self.sumut2, width = 25, height=3) master0.bprov2.grid(row =", "master0.bprov1 = 
Button(master0, text='Rendang', command=self.sumbar1, width = 25, height=3) master0.bprov1.grid(row = 1, column", "command=self.__prov9, width = 25) self.bprov9.grid(row = 9, column = 3, columnspan = 4)", "300, height = 300) canvas.pack() img = PhotoImage(master = canvas,file=\"Sate Lilit.png\") canvas.create_image(5,5, anchor=NW,", "Sambas', command=self.kalbar1, width = 25, height=3) master0.bprov1.grid(row = 1, column = 2, columnspan", "1, column = 2, columnspan = 4) master0.bprov2 = Button(master0, text='<NAME>', command=self.kalteng2, width", "6, columnspan = 4) master0.mainloop() def beng1(self): master2 = Tk() master2.minsize(width = 800,", "= 4) master0.bprov2 = Button(master0, text='Bagar Hiu', command=self.beng2, width = 25, height=3) master0.bprov2.grid(row", "25) self.bprov24.grid(row = 7, column = 7, columnspan = 4) self.bprov25 = Button(self.master,", "= 6, columnspan = 4) master0.mainloop() def ntt1(self): master2 = Tk() master2.minsize(width =", "= 2, columnspan = 4) master0.bprov1 = Button(master0, text='Uta Kelo', command=self.sulteng1, width =", "text1) text2.pack() master2.mainloop() def __prov19(self): master0 = Tk() master0.minsize(width = 450, height =", "master2.maxsize(width = 800, height = 600) master2.title(\"Makanan tradisional Kalimantan Utara\") canvas = Canvas(master2,", "width = 300, height = 300) canvas.pack() img = PhotoImage(master = canvas,file=\"Nasi Grombyang.png\")", "300) canvas.pack() img = PhotoImage(master = canvas,file=\"Sate Bulayak.png\") canvas.create_image(5,5, anchor=NW, image=img) text1 =", "= 5, column = 3, columnspan = 4) self.bprov6 = Button(self.master, text='Jambi', command=self.__prov6,", "= [] def readf(): with open('all.txt', 'r') as f: line = '' for", "= PhotoImage(master = canvas,file=\"<NAME>.png\") canvas.create_image(5,5, anchor=NW, image=img) text1 = lst[24] text2 = Text(master2,", "column = 2, columnspan = 4) master0.bprov2 = Button(master0, text='Bilenthango', command=self.goron2, width =", "= 
Text(master2, font = \"Arial 12\") text2.insert(INSERT, text1) text2.pack() master2.mainloop() def sulsel2(self): master2", "img = PhotoImage(master = canvas,file=\"Nasi Sumsum.png\") canvas.create_image(5,5, anchor=NW, image=img) text1 = lst[22] text2", "column = 2, columnspan = 4) master0.bprov1 = Button(master0, text='Pempek Palembang', command=self.sumsel1, width", "= 7, columnspan = 4) self.bprov28 = Button(self.master, text='Sulawesi Tengah', command=self.__prov28, width =", "canvas.pack() img = PhotoImage(master = canvas,file=\"Gong Gong.png\") canvas.create_image(5,5, anchor=NW, image=img) text1 = lst[9]", "= Button(master0, text='Rendang', command=self.sumbar1, width = 25, height=3) master0.bprov1.grid(row = 1, column =", "= 600) master2.maxsize(width = 800, height = 600) master2.title(\"Makanan tradisional Bali\") canvas =", "= 300, height = 300) canvas.pack() img = PhotoImage(master = canvas,file=\"Udang Selingkuh.png\") canvas.create_image(5,5,", "= Text(master2, font = \"Arial 12\") text2.insert(INSERT, text1) text2.pack() master2.mainloop() def __prov18(self): master0", "columnspan = 4) master0.bprov2 = Button(master0, text='Kaledo', command=self.sulteng2, width = 25, height=3) master0.bprov2.grid(row", "0, column = 2, columnspan = 4) master0.bprov1 = Button(master0, text='Binte Biluhuta', command=self.goron1,", "master0.judul.grid(row = 0, column = 2, columnspan = 4) master0.bprov1 = Button(master0, text='Rujak", "4) master0.bprov2 = Button(master0, text='Coto Makassar', command=self.sulsel2, width = 25, height=3) master0.bprov2.grid(row =", "= Label(self.master, text = \"Pilih provinsi yang ingin anda ketahui\", font = \"Arial", "__prov33(self): master0 = Tk() master0.minsize(width = 450, height = 100) master0.maxsize(width = 450,", "1, column = 2, columnspan = 4) master0.bprov2 = Button(master0, text='<NAME>u', command=self.bali2, width", "text1 = lst[62] text2 = Text(master2, font = \"Arial 12\") text2.insert(INSERT, text1) text2.pack()", "img = 
PhotoImage(master = canvas,file=\"Uta Kelo.png\") canvas.create_image(5,5, anchor=NW, image=img) text1 = lst[54] text2", "canvas.pack() img = PhotoImage(master = canvas,file=\"Asidah.png\") canvas.create_image(5,5, anchor=NW, image=img) text1 = lst[7] text2", "25) self.bprov28.grid(row = 11, column = 7, columnspan = 4) self.bprov29 = Button(self.master,", "master0.bprov2.grid(row = 1, column = 6, columnspan = 4) master0.mainloop() def ntt1(self): master2", "= 300) canvas.pack() img = PhotoImage(master = canvas,file=\"Manday.png\") canvas.create_image(5,5, anchor=NW, image=img) text1 =", "= 13, column = 3, columnspan = 4) self.bprov14 = Button(self.master, text='Jawa Tengah',", "800, height = 600) master2.maxsize(width = 800, height = 600) master2.title(\"Makanan tradisional Lampung\")", "300) canvas.pack() img = PhotoImage(master = canvas,file=\"Manday.png\") canvas.create_image(5,5, anchor=NW, image=img) text1 = lst[44]", "font = \"Arial 12\") text2.insert(INSERT, text1) text2.pack() master2.mainloop() def __prov6(self): master0 = Tk()", "= 300, height = 300) canvas.pack() img = PhotoImage(master = canvas,file=\"Sop Konro.png\") canvas.create_image(5,5,", "Button(self.master, text='Maluku', command=self.__prov32, width = 25) self.bprov32.grid(row = 15, column = 7, columnspan", "master0.bprov2.grid(row = 1, column = 6, columnspan = 4) master0.mainloop() def goron1(self): master2", "img = PhotoImage(master = canvas,file=\"Papeda.png\") canvas.create_image(5,5, anchor=NW, image=img) text1 = lst[67] text2 =", "= PhotoImage(master = canvas,file=\"Bika Ambon.png\") canvas.create_image(5,5, anchor=NW, image=img) text1 = lst[3] text2 =", "= Button(master0, text='Nasi Gerombyang', command=self.jateng2, width = 25, height=3) master0.bprov2.grid(row = 1, column", "columnspan = 4) master0.mainloop() def kalbar1(self): master2 = Tk() master2.minsize(width = 800, height", "= Button(master0, text='Belacan Belitung', command=self.kbang2, width = 25, height=3) 
master0.bprov2.grid(row = 1, column", "PhotoImage(master = canvas,file=\"Belaca Belitung.png\") canvas.create_image(5,5, anchor=NW, image=img) text1 = lst[17] text2 = Text(master2,", "= 300) canvas.pack() img = PhotoImage(master = canvas,file=\"Nasi Grombyang.png\") canvas.create_image(5,5, anchor=NW, image=img) text1", "command=self.papua2, width = 25, height=3) master0.bprov2.grid(row = 1, column = 6, columnspan =", "def __prov7(self): master0 = Tk() master0.minsize(width = 450, height = 100) master0.maxsize(width =", "Tengah\") canvas = Canvas(master2, width = 300, height = 300) canvas.pack() img =", "= 1, column = 6, columnspan = 4) master0.mainloop() def sulut1(self): master2 =", "self.master.judul = Label(self.master, text = \"Pilih provinsi yang ingin anda ketahui\", font =", "width = 300, height = 300) canvas.pack() img = PhotoImage(master = canvas,file=\"Soto Kudus.png\")", "= 600) master2.maxsize(width = 800, height = 600) master2.title(\"Makanan tradisional Sulawesi Utara\") canvas", "= Button(master0, text='Sinonggi', command=self.sulgar1, width = 25, height=3) master0.bprov1.grid(row = 1, column =", "300) canvas.pack() img = PhotoImage(master = canvas,file=\"Dorokdok.png\") canvas.create_image(5,5, anchor=NW, image=img) text1 = lst[25]", "6, columnspan = 4) master0.mainloop() def papua1(self): master2 = Tk() master2.minsize(width = 800,", "text1) text2.pack() master2.mainloop() def kriau2(self): master2 = Tk() master2.minsize(width = 800, height =", "column = 6, columnspan = 4) master0.mainloop() def malut1(self): master2 = Tk() master2.minsize(width", "4) master0.bprov1 = Button(master0, text='<NAME>', command=self.kbang1, width = 25, height=3) master0.bprov1.grid(row = 1,", "column = 2, columnspan = 4) master0.bprov2 = Button(master0, text='Gulai Taboh', command=self.lamp2, width", "= 300) canvas.pack() img = PhotoImage(master = canvas,file=\"Jadah Tempe.png\") canvas.create_image(5,5, anchor=NW, image=img) text1", "\"Arial 12\") text2.insert(INSERT, 
text1) text2.pack() master2.mainloop() def ban2(self): master2 = Tk() master2.minsize(width =", "columnspan = 4) master0.bprov2 = Button(master0, text='Kasoami', command=self.sulgar2, width = 25, height=3) master0.bprov2.grid(row", "2, column = 3, columnspan = 4) self.bprov3 = Button(self.master, text='Sumatera Barat', command=self.__prov3,", "def __prov21(self): master0 = Tk() master0.minsize(width = 450, height = 100) master0.maxsize(width =", "text='Mie Aceh', command=self.aceh1, width = 25, height=3) master0.bprov1.grid(row = 1, column = 2,", "= Button(self.master, text='Gorontalo', command=self.__prov25, width = 25) self.bprov25.grid(row = 8, column = 7,", "800, height = 600) master2.maxsize(width = 800, height = 600) master2.title(\"Makanan tradisional Kalimantan", "sulgar2(self): master2 = Tk() master2.minsize(width = 800, height = 600) master2.maxsize(width = 800,", "text1 = lst[59] text2 = Text(master2, font = \"Arial 12\") text2.insert(INSERT, text1) text2.pack()", "self.bprov31 = Button(self.master, text='Maluku Utara', command=self.__prov31, width = 25) self.bprov31.grid(row = 14, column", "= canvas,file=\"Nasi Grombyang.png\") canvas.create_image(5,5, anchor=NW, image=img) text1 = lst[29] text2 = Text(master2, font", "4) self.bprov18 = Button(self.master, text='NTB', command=self.__prov18, width = 25) self.bprov18.grid(row = 1, column", "600) master2.title(\"Makanan tradisional Kalimantan Utara\") canvas = Canvas(master2, width = 300, height =", "= 300) canvas.pack() img = PhotoImage(master = canvas,file=\"Ampiang Dadiah.png\") canvas.create_image(5,5, anchor=NW, image=img) text1", "\"Arial 12\") text2.insert(INSERT, text1) text2.pack() master2.mainloop() def kalsel2(self): master2 = Tk() master2.minsize(width =", "600) master2.title(\"Makanan tradisional Sulawesi Tenggara\") canvas = Canvas(master2, width = 300, height =", "image=img) text1 = lst[50] text2 = Text(master2, font = \"Arial 12\") text2.insert(INSERT, text1)", "master2.title(\"Makanan 
tradisional Kepulauan Riau\") canvas = Canvas(master2, width = 300, height = 300)", "= Button(master0, text='Nasi Bekekpor', command=self.kaltim2, width = 25, height=3) master0.bprov2.grid(row = 1, column", "column = 7, columnspan = 4) self.bprov31 = Button(self.master, text='Maluku Utara', command=self.__prov31, width", "master0.bprov2.grid(row = 1, column = 6, columnspan = 4) master0.mainloop() def riau1(self): master2", "master2.mainloop() def dki2(self): master2 = Tk() master2.minsize(width = 800, height = 600) master2.maxsize(width", "600) master2.title(\"Makanan tradisional Kalimantan Barat\") canvas = Canvas(master2, width = 300, height =", "self.bprov9 = Button(self.master, text='Kepulauan Bangka Belitung', command=self.__prov9, width = 25) self.bprov9.grid(row = 9,", "PhotoImage(master = canvas,file=\"Lawa.png\") canvas.create_image(5,5, anchor=NW, image=img) text1 = lst[38] text2 = Text(master2, font", "image=img) text1 = lst[12] text2 = Text(master2, font = \"Arial 12\") text2.insert(INSERT, text1)", "Ulat Sagu', command=self.pabar2, width = 25, height=3) master0.bprov2.grid(row = 1, column = 6,", "columnspan = 4) master0.mainloop() def sulteng1(self): master2 = Tk() master2.minsize(width = 800, height", "= 2, columnspan = 4) master0.bprov2 = Button(master0, text='Rawon', command=self.jatim2, width = 25,", "Banjar.png\") canvas.create_image(5,5, anchor=NW, image=img) text1 = lst[45] text2 = Text(master2, font = \"Arial", "font = \"Arial 12\") text2.insert(INSERT, text1) text2.pack() master2.mainloop() def ban2(self): master2 = Tk()", "5, column = 7, columnspan = 4) self.bprov23 = Button(self.master, text='Kalimantan Selatan', command=self.__prov23,", "4) master0.mainloop() def ntt1(self): master2 = Tk() master2.minsize(width = 800, height = 600)", "height = 300) canvas.pack() img = PhotoImage(master = canvas,file=\"Martabak Bangka.png\") canvas.create_image(5,5, anchor=NW, image=img)", "Grombyang.png\") canvas.create_image(5,5, anchor=NW, image=img) 
text1 = lst[29] text2 = Text(master2, font = \"Arial", "= 300, height = 300) canvas.pack() img = PhotoImage(master = canvas,file=\"Ayam Taliwang.png\") canvas.create_image(5,5,", "height=3) master0.bprov2.grid(row = 1, column = 6, columnspan = 4) master0.mainloop() def ntt1(self):", "canvas.pack() img = PhotoImage(master = canvas,file=\"Sop Konro.png\") canvas.create_image(5,5, anchor=NW, image=img) text1 = lst[56]", "Text(master2, font = \"Arial 12\") text2.insert(INSERT, text1) text2.pack() master2.mainloop() def sulsel2(self): master2 =", "columnspan = 4) master0.bprov2 = Button(master0, text='Bilenthango', command=self.goron2, width = 25, height=3) master0.bprov2.grid(row", "\"Arial 16 bold\") master0.judul.grid(row = 0, column = 2, columnspan = 4) master0.bprov1", "PhotoImage(master = canvas,file=\"Bika Ambon.png\") canvas.create_image(5,5, anchor=NW, image=img) text1 = lst[3] text2 = Text(master2,", "PhotoImage(master = canvas,file=\"Uta Kelo.png\") canvas.create_image(5,5, anchor=NW, image=img) text1 = lst[54] text2 = Text(master2,", "= 2, columnspan = 4) master0.bprov2 = Button(master0, text='<NAME>', command=self.malut2, width = 25,", "font = \"Arial 12\") text2.insert(INSERT, text1) text2.pack() master2.mainloop() def diy2(self): master2 = Tk()", "__prov25(self): master0 = Tk() master0.minsize(width = 450, height = 100) master0.maxsize(width = 450,", "1, column = 2, columnspan = 4) master0.bprov2 = Button(master0, text='Kasoami', command=self.sulgar2, width", "= 6, column = 7, columnspan = 4) self.bprov24 = Button(self.master, text='Kalimantan Timur',", "canvas,file=\"Tempoyak.png\") canvas.create_image(5,5, anchor=NW, image=img) text1 = lst[10] text2 = Text(master2, font = \"Arial", "self.bprov32.grid(row = 15, column = 7, columnspan = 4) self.bprov33 = Button(self.master, text='Papua", "= lst[4] text2 = Text(master2, font = \"Arial 12\") text2.insert(INSERT, text1) text2.pack() master2.mainloop()", "master2.title(\"Makanan tradisional Bangka Belitung\") 
canvas = Canvas(master2, width = 300, height = 300)", "lamp1(self): master2 = Tk() master2.minsize(width = 800, height = 600) master2.maxsize(width = 800,", "width = 300, height = 300) canvas.pack() img = PhotoImage(master = canvas,file=\"Gong Gong.png\")", "TEngah\") canvas = Canvas(master2, width = 300, height = 300) canvas.pack() img =", "column = 2, columnspan = 4) master0.bprov1 = Button(master0, text='Lawa', command=self.kalut1, width =", "master2.mainloop() def __prov24(self): master0 = Tk() master0.minsize(width = 450, height = 100) master0.maxsize(width", "canvas.create_image(5,5, anchor=NW, image=img) text1 = lst[5] text2 = Text(master2, font = \"Arial 12\")", "sulsel1(self): master2 = Tk() master2.minsize(width = 800, height = 600) master2.maxsize(width = 800,", "columnspan = 4) master0.mainloop() def sumut1(self): master2 = Tk() master2.minsize(width = 800, height", "text1) text2.pack() master2.mainloop() def __prov26(self): master0 = Tk() master0.minsize(width = 450, height =", "= lst[21] text2 = Text(master2, font = \"Arial 12\") text2.insert(INSERT, text1) text2.pack() master2.mainloop()", "def malu2(self): master2 = Tk() master2.minsize(width = 800, height = 600) master2.maxsize(width =", "= 300, height = 300) canvas.pack() img = PhotoImage(master = canvas,file=\"Jepa.png\") canvas.create_image(5,5, anchor=NW,", "lst[25] text2 = Text(master2, font = \"Arial 12\") text2.insert(INSERT, text1) text2.pack() master2.mainloop() def", "800, height = 600) master2.maxsize(width = 800, height = 600) master2.title(\"Makanan tradisional Banten\")", "0, column = 2, columnspan = 4) master0.bprov1 = Button(master0, text='Manday', command=self.kalsel1, width", "master2.mainloop() def kalsel2(self): master2 = Tk() master2.minsize(width = 800, height = 600) master2.maxsize(width", "lst[22] text2 = Text(master2, font = \"Arial 12\") text2.insert(INSERT, text1) text2.pack() master2.mainloop() def", "text='<NAME>', command=self.kriau1, width = 25, height=3) 
master0.bprov1.grid(row = 1, column = 2, columnspan", "height = 600) master2.maxsize(width = 800, height = 600) master2.title(\"Makanan tradisional Kalimantan Tengah\")", "600) master2.title(\"Makanan tradisional Kalimantan Selatan\") canvas = Canvas(master2, width = 300, height =", "6, columnspan = 4) master0.mainloop() def sumut1(self): master2 = Tk() master2.minsize(width = 800,", "command=self.bali2, width = 25, height=3) master0.bprov2.grid(row = 1, column = 6, columnspan =", "= \"Arial 12\") text2.insert(INSERT, text1) text2.pack() master2.mainloop() def aceh2(self): master2 = Tk() master2.minsize(width", "600) master2.title(\"Makanan tradisional Aceh\") canvas = Canvas(master2, width = 300, height = 300)", "= 600) master2.maxsize(width = 800, height = 600) master2.title(\"Makanan tradisional DI Yogyakarta\") canvas", "7, columnspan = 4) self.bprov29 = Button(self.master, text='Sulawesi Selatan', command=self.__prov29, width = 25)", "= 2, columnspan = 4) master0.bprov2 = Button(master0, text='<NAME>u', command=self.bali2, width = 25,", "command=self.kalut1, width = 25, height=3) master0.bprov1.grid(row = 1, column = 2, columnspan =", "def goron1(self): master2 = Tk() master2.minsize(width = 800, height = 600) master2.maxsize(width =", "self.bprov27 = Button(self.master, text='Sulawesi Barat', command=self.__prov27, width = 25) self.bprov27.grid(row = 10, column", "text2 = Text(master2, font = \"Arial 12\") text2.insert(INSERT, text1) text2.pack() master2.mainloop() def sulgar2(self):", "text1) text2.pack() master2.mainloop() def __prov22(self): master0 = Tk() master0.minsize(width = 450, height =", "= 1, column = 2, columnspan = 4) master0.bprov2 = Button(master0, text='Papeda', command=self.papua2,", "class DirektoriMakanan(): def __init__(self, master=Tk()): self.master = master master.minsize(width = 500, height =", "7, columnspan = 4) self.bprov28 = Button(self.master, text='Sulawesi Tengah', command=self.__prov28, width = 25)", "= 300) canvas.pack() img 
= PhotoImage(master = canvas,file=\"Tinutuan.png\") canvas.create_image(5,5, anchor=NW, image=img) text1 =", "text1) text2.pack() master2.mainloop() def diy2(self): master2 = Tk() master2.minsize(width = 800, height =", "height = 600) master2.title(\"Makanan tradisional Bali\") canvas = Canvas(master2, width = 300, height", "= Button(master0, text='Kasoami', command=self.sulgar2, width = 25, height=3) master0.bprov2.grid(row = 1, column =", "2, columnspan = 4) master0.bprov1 = Button(master0, text='Apang Bugis', command=self.sulbar1, width = 25,", "columnspan = 4) master0.mainloop() def sulbar1(self): master2 = Tk() master2.minsize(width = 800, height", "= Button(master0, text='Rawon', command=self.jatim2, width = 25, height=3) master0.bprov2.grid(row = 1, column =", "300) canvas.pack() img = PhotoImage(master = canvas,file=\"Udang Selingkuh.png\") canvas.create_image(5,5, anchor=NW, image=img) text1 =", "300) canvas.pack() img = PhotoImage(master = canvas,file=\"Papeda.png\") canvas.create_image(5,5, anchor=NW, image=img) text1 = lst[67]", "column = 2, columnspan = 4) master0.bprov1 = Button(master0, text='Sambal Colo-colo', command=self.malu1, width", "Text(master2, font = \"Arial 12\") text2.insert(INSERT, text1) text2.pack() master2.mainloop() def lamp2(self): master2 =", "= PhotoImage(master = canvas,file=\"Soto Kudus.png\") canvas.create_image(5,5, anchor=NW, image=img) text1 = lst[28] text2 =", "4) master0.bprov2 = Button(master0, text='Nasi Bekekpor', command=self.kaltim2, width = 25, height=3) master0.bprov2.grid(row =", "column = 6, columnspan = 4) master0.mainloop() def sumbar1(self): master2 = Tk() master2.minsize(width", "column = 2, columnspan = 4) master0.bprov1 = Button(master0, text='Rujak Cingur', command=self.jatim1, width", "image=img) text1 = lst[7] text2 = Text(master2, font = \"Arial 12\") text2.insert(INSERT, text1)", "image=img) text1 = lst[67] text2 = Text(master2, font = \"Arial 12\") text2.insert(INSERT, text1)", "300) canvas.pack() img 
= PhotoImage(master = canvas,file=\"Bolu Kemojo.png\") canvas.create_image(5,5, anchor=NW, image=img) text1 =", "text2.pack() master2.mainloop() def __prov29(self): master0 = Tk() master0.minsize(width = 450, height = 100)", "width = 300, height = 300) canvas.pack() img = PhotoImage(master = canvas,file=\"Bubur Paddas", "image=img) text1 = lst[0] text2 = Text(master2, font = \"Arial 12\") text2.insert(INSERT, text1)", "width = 300, height = 300) canvas.pack() img = PhotoImage(master = canvas,file=\"Martabak Bangka.png\")", "master0.judul.grid(row = 0, column = 2, columnspan = 4) master0.bprov1 = Button(master0, text='Uta", "= lst[5] text2 = Text(master2, font = \"Arial 12\") text2.insert(INSERT, text1) text2.pack() master2.mainloop()", "= 300) canvas.pack() img = PhotoImage(master = canvas,file=\"Sambal Colo Colo.png\") canvas.create_image(5,5, anchor=NW, image=img)", "def jateng1(self): master2 = Tk() master2.minsize(width = 800, height = 600) master2.maxsize(width =", "300, height = 300) canvas.pack() img = PhotoImage(master = canvas,file=\"Udang Selingkuh.png\") canvas.create_image(5,5, anchor=NW,", "800, height = 600) master2.title(\"Makanan tradisional Sumatera Selatan\") canvas = Canvas(master2, width =", "= 6, columnspan = 4) master0.mainloop() def riau1(self): master2 = Tk() master2.minsize(width =", "font = \"Arial 12\") text2.insert(INSERT, text1) text2.pack() master2.mainloop() def riau2(self): master2 = Tk()", "text2.insert(INSERT, text1) text2.pack() master2.mainloop() def kbang2(self): master2 = Tk() master2.minsize(width = 800, height", "self.bprov13.grid(row = 13, column = 3, columnspan = 4) self.bprov14 = Button(self.master, text='Jawa", "text1 = lst[2] text2 = Text(master2, font = \"Arial 12\") text2.insert(INSERT, text1) text2.pack()", "width = 300, height = 300) canvas.pack() img = PhotoImage(master = canvas,file=\"Mie Aceh.png\")", "600) master.maxsize(width = 500, height = 600) self.master.title(\"Selamat Datang di McDones (Direktori Macanan", 
"= PhotoImage(master = canvas,file=\"Papeda.png\") canvas.create_image(5,5, anchor=NW, image=img) text1 = lst[67] text2 = Text(master2,", "canvas.create_image(5,5, anchor=NW, image=img) text1 = lst[56] text2 = Text(master2, font = \"Arial 12\")", "= canvas,file=\"Nasi Sumsum.png\") canvas.create_image(5,5, anchor=NW, image=img) text1 = lst[22] text2 = Text(master2, font", "Lapola.png\") canvas.create_image(5,5, anchor=NW, image=img) text1 = lst[61] text2 = Text(master2, font = \"Arial", "= 7, columnspan = 4) self.bprov25 = Button(self.master, text='Gorontalo', command=self.__prov25, width = 25)", "command=self.__prov32, width = 25) self.bprov32.grid(row = 15, column = 7, columnspan = 4)", "text='Soto Betawi', command=self.dki1, width = 25, height=3) master0.bprov1.grid(row = 1, column = 2,", "columnspan = 4) master0.mainloop() def ntb1(self): master2 = Tk() master2.minsize(width = 800, height", "= Button(master0, text='<NAME>', command=self.malut2, width = 25, height=3) master0.bprov2.grid(row = 1, column =", "def pabar2(self): master2 = Tk() master2.minsize(width = 800, height = 600) master2.maxsize(width =", "6, columnspan = 4) master0.mainloop() def lamp1(self): master2 = Tk() master2.minsize(width = 800,", "2, columnspan = 4) master0.bprov1 = Button(master0, text='Soto Kudus', command=self.jateng1, width = 25,", "= \"Arial 12\") text2.insert(INSERT, text1) text2.pack() master2.mainloop() def ntt2(self): master2 = Tk() master2.minsize(width", "1, column = 2, columnspan = 4) master0.bprov2 = Button(master0, text='Tinutuan', command=self.sulut2, width", "Kohu.png\") canvas.create_image(5,5, anchor=NW, image=img) text1 = lst[63] text2 = Text(master2, font = \"Arial", "text2.pack() master2.mainloop() def __prov20(self): master0 = Tk() master0.minsize(width = 450, height = 100)", "kbang2(self): master2 = Tk() master2.minsize(width = 800, height = 600) master2.maxsize(width = 800,", "master0 = Tk() master0.minsize(width = 450, height = 100) master0.maxsize(width = 
450, height", "Belitung', command=self.kbang2, width = 25, height=3) master0.bprov2.grid(row = 1, column = 6, columnspan", "= 300, height = 300) canvas.pack() img = PhotoImage(master = canvas,file=\"Asidah.png\") canvas.create_image(5,5, anchor=NW,", "= 1, column = 6, columnspan = 4) master0.mainloop() def jabar1(self): master2 =", "4) master0.bprov2 = Button(master0, text='Sate Bulayak', command=self.ntb2, width = 25, height=3) master0.bprov2.grid(row =", "canvas.pack() img = PhotoImage(master = canvas,file=\"Ampiang Dadiah.png\") canvas.create_image(5,5, anchor=NW, image=img) text1 = lst[5]", "column = 3, columnspan = 4) self.bprov10 = Button(self.master, text='Lampung', command=self.__prov10, width =", "1, column = 2, columnspan = 4) master0.bprov2 = Button(master0, text='<NAME>', command=self.ban2, width", "kalteng1(self): master2 = Tk() master2.minsize(width = 800, height = 600) master2.maxsize(width = 800,", "= 600) master2.maxsize(width = 800, height = 600) master2.title(\"Makanan tradisional Kalimantan Selatan\") canvas", "image=img) text1 = lst[65] text2 = Text(master2, font = \"Arial 12\") text2.insert(INSERT, text1)", "= 300) canvas.pack() img = PhotoImage(master = canvas,file=\"Seruit Lampung.png\") canvas.create_image(5,5, anchor=NW, image=img) text1", "not in line): line = f.readline() cmp = '' txt = '' while('<end>'", "= PhotoImage(master = canvas,file=\"Ampiang Dadiah.png\") canvas.create_image(5,5, anchor=NW, image=img) text1 = lst[5] text2 =", "text2.pack() master2.mainloop() def __prov8(self): master0 = Tk() master0.minsize(width = 450, height = 100)", "Button(master0, text='Nasi Bekekpor', command=self.kaltim2, width = 25, height=3) master0.bprov2.grid(row = 1, column =", "text2 = Text(master2, font = \"Arial 12\") text2.insert(INSERT, text1) text2.pack() master2.mainloop() def jambi2(self):", "= 4) self.bprov30 = Button(self.master, text='Sulawesi Tenggara', command=self.__prov30, width = 25) self.bprov30.grid(row =", "PhotoImage(master = 
canvas,file=\"Bolu Kemojo.png\") canvas.create_image(5,5, anchor=NW, image=img) text1 = lst[6] text2 = Text(master2,", "master0.minsize(width = 450, height = 100) master0.maxsize(width = 450, height = 100) master0.title(\"Direktori", "columnspan = 4) self.bprov8 = Button(self.master, text='Sumatera Selatan', command=self.__prov8, width = 25) self.bprov8.grid(row", "= \"Arial 12\") text2.insert(INSERT, text1) text2.pack() master2.mainloop() def kriau2(self): master2 = Tk() master2.minsize(width", "columnspan = 4) self.bprov16 = Button(self.master, text='Jawa Timur', command=self.__prov16, width = 25) self.bprov16.grid(row", "gong', command=self.kriau2, width = 25, height=3) master0.bprov2.grid(row = 1, column = 6, columnspan", "img = PhotoImage(master = canvas,file=\"Nasi Bekepor.png\") canvas.create_image(5,5, anchor=NW, image=img) text1 = lst[47] text2", "4) self.bprov13 = Button(self.master, text='DKI Jakarta', command=self.__prov13, width = 25) self.bprov13.grid(row = 13,", "column = 2, columnspan = 4) master0.bprov1 = Button(master0, text='Apang Bugis', command=self.sulbar1, width", "= Button(master0, text='Sambal Colo-colo', command=self.malu1, width = 25, height=3) master0.bprov1.grid(row = 1, column", "Selingkuh.png\") canvas.create_image(5,5, anchor=NW, image=img) text1 = lst[66] text2 = Text(master2, font = \"Arial", "= master master.minsize(width = 500, height = 600) master.maxsize(width = 500, height =", "13, column = 3, columnspan = 4) self.bprov14 = Button(self.master, text='Jawa Tengah', command=self.__prov14,", "Button(master0, text='Tapa Kolo', command=self.ntt2, width = 25, height=3) master0.bprov2.grid(row = 1, column =", "= 600) master2.maxsize(width = 800, height = 600) master2.title(\"Makanan tradisional Banten\") canvas =", "height = 300) canvas.pack() img = PhotoImage(master = canvas,file=\"<NAME>oyak.png\") canvas.create_image(5,5, anchor=NW, image=img) text1", "master2.mainloop() def jatim2(self): master2 = Tk() master2.minsize(width = 800, 
height = 600) master2.maxsize(width", "anchor=NW, image=img) text1 = lst[1] text2 = Text(master2, font = \"Arial 12\") text2.insert(INSERT,", "Text(master2, font = \"Arial 12\") text2.insert(INSERT, text1) text2.pack() master2.mainloop() def __prov29(self): master0 =", "lst[60] text2 = Text(master2, font = \"Arial 12\") text2.insert(INSERT, text1) text2.pack() master2.mainloop() def", "= 600) master2.title(\"Makanan tradisional Kalimantan Utara\") canvas = Canvas(master2, width = 300, height", "Maluku\") canvas = Canvas(master2, width = 300, height = 300) canvas.pack() img =", "font = \"Arial 12\") text2.insert(INSERT, text1) text2.pack() master2.mainloop() def __prov4(self): master0 = Tk()", "= 300, height = 300) canvas.pack() img = PhotoImage(master = canvas,file=\"Tempoyak.png\") canvas.create_image(5,5, anchor=NW,", "= 0, column = 2, columnspan = 4) master0.bprov1 = Button(master0, text='<NAME>', command=self.sumut1,", "300) canvas.pack() img = PhotoImage(master = canvas,file=\"Lepek Binti.png\") canvas.create_image(5,5, anchor=NW, image=img) text1 =", "image=img) text1 = lst[16] text2 = Text(master2, font = \"Arial 12\") text2.insert(INSERT, text1)", "command=self.diy2, width = 25, height=3) master0.bprov2.grid(row = 1, column = 6, columnspan =", "columnspan = 4) master0.bprov1 = Button(master0, text='Bubur Paddas Sambas', command=self.kalbar1, width = 25,", "__prov7(self): master0 = Tk() master0.minsize(width = 450, height = 100) master0.maxsize(width = 450,", "3, columnspan = 4) self.bprov15 = Button(self.master, text='DI Yogyakarta', command=self.__prov15, width = 25)", "master0.bprov1 = Button(master0, text='<NAME>', command=self.bali1, width = 25, height=3) master0.bprov1.grid(row = 1, column", "12\") text2.insert(INSERT, text1) text2.pack() master2.mainloop() def sulbar2(self): master2 = Tk() master2.minsize(width = 800,", "600) master2.maxsize(width = 800, height = 600) master2.title(\"Makanan tradisional Bangka Belitung\") canvas =", "300) 
canvas.pack() img = PhotoImage(master = canvas,file=\"Asidah.png\") canvas.create_image(5,5, anchor=NW, image=img) text1 = lst[7]", "master0.bprov2.grid(row = 1, column = 6, columnspan = 4) master0.mainloop() def sulbar1(self): master2", "PhotoImage(master = canvas,file=\"<NAME>.png\") canvas.create_image(5,5, anchor=NW, image=img) text1 = lst[21] text2 = Text(master2, font", "Label(self.master, text = \"Pilih provinsi yang ingin anda ketahui\", font = \"Arial 16", "PhotoImage(master = canvas,file=\"Nasi Sumsum.png\") canvas.create_image(5,5, anchor=NW, image=img) text1 = lst[22] text2 = Text(master2,", "+= cmp cmp = f.readline() lst.append(txt) line = f.readline() class DirektoriMakanan(): def __init__(self,", "Button(self.master, text='Bali', command=self.__prov17, width = 25) self.bprov17.grid(row = 17, column = 3, columnspan", "text2.insert(INSERT, text1) text2.pack() master2.mainloop() def __prov3(self): master0 = Tk() master0.minsize(width = 450, height", "12\") text2.insert(INSERT, text1) text2.pack() master2.mainloop() def __prov19(self): master0 = Tk() master0.minsize(width = 450,", "1, column = 6, columnspan = 4) master0.mainloop() def papua1(self): master2 = Tk()", "text2.insert(INSERT, text1) text2.pack() master2.mainloop() def dki2(self): master2 = Tk() master2.minsize(width = 800, height", "canvas.create_image(5,5, anchor=NW, image=img) text1 = lst[24] text2 = Text(master2, font = \"Arial 12\")", "= Button(self.master, text='Kalimantan Utara', command=self.__prov20, width = 25) self.bprov20.grid(row = 3, column =", "height = 300) canvas.pack() img = PhotoImage(master = canvas,file=\"Kepiting Soka.png\") canvas.create_image(5,5, anchor=NW, image=img)", "= 800, height = 600) master2.title(\"Makanan tradisional Maluku\") canvas = Canvas(master2, width =", "column = 2, columnspan = 4) master0.bprov1 = Button(master0, text='<NAME>', command=self.jabar1, width =", "columnspan = 4) master0.bprov1 = Button(master0, text='Rujak Cingur', command=self.jatim1, 
width = 25, height=3)", "Text(master2, font = \"Arial 12\") text2.insert(INSERT, text1) text2.pack() master2.mainloop() def ntt2(self): master2 =", "2, columnspan = 4) master0.bprov2 = Button(master0, text='Nasi Gerombyang', command=self.jateng2, width = 25,", "canvas.create_image(5,5, anchor=NW, image=img) text1 = lst[44] text2 = Text(master2, font = \"Arial 12\")", "= Button(master0, text='Papeda', command=self.papua2, width = 25, height=3) master0.bprov2.grid(row = 1, column =", "= 4) master0.bprov1 = Button(master0, text='Bubur Paddas Sambas', command=self.kalbar1, width = 25, height=3)", "text2.pack() master2.mainloop() def __prov5(self): master0 = Tk() master0.minsize(width = 450, height = 100)", "= 300) canvas.pack() img = PhotoImage(master = canvas,file=\"Soto Betawi.png\") canvas.create_image(5,5, anchor=NW, image=img) text1", "Jakarta', command=self.__prov13, width = 25) self.bprov13.grid(row = 13, column = 3, columnspan =", "text2.insert(INSERT, text1) text2.pack() master2.mainloop() def __prov21(self): master0 = Tk() master0.minsize(width = 450, height", "height = 600) master2.title(\"Makanan tradisional Kalimantan Tengha\") canvas = Canvas(master2, width = 300,", "300, height = 300) canvas.pack() img = PhotoImage(master = canvas,file=\"Kue Timpan.png\") canvas.create_image(5,5, anchor=NW,", "= 600) master2.title(\"Makanan tradisional Bangka Belitung\") canvas = Canvas(master2, width = 300, height", "self.bprov15 = Button(self.master, text='DI Yogyakarta', command=self.__prov15, width = 25) self.bprov15.grid(row = 15, column", "canvas.create_image(5,5, anchor=NW, image=img) text1 = lst[19] text2 = Text(master2, font = \"Arial 12\")", "master0.mainloop() def ban1(self): master2 = Tk() master2.minsize(width = 800, height = 600) master2.maxsize(width", "height = 600) master2.maxsize(width = 800, height = 600) master2.title(\"Makanan tradisional Bali\") canvas", "= 600) master2.maxsize(width = 800, height = 600) master2.title(\"Makanan tradisional NTB\") 
canvas =", "= Text(master2, font = \"Arial 12\") text2.insert(INSERT, text1) text2.pack() master2.mainloop() def kbang2(self): master2", "2, columnspan = 4) master0.bprov1 = Button(master0, text='Tiwul', command=self.diy1, width = 25, height=3)", "= 0, column = 2, columnspan = 4) master0.bprov1 = Button(master0, text='Rujak Cingur',", "12, column = 7, columnspan = 4) self.bprov30 = Button(self.master, text='Sulawesi Tenggara', command=self.__prov30,", "text2 = Text(master2, font = \"Arial 12\") text2.insert(INSERT, text1) text2.pack() master2.mainloop() def __prov10(self):", "text2.pack() master2.mainloop() def jabar2(self): master2 = Tk() master2.minsize(width = 800, height = 600)", "Button(master0, text='Kasoami', command=self.sulgar2, width = 25, height=3) master0.bprov2.grid(row = 1, column = 6,", "800, height = 600) master2.maxsize(width = 800, height = 600) master2.title(\"Makanan tradisional Maluku", "= Button(master0, text='Dorokdok', command=self.jabar2, width = 25, height=3) master0.bprov2.grid(row = 1, column =", "12\") text2.insert(INSERT, text1) text2.pack() master2.mainloop() def __prov14(self): master0 = Tk() master0.minsize(width = 450,", "height = 300) canvas.pack() img = PhotoImage(master = canvas,file=\"Seruit Lampung.png\") canvas.create_image(5,5, anchor=NW, image=img)", "300, height = 300) canvas.pack() img = PhotoImage(master = canvas,file=\"Klapertaart.png\") canvas.create_image(5,5, anchor=NW, image=img)", "800, height = 600) master2.title(\"Makanan tradisional Papua\") canvas = Canvas(master2, width = 300,", "Colo Colo.png\") canvas.create_image(5,5, anchor=NW, image=img) text1 = lst[62] text2 = Text(master2, font =", "= 4) master0.bprov1 = Button(master0, text='Pempek Palembang', command=self.sumsel1, width = 25, height=3) master0.bprov1.grid(row", "__init__(self, master=Tk()): self.master = master master.minsize(width = 500, height = 600) master.maxsize(width =", "300, height = 300) canvas.pack() img = PhotoImage(master = 
canvas,file=\"Kepiting Soka.png\") canvas.create_image(5,5, anchor=NW,", "master0.bprov1 = Button(master0, text='Kelapertaar', command=self.sulut1, width = 25, height=3) master0.bprov1.grid(row = 1, column", "def sulbar2(self): master2 = Tk() master2.minsize(width = 800, height = 600) master2.maxsize(width =", "img = PhotoImage(master = canvas,file=\"Kasoami.png\") canvas.create_image(5,5, anchor=NW, image=img) text1 = lst[59] text2 =", "__prov24(self): master0 = Tk() master0.minsize(width = 450, height = 100) master0.maxsize(width = 450,", "4) self.bprov27 = Button(self.master, text='Sulawesi Barat', command=self.__prov27, width = 25) self.bprov27.grid(row = 10,", "= 600) master2.maxsize(width = 800, height = 600) master2.title(\"Makanan tradisional Kalimnantan Timur\") canvas", "= 300, height = 300) canvas.pack() img = PhotoImage(master = canvas,file=\"Apang Bugis.png\") canvas.create_image(5,5,", "25) self.bprov8.grid(row = 8, column = 3, columnspan = 4) self.bprov9 = Button(self.master,", "= 800, height = 600) master2.maxsize(width = 800, height = 600) master2.title(\"Makanan tradisional", "master2.title(\"Makanan tradisional Kalimantan Tengah\") canvas = Canvas(master2, width = 300, height = 300)", "= 600) master2.maxsize(width = 800, height = 600) master2.title(\"Makanan tradisional Kalimantan Barat\") canvas", "height = 600) master2.maxsize(width = 800, height = 600) master2.title(\"Makanan tradisional Kalimnantan Timur\")", "= lst[11] text2 = Text(master2, font = \"Arial 12\") text2.insert(INSERT, text1) text2.pack() master2.mainloop()", "lst[0] text2 = Text(master2, font = \"Arial 12\") text2.insert(INSERT, text1) text2.pack() master2.mainloop() def", "= PhotoImage(master = canvas,file=\"Binte Biluhuta.png\") canvas.create_image(5,5, anchor=NW, image=img) text1 = lst[48] text2 =", "master0.judul.grid(row = 0, column = 2, columnspan = 4) master0.bprov1 = Button(master0, text='<NAME>',", "text='Lepek Binti', command=self.beng1, width = 25, height=3) 
master0.bprov1.grid(row = 1, column = 2,", "4) master0.bprov1 = Button(master0, text='Rujak Cingur', command=self.jatim1, width = 25, height=3) master0.bprov1.grid(row =", "def __prov17(self): master0 = Tk() master0.minsize(width = 450, height = 100) master0.maxsize(width =", "= lst[16] text2 = Text(master2, font = \"Arial 12\") text2.insert(INSERT, text1) text2.pack() master2.mainloop()", "img = PhotoImage(master = canvas,file=\"Sate Lilit.png\") canvas.create_image(5,5, anchor=NW, image=img) text1 = lst[32] text2", "width = 25) self.bprov7.grid(row = 7, column = 3, columnspan = 4) self.bprov8", "4) master0.bprov1 = Button(master0, text='Mie Aceh', command=self.aceh1, width = 25, height=3) master0.bprov1.grid(row =", "column = 2, columnspan = 4) master0.bprov2 = Button(master0, text='Bika Ambon', command=self.sumut2, width", "= Button(master0, text='<NAME>', command=self.kriau1, width = 25, height=3) master0.bprov1.grid(row = 1, column =", "= 300, height = 300) canvas.pack() img = PhotoImage(master = canvas,file=\"W<NAME>in.png\") canvas.create_image(5,5, anchor=NW,", "command=self.sulteng1, width = 25, height=3) master0.bprov1.grid(row = 1, column = 2, columnspan =", "column = 2, columnspan = 4) master0.bprov2 = Button(master0, text='Kasoami', command=self.sulgar2, width =", "text2.pack() master2.mainloop() def papua2(self): master2 = Tk() master2.minsize(width = 800, height = 600)", "self.bprov19.grid(row = 2, column = 7, columnspan = 4) self.bprov20 = Button(self.master, text='Kalimantan", "= Text(master2, font = \"Arial 12\") text2.insert(INSERT, text1) text2.pack() master2.mainloop() def jatim2(self): master2", "12\") text2.insert(INSERT, text1) text2.pack() master2.mainloop() def sulsel2(self): master2 = Tk() master2.minsize(width = 800,", "columnspan = 4) master0.mainloop() def pabar1(self): master2 = Tk() master2.minsize(width = 800, height", "= PhotoImage(master = canvas,file=\"Sate Bulayak.png\") canvas.create_image(5,5, anchor=NW, image=img) text1 = 
lst[35] text2 =", "text='DI Yogyakarta', command=self.__prov15, width = 25) self.bprov15.grid(row = 15, column = 3, columnspan", "column = 2, columnspan = 4) master0.bprov2 = Button(master0, text='Kue Timpan', command=self.aceh2, width", "= 600) master2.title(\"Makanan tradisional Sumatera Selatan\") canvas = Canvas(master2, width = 300, height", "text2.insert(INSERT, text1) text2.pack() master2.mainloop() def __prov16(self): master0 = Tk() master0.minsize(width = 450, height", "master2.mainloop() def kalbar2(self): master2 = Tk() master2.minsize(width = 800, height = 600) master2.maxsize(width", "riau2(self): master2 = Tk() master2.minsize(width = 800, height = 600) master2.maxsize(width = 800,", "= 600) master2.title(\"Makanan tradisional Bengkulu\") canvas = Canvas(master2, width = 300, height =", "canvas,file=\"Tekwan Palembang.png\") canvas.create_image(5,5, anchor=NW, image=img) text1 = lst[15] text2 = Text(master2, font =", "\"Arial 12\") text2.insert(INSERT, text1) text2.pack() master2.mainloop() def __prov31(self): master0 = Tk() master0.minsize(width =", "Konro', command=self.sulsel1, width = 25, height=3) master0.bprov1.grid(row = 1, column = 2, columnspan", "= 600) master2.title(\"Makanan tradisional Riau\") canvas = Canvas(master2, width = 300, height =", "width = 25) self.bprov29.grid(row = 12, column = 7, columnspan = 4) self.bprov30", "800, height = 600) master2.maxsize(width = 800, height = 600) master2.title(\"Makanan tradisional Sumatera", "300) canvas.pack() img = PhotoImage(master = canvas,file=\"Tekwan Palembang.png\") canvas.create_image(5,5, anchor=NW, image=img) text1 =", "= 4) master0.bprov2 = Button(master0, text='Belacan Belitung', command=self.kbang2, width = 25, height=3) master0.bprov2.grid(row", "600) master2.title(\"Makanan tradisional Jawa TEngah\") canvas = Canvas(master2, width = 300, height =", "2, columnspan = 4) master0.bprov2 = Button(master0, text='<NAME>', command=self.ban2, width = 25, height=3)", "master0.bprov2 = 
Button(master0, text='Kaledo', command=self.sulteng2, width = 25, height=3) master0.bprov2.grid(row = 1, column", "= 2, columnspan = 4) master0.bprov1 = Button(master0, text='Tiwul', command=self.diy1, width = 25,", "text1) text2.pack() master2.mainloop() def goron2(self): master2 = Tk() master2.minsize(width = 800, height =", "text='Sop Konro', command=self.sulsel1, width = 25, height=3) master0.bprov1.grid(row = 1, column = 2,", "master0.bprov2 = Button(master0, text='Ampiang Dadiah', command=self.sumbar2, width = 25, height=3) master0.bprov2.grid(row = 1,", "text2.insert(INSERT, text1) text2.pack() master2.mainloop() def __prov34(self): master0 = Tk() master0.minsize(width = 450, height", "= 2, columnspan = 4) master0.bprov2 = Button(master0, text='Bika Ambon', command=self.sumut2, width =", "command=self.jateng2, width = 25, height=3) master0.bprov2.grid(row = 1, column = 6, columnspan =", "anchor=NW, image=img) text1 = lst[51] text2 = Text(master2, font = \"Arial 12\") text2.insert(INSERT,", "text='Binte Biluhuta', command=self.goron1, width = 25, height=3) master0.bprov1.grid(row = 1, column = 2,", "sulut1(self): master2 = Tk() master2.minsize(width = 800, height = 600) master2.maxsize(width = 800,", "text2.insert(INSERT, text1) text2.pack() master2.mainloop() def __prov30(self): master0 = Tk() master0.minsize(width = 450, height", "= 4) master0.mainloop() def pabar1(self): master2 = Tk() master2.minsize(width = 800, height =", "text='Tiwul', command=self.diy1, width = 25, height=3) master0.bprov1.grid(row = 1, column = 2, columnspan", "2, columnspan = 4) master0.bprov1 = Button(master0, text='Bubur Paddas Sambas', command=self.kalbar1, width =", "command=self.__prov13, width = 25) self.bprov13.grid(row = 13, column = 3, columnspan = 4)", "= 300) canvas.pack() img = PhotoImage(master = canvas,file=\"Belaca Belitung.png\") canvas.create_image(5,5, anchor=NW, image=img) text1", "800, height = 600) master2.title(\"Makanan tradisional NTT\") canvas = 
Canvas(master2, width = 300,", "= 1, column = 2, columnspan = 4) master0.bprov2 = Button(master0, text='Gong gong',", "= 600) master2.title(\"Makanan tradisional Banten\") canvas = Canvas(master2, width = 300, height =", "= 25) self.bprov33.grid(row = 16, column = 7, columnspan = 4) self.bprov34 =", "text1) text2.pack() master2.mainloop() def __prov3(self): master0 = Tk() master0.minsize(width = 450, height =", "text1 = lst[54] text2 = Text(master2, font = \"Arial 12\") text2.insert(INSERT, text1) text2.pack()", "600) master2.title(\"Makanan tradisional Kalimnantan Timur\") canvas = Canvas(master2, width = 300, height =", "= 300, height = 300) canvas.pack() img = PhotoImage(master = canvas,file=\"Nasi Lapola.png\") canvas.create_image(5,5,", "anchor=NW, image=img) text1 = lst[39] text2 = Text(master2, font = \"Arial 12\") text2.insert(INSERT,", "= \"Arial 12\") text2.insert(INSERT, text1) text2.pack() master2.mainloop() def dki2(self): master2 = Tk() master2.minsize(width", "\"Arial 12\") text2.insert(INSERT, text1) text2.pack() master2.mainloop() def pabar2(self): master2 = Tk() master2.minsize(width =", "= \"Arial 12\") text2.insert(INSERT, text1) text2.pack() master2.mainloop() def ban2(self): master2 = Tk() master2.minsize(width", "= 0, column = 2, columnspan = 4) master0.bprov1 = Button(master0, text='Seruit Lampung',", "columnspan = 4) master0.bprov2 = Button(master0, text='Rawon', command=self.jatim2, width = 25, height=3) master0.bprov2.grid(row", "tradisional Sulawesi Utara\") canvas = Canvas(master2, width = 300, height = 300) canvas.pack()", "= 7, columnspan = 4) self.bprov19 = Button(self.master, text='NTT', command=self.__prov19, width = 25)", "Canvas(master2, width = 300, height = 300) canvas.pack() img = PhotoImage(master = canvas,file=\"<NAME>oyak.png\")", "canvas,file=\"Binte Biluhuta.png\") canvas.create_image(5,5, anchor=NW, image=img) text1 = lst[48] text2 = Text(master2, font =", "text2.pack() master2.mainloop() def __prov11(self): master0 = 
Tk() master0.minsize(width = 450, height = 100)", "canvas.pack() img = PhotoImage(master = canvas,file=\"Pempek Palembang.png\") canvas.create_image(5,5, anchor=NW, image=img) text1 = lst[14]", "def jabar1(self): master2 = Tk() master2.minsize(width = 800, height = 600) master2.maxsize(width =", "lst[43] text2 = Text(master2, font = \"Arial 12\") text2.insert(INSERT, text1) text2.pack() master2.mainloop() def", "Button(master0, text='Dorokdok', command=self.jabar2, width = 25, height=3) master0.bprov2.grid(row = 1, column = 6,", "image=img) text1 = lst[9] text2 = Text(master2, font = \"Arial 12\") text2.insert(INSERT, text1)", "Button(master0, text='Lepek Binti', command=self.beng1, width = 25, height=3) master0.bprov1.grid(row = 1, column =", "image=img) text1 = lst[41] text2 = Text(master2, font = \"Arial 12\") text2.insert(INSERT, text1)", "Kalimantan Selatan\") canvas = Canvas(master2, width = 300, height = 300) canvas.pack() img", "= 25) self.bprov3.grid(row = 3, column = 3, columnspan = 4) self.bprov4 =", "text2 = Text(master2, font = \"Arial 12\") text2.insert(INSERT, text1) text2.pack() master2.mainloop() def __prov25(self):", "4) self.bprov28 = Button(self.master, text='Sulawesi Tengah', command=self.__prov28, width = 25) self.bprov28.grid(row = 11,", "text='Nasi Sumsum', command=self.ban1, width = 25, height=3) master0.bprov1.grid(row = 1, column = 2,", "3, columnspan = 4) self.bprov7 = Button(self.master, text='Bengukulu', command=self.__prov7, width = 25) self.bprov7.grid(row", "columnspan = 4) self.bprov6 = Button(self.master, text='Jambi', command=self.__prov6, width = 25) self.bprov6.grid(row =", "300) canvas.pack() img = PhotoImage(master = canvas,file=\"Gong Gong.png\") canvas.create_image(5,5, anchor=NW, image=img) text1 =", "= 2, columnspan = 4) master0.bprov1 = Button(master0, text='<NAME>', command=self.malut1, width = 25,", "height = 600) master2.maxsize(width = 800, height = 600) master2.title(\"Makanan tradisional Sulawesi Utara\")", 
"DirektoriMakanan(): def __init__(self, master=Tk()): self.master = master master.minsize(width = 500, height = 600)", "= 25) self.bprov30.grid(row = 13, column = 7, columnspan = 4) self.bprov31 =", "text='<NAME>', command=self.bali1, width = 25, height=3) master0.bprov1.grid(row = 1, column = 2, columnspan", "__prov27(self): master0 = Tk() master0.minsize(width = 450, height = 100) master0.maxsize(width = 450,", "= PhotoImage(master = canvas,file=\"Jepa.png\") canvas.create_image(5,5, anchor=NW, image=img) text1 = lst[53] text2 = Text(master2,", "canvas.pack() img = PhotoImage(master = canvas,file=\"Sinonggi.png\") canvas.create_image(5,5, anchor=NW, image=img) text1 = lst[58] text2", "column = 2, columnspan = 4) master0.bprov2 = Button(master0, text='Jadah Tempe', command=self.diy2, width", "2, columnspan = 4) master0.bprov1 = Button(master0, text='<NAME>', command=self.sumut1, width = 25, height=3)", "column = 6, columnspan = 4) master0.mainloop() def sulbar1(self): master2 = Tk() master2.minsize(width", "4) master0.bprov1 = Button(master0, text='Tiwul', command=self.diy1, width = 25, height=3) master0.bprov1.grid(row = 1,", "2, columnspan = 4) master0.bprov2 = Button(master0, text='Gangan Asam Banjar', command=self.kalsel2, width =", "#semoga lancar #aminnn from tkinter import * lst = [] def readf(): with", "height = 300) canvas.pack() img = PhotoImage(master = canvas,file=\"Rawon.png\") canvas.create_image(5,5, anchor=NW, image=img) text1", "Asam Banjar', command=self.kalsel2, width = 25, height=3) master0.bprov2.grid(row = 1, column = 6,", "master2.title(\"Makanan tradisional Gorontalo\") canvas = Canvas(master2, width = 300, height = 300) canvas.pack()", "#project akhir mpkta #semoga lancar #aminnn from tkinter import * lst = []", "300, height = 300) canvas.pack() img = PhotoImage(master = canvas,file=\"Pempek Palembang.png\") canvas.create_image(5,5, anchor=NW,", "= \"Arial 12\") text2.insert(INSERT, text1) text2.pack() master2.mainloop() def 
__prov34(self): master0 = Tk() master0.minsize(width", "master0.bprov2 = Button(master0, text='Gangan Asam Banjar', command=self.kalsel2, width = 25, height=3) master0.bprov2.grid(row =", "master0.bprov2 = Button(master0, text='Kasoami', command=self.sulgar2, width = 25, height=3) master0.bprov2.grid(row = 1, column", "column = 2, columnspan = 4) master0.bprov1 = Button(master0, text=\"Se'i\", command=self.ntt1, width =", "def __prov18(self): master0 = Tk() master0.minsize(width = 450, height = 100) master0.maxsize(width =", "height=3) master0.bprov2.grid(row = 1, column = 6, columnspan = 4) master0.mainloop() def jambi1(self):", "1, column = 6, columnspan = 4) master0.mainloop() def diy1(self): master2 = Tk()", "600) master2.title(\"Makanan tradisional Sumatera Utara\") canvas = Canvas(master2, width = 300, height =", "4) self.bprov20 = Button(self.master, text='Kalimantan Utara', command=self.__prov20, width = 25) self.bprov20.grid(row = 3,", "text='Bilenthango', command=self.goron2, width = 25, height=3) master0.bprov2.grid(row = 1, column = 6, columnspan", "800, height = 600) master2.maxsize(width = 800, height = 600) master2.title(\"Makanan tradisional DI", "4) self.bprov16 = Button(self.master, text='Jawa Timur', command=self.__prov16, width = 25) self.bprov16.grid(row = 16,", "300) canvas.pack() img = PhotoImage(master = canvas,file=\"Seruit Lampung.png\") canvas.create_image(5,5, anchor=NW, image=img) text1 =", "= \"Arial 12\") text2.insert(INSERT, text1) text2.pack() master2.mainloop() def diy2(self): master2 = Tk() master2.minsize(width", "def kalsel2(self): master2 = Tk() master2.minsize(width = 800, height = 600) master2.maxsize(width =", "master0.bprov2 = Button(master0, text='Rawon', command=self.jatim2, width = 25, height=3) master0.bprov2.grid(row = 1, column", "master2.title(\"Makanan tradisional Sumatera Selatan\") canvas = Canvas(master2, width = 300, height = 300)", "width = 25) self.bprov3.grid(row = 3, column = 3, columnspan = 4) self.bprov4", 
"= Text(master2, font = \"Arial 12\") text2.insert(INSERT, text1) text2.pack() master2.mainloop() def kaltim2(self): master2", "img = PhotoImage(master = canvas,file=\"Belaca Belitung.png\") canvas.create_image(5,5, anchor=NW, image=img) text1 = lst[17] text2", "= f.readline() lst.append(txt) line = f.readline() class DirektoriMakanan(): def __init__(self, master=Tk()): self.master =", "= 0, column = 2, columnspan = 4) master0.bprov1 = Button(master0, text='Mie Aceh',", "= 4) self.bprov19 = Button(self.master, text='NTT', command=self.__prov19, width = 25) self.bprov19.grid(row = 2,", "width = 25) self.bprov15.grid(row = 15, column = 3, columnspan = 4) self.bprov16", "800, height = 600) master2.maxsize(width = 800, height = 600) master2.title(\"Makanan tradisional NTB\")", "2, columnspan = 4) master0.bprov2 = Button(master0, text='<NAME>', command=self.kalbar2, width = 25, height=3)", "img = PhotoImage(master = canvas,file=\"Klapertaart.png\") canvas.create_image(5,5, anchor=NW, image=img) text1 = lst[50] text2 =", "PhotoImage(master = canvas,file=\"Sate Ulat Sagu.png\") canvas.create_image(5,5, anchor=NW, image=img) text1 = lst[65] text2 =", "= \"Arial 12\") text2.insert(INSERT, text1) text2.pack() master2.mainloop() def jambi2(self): master2 = Tk() master2.minsize(width", "= 300) canvas.pack() img = PhotoImage(master = canvas,file=\"Sei.png\") canvas.create_image(5,5, anchor=NW, image=img) text1 =", "Banjar', command=self.kalsel2, width = 25, height=3) master0.bprov2.grid(row = 1, column = 6, columnspan", "4) master0.bprov2 = Button(master0, text='<NAME>', command=self.kalut2, width = 25, height=3) master0.bprov2.grid(row = 1,", "height = 600) master2.maxsize(width = 800, height = 600) master2.title(\"Makanan tradisional Kalimantan Selatan\")", "canvas,file=\"Nasi Grombyang.png\") canvas.create_image(5,5, anchor=NW, image=img) text1 = lst[29] text2 = Text(master2, font =", "= 1, column = 2, columnspan = 4) master0.bprov2 = Button(master0, text='<NAME>', 
command=self.ban2,", "\"Arial 12\") text2.insert(INSERT, text1) text2.pack() master2.mainloop() def __prov3(self): master0 = Tk() master0.minsize(width =", "Text(master2, font = \"Arial 12\") text2.insert(INSERT, text1) text2.pack() master2.mainloop() def kbang2(self): master2 =", "text1 = lst[13] text2 = Text(master2, font = \"Arial 12\") text2.insert(INSERT, text1) text2.pack()", "__prov14(self): master0 = Tk() master0.minsize(width = 450, height = 100) master0.maxsize(width = 450,", "font = \"Arial 16 bold\") master0.judul.grid(row = 0, column = 2, columnspan =", "Text(master2, font = \"Arial 12\") text2.insert(INSERT, text1) text2.pack() master2.mainloop() def __prov20(self): master0 =", "300, height = 300) canvas.pack() img = PhotoImage(master = canvas,file=\"Ayam Cincane.png\") canvas.create_image(5,5, anchor=NW,", "2, columnspan = 4) master0.bprov1 = Button(master0, text='Pempek Palembang', command=self.sumsel1, width = 25,", "image=img) text1 = lst[39] text2 = Text(master2, font = \"Arial 12\") text2.insert(INSERT, text1)", "lst.append(txt) line = f.readline() class DirektoriMakanan(): def __init__(self, master=Tk()): self.master = master master.minsize(width", "columnspan = 4) self.bprov19 = Button(self.master, text='NTT', command=self.__prov19, width = 25) self.bprov19.grid(row =", "def __prov8(self): master0 = Tk() master0.minsize(width = 450, height = 100) master0.maxsize(width =", "= lst[18] text2 = Text(master2, font = \"Arial 12\") text2.insert(INSERT, text1) text2.pack() master2.mainloop()", "column = 2, columnspan = 4) master0.bprov1 = Button(master0, text='Lepek Binti', command=self.beng1, width", "= PhotoImage(master = canvas,file=\"Kepiting Soka.png\") canvas.create_image(5,5, anchor=NW, image=img) text1 = lst[39] text2 =", "def __prov34(self): master0 = Tk() master0.minsize(width = 450, height = 100) master0.maxsize(width =", "= 300) canvas.pack() img = PhotoImage(master = canvas,file=\"Soto Kudus.png\") canvas.create_image(5,5, anchor=NW, 
image=img) text1", "25) self.bprov15.grid(row = 15, column = 3, columnspan = 4) self.bprov16 = Button(self.master,", "2, columnspan = 4) master0.bprov1 = Button(master0, text='<NAME>', command=self.jabar1, width = 25, height=3)", "Utara', command=self.__prov20, width = 25) self.bprov20.grid(row = 3, column = 7, columnspan =", "tradisional Kalimantan Barat\") canvas = Canvas(master2, width = 300, height = 300) canvas.pack()", "7, columnspan = 4) self.bprov20 = Button(self.master, text='Kalimantan Utara', command=self.__prov20, width = 25)", "Button(self.master, text='Maluku Utara', command=self.__prov31, width = 25) self.bprov31.grid(row = 14, column = 7,", "columnspan = 4) master0.bprov2 = Button(master0, text='<NAME>', command=self.malut2, width = 25, height=3) master0.bprov2.grid(row", "Canvas(master2, width = 300, height = 300) canvas.pack() img = PhotoImage(master = canvas,file=\"Seruit", "= 25) self.bprov10.grid(row = 10, column = 3, columnspan = 4) self.bprov11 =", "= 300, height = 300) canvas.pack() img = PhotoImage(master = canvas,file=\"Bagar Hiu.png\") canvas.create_image(5,5,", "= 800, height = 600) master2.title(\"Makanan tradisional Banten\") canvas = Canvas(master2, width =", "= 300, height = 300) canvas.pack() img = PhotoImage(master = canvas,file=\"Tapa Kolo.png\") canvas.create_image(5,5,", "3, columnspan = 4) self.bprov14 = Button(self.master, text='Jawa Tengah', command=self.__prov14, width = 25)", "image=img) text1 = lst[5] text2 = Text(master2, font = \"Arial 12\") text2.insert(INSERT, text1)", "600) master2.maxsize(width = 800, height = 600) master2.title(\"Makanan tradisional BAnten\") canvas = Canvas(master2,", "text1) text2.pack() master2.mainloop() def sumbar2(self): master2 = Tk() master2.minsize(width = 800, height =", "= 800, height = 600) master2.title(\"Makanan tradisional Gorontalo\") canvas = Canvas(master2, width =", "img = PhotoImage(master = canvas,file=\"Tiwul.png\") canvas.create_image(5,5, anchor=NW, image=img) text1 = lst[26] 
text2 =", "1, column = 6, columnspan = 4) master0.mainloop() def kalteng1(self): master2 = Tk()", "anchor=NW, image=img) text1 = lst[61] text2 = Text(master2, font = \"Arial 12\") text2.insert(INSERT,", "Macanan Tradisional)\") self.master.judul = Label(self.master, text = \"Pilih provinsi yang ingin anda ketahui\",", "height = 300) canvas.pack() img = PhotoImage(master = canvas,file=\"Sambal Colo Colo.png\") canvas.create_image(5,5, anchor=NW,", "columnspan = 4) self.bprov20 = Button(self.master, text='Kalimantan Utara', command=self.__prov20, width = 25) self.bprov20.grid(row", "\"Arial 12\") text2.insert(INSERT, text1) text2.pack() master2.mainloop() def sulsel2(self): master2 = Tk() master2.minsize(width =", "in range(68): while('<deskripsi>' not in line): line = f.readline() cmp = '' txt", "canvas.pack() img = PhotoImage(master = canvas,file=\"Rendang.png\") canvas.create_image(5,5, anchor=NW, image=img) text1 = lst[4] text2", "Betutu.png\") canvas.create_image(5,5, anchor=NW, image=img) text1 = lst[33] text2 = Text(master2, font = \"Arial", "Button(master0, text='Ikan Bakar Manokwari', command=self.pabar1, width = 25, height=3) master0.bprov1.grid(row = 1, column", "= \"Arial 12\") text2.insert(INSERT, text1) text2.pack() master2.mainloop() def __prov13(self): master0 = Tk() master0.minsize(width", "= Button(self.master, text='Papua Barat', command=self.__prov33, width = 25) self.bprov33.grid(row = 16, column =", "300) canvas.pack() img = PhotoImage(master = canvas,file=\"Tapa Kolo.png\") canvas.create_image(5,5, anchor=NW, image=img) text1 =", "Text(master2, font = \"Arial 12\") text2.insert(INSERT, text1) text2.pack() master2.mainloop() def __prov27(self): master0 =", "anchor=NW, image=img) text1 = lst[48] text2 = Text(master2, font = \"Arial 12\") text2.insert(INSERT,", "master2.mainloop() def malut2(self): master2 = Tk() master2.minsize(width = 800, height = 600) master2.maxsize(width", "master2.title(\"Makanan tradisional Kalimnantan Timur\") canvas = 
Canvas(master2, width = 300, height = 300)", "Button(master0, text='<NAME>', command=self.jambi2, width = 25, height=3) master0.bprov2.grid(row = 1, column = 6,", "lst[62] text2 = Text(master2, font = \"Arial 12\") text2.insert(INSERT, text1) text2.pack() master2.mainloop() def", "self.master.title(\"Selamat Datang di McDones (Direktori Macanan Tradisional)\") self.master.judul = Label(self.master, text = \"Pilih", "1, column = 2, columnspan = 4) master0.bprov2 = Button(master0, text='<NAME>', command=self.malut2, width", "300, height = 300) canvas.pack() img = PhotoImage(master = canvas,file=\"Sambal Colo Colo.png\") canvas.create_image(5,5,", "= 3, columnspan = 4) self.bprov6 = Button(self.master, text='Jambi', command=self.__prov6, width = 25)", "height = 600) master2.maxsize(width = 800, height = 600) master2.title(\"Makanan tradisional Sulawesi Barat\")", "column = 7, columnspan = 4) self.bprov32 = Button(self.master, text='Maluku', command=self.__prov32, width =", "height = 300) canvas.pack() img = PhotoImage(master = canvas,file=\"Udang Selingkuh.png\") canvas.create_image(5,5, anchor=NW, image=img)", "Barat', command=self.__prov21, width = 25) self.bprov21.grid(row = 4, column = 7, columnspan =", "300) canvas.pack() img = PhotoImage(master = canvas,file=\"Mie Aceh.png\") canvas.create_image(5,5, anchor=NW, image=img) text1 =", "= PhotoImage(master = canvas,file=\"Padamaran.png\") canvas.create_image(5,5, anchor=NW, image=img) text1 = lst[11] text2 = Text(master2,", "= Text(master2, font = \"Arial 12\") text2.insert(INSERT, text1) text2.pack() master2.mainloop() def sulteng2(self): master2", "columnspan = 4) master0.mainloop() def dki1(self): master2 = Tk() master2.minsize(width = 800, height", "Kalimnantan Timur\") canvas = Canvas(master2, width = 300, height = 300) canvas.pack() img", "canvas,file=\"Ampiang Dadiah.png\") canvas.create_image(5,5, anchor=NW, image=img) text1 = lst[5] text2 = Text(master2, font =", "canvas,file=\"Coto Makassar.png\") 
canvas.create_image(5,5, anchor=NW, image=img) text1 = lst[57] text2 = Text(master2, font =", "font = \"Arial 12\") text2.insert(INSERT, text1) text2.pack() master2.mainloop() def __prov8(self): master0 = Tk()", "width = 25) self.bprov26.grid(row = 9, column = 7, columnspan = 4) self.bprov27", "Button(master0, text='<NAME>', command=self.ntb1, width = 25, height=3) master0.bprov1.grid(row = 1, column = 2,", "= 7, columnspan = 4) self.bprov22 = Button(self.master, text='Kalimantan Tengah', command=self.__prov22, width =", "command=self.malut2, width = 25, height=3) master0.bprov2.grid(row = 1, column = 6, columnspan =", "Button(master0, text='Kue Timpan', command=self.aceh2, width = 25, height=3) master0.bprov2.grid(row = 1, column =", "= canvas,file=\"Gulai Taboh.png\") canvas.create_image(5,5, anchor=NW, image=img) text1 = lst[19] text2 = Text(master2, font", "800, height = 600) master2.title(\"Makanan tradisional Jawa Barat\") canvas = Canvas(master2, width =", "600) master2.title(\"Makanan tradisional Jawa Barat\") canvas = Canvas(master2, width = 300, height =", "= Button(master0, text='<NAME>', command=self.kaltim1, width = 25, height=3) master0.bprov1.grid(row = 1, column =", "master0.bprov1.grid(row = 1, column = 2, columnspan = 4) master0.bprov2 = Button(master0, text='Gulai", "text2.pack() master2.mainloop() def kalut2(self): master2 = Tk() master2.minsize(width = 800, height = 600)", "2, columnspan = 4) master0.bprov2 = Button(master0, text='Nasi Bekekpor', command=self.kaltim2, width = 25,", "master2.mainloop() def pabar2(self): master2 = Tk() master2.minsize(width = 800, height = 600) master2.maxsize(width", "6, columnspan = 4) master0.mainloop() def diy1(self): master2 = Tk() master2.minsize(width = 800,", "canvas.pack() img = PhotoImage(master = canvas,file=\"Ayam Cincane.png\") canvas.create_image(5,5, anchor=NW, image=img) text1 = lst[46]", "= 2, columnspan = 4) master0.bprov2 = Button(master0, text='Bilenthango', command=self.goron2, width = 25,", 
"4) master0.bprov1 = Button(master0, text='<NAME>', command=self.bali1, width = 25, height=3) master0.bprov1.grid(row = 1,", "font = \"Arial 16 bold\") self.master.judul.grid(row = 0, column = 3, columnspan =", "= PhotoImage(master = canvas,file=\"Asidah.png\") canvas.create_image(5,5, anchor=NW, image=img) text1 = lst[7] text2 = Text(master2,", "width = 300, height = 300) canvas.pack() img = PhotoImage(master = canvas,file=\"Sinonggi.png\") canvas.create_image(5,5,", "font = \"Arial 12\") text2.insert(INSERT, text1) text2.pack() master2.mainloop() def __prov17(self): master0 = Tk()", "canvas.create_image(5,5, anchor=NW, image=img) text1 = lst[53] text2 = Text(master2, font = \"Arial 12\")", "= 800, height = 600) master2.title(\"Makanan tradisional Sumatera Selatan\") canvas = Canvas(master2, width", "= lst[9] text2 = Text(master2, font = \"Arial 12\") text2.insert(INSERT, text1) text2.pack() master2.mainloop()", "column = 2, columnspan = 4) master0.bprov1 = Button(master0, text='Nasi Sumsum', command=self.ban1, width", "text2 = Text(master2, font = \"Arial 12\") text2.insert(INSERT, text1) text2.pack() master2.mainloop() def __prov16(self):", "\"Arial 12\") text2.insert(INSERT, text1) text2.pack() master2.mainloop() def sulbar2(self): master2 = Tk() master2.minsize(width =", "= 7, columnspan = 4) self.bprov21 = Button(self.master, text='Kalimantan Barat', command=self.__prov21, width =", "Kalimantan Timur\") canvas = Canvas(master2, width = 300, height = 300) canvas.pack() img", "Sumatera Utara\") canvas = Canvas(master2, width = 300, height = 300) canvas.pack() img", "= 4) master0.bprov1 = Button(master0, text='<NAME>', command=self.sumut1, width = 25, height=3) master0.bprov1.grid(row =", "height = 600) master2.title(\"Makanan tradisional Sulawesi Utara\") canvas = Canvas(master2, width = 300,", "= 1, column = 6, columnspan = 4) master0.mainloop() def sulgar1(self): master2 =", "column = 2, columnspan = 4) master0.bprov2 = Button(master0, text='Asidah', 
command=self.riau2, width =", "= 1, column = 2, columnspan = 4) master0.bprov2 = Button(master0, text='<NAME>', command=self.dki2,", "Text(master2, font = \"Arial 12\") text2.insert(INSERT, text1) text2.pack() master2.mainloop() def kalbar2(self): master2 =", "= \"Arial 12\") text2.insert(INSERT, text1) text2.pack() master2.mainloop() def papua2(self): master2 = Tk() master2.minsize(width", "columnspan = 4) master0.bprov1 = Button(master0, text='Nasi Sumsum', command=self.ban1, width = 25, height=3)", "in line): line = f.readline() cmp = '' txt = '' while('<end>' not", "column = 2, columnspan = 4) master0.bprov2 = Button(master0, text='Sate Bulayak', command=self.ntb2, width", "ntt2(self): master2 = Tk() master2.minsize(width = 800, height = 600) master2.maxsize(width = 800,", "12\") text2.insert(INSERT, text1) text2.pack() master2.mainloop() def sumbar2(self): master2 = Tk() master2.minsize(width = 800,", "= 1, column = 2, columnspan = 4) master0.bprov2 = Button(master0, text='<NAME>', command=self.jambi2,", "= lst[1] text2 = Text(master2, font = \"Arial 12\") text2.insert(INSERT, text1) text2.pack() master2.mainloop()", "PhotoImage(master = canvas,file=\"Gulai Taboh.png\") canvas.create_image(5,5, anchor=NW, image=img) text1 = lst[19] text2 = Text(master2,", "Text(master2, font = \"Arial 12\") text2.insert(INSERT, text1) text2.pack() master2.mainloop() def __prov15(self): master0 =", "PhotoImage(master = canvas,file=\"W<NAME>in.png\") canvas.create_image(5,5, anchor=NW, image=img) text1 = lst[43] text2 = Text(master2, font", "= 600) master2.title(\"Makanan tradisional Maluku Utara\") canvas = Canvas(master2, width = 300, height", "15, column = 7, columnspan = 4) self.bprov33 = Button(self.master, text='Papua Barat', command=self.__prov33,", "master0.bprov1.grid(row = 1, column = 2, columnspan = 4) master0.bprov2 = Button(master0, text='Bika", "= canvas,file=\"Tiwul.png\") canvas.create_image(5,5, anchor=NW, image=img) text1 = lst[26] text2 = Text(master2, font 
=", "master0.bprov2.grid(row = 1, column = 6, columnspan = 4) master0.mainloop() def jateng1(self): master2", "= 2, columnspan = 4) master0.bprov1 = Button(master0, text='Nasi Sumsum', command=self.ban1, width =", "= 300) canvas.pack() img = PhotoImage(master = canvas,file=\"Gulai Taboh.png\") canvas.create_image(5,5, anchor=NW, image=img) text1", "lst[39] text2 = Text(master2, font = \"Arial 12\") text2.insert(INSERT, text1) text2.pack() master2.mainloop() def", "= 4) self.bprov29 = Button(self.master, text='Sulawesi Selatan', command=self.__prov29, width = 25) self.bprov29.grid(row =", "self.bprov8 = Button(self.master, text='Sumatera Selatan', command=self.__prov8, width = 25) self.bprov8.grid(row = 8, column", "text1 = lst[10] text2 = Text(master2, font = \"Arial 12\") text2.insert(INSERT, text1) text2.pack()", "= Button(self.master, text='Kepulauan Bangka Belitung', command=self.__prov9, width = 25) self.bprov9.grid(row = 9, column", "range(68): while('<deskripsi>' not in line): line = f.readline() cmp = '' txt =", "width = 25) self.bprov31.grid(row = 14, column = 7, columnspan = 4) self.bprov32", "\"Arial 12\") text2.insert(INSERT, text1) text2.pack() master2.mainloop() def aceh2(self): master2 = Tk() master2.minsize(width =", "text='Tinutuan', command=self.sulut2, width = 25, height=3) master0.bprov2.grid(row = 1, column = 6, columnspan", "= Button(self.master, text='Sumatera Utara', command=self.__prov2, width = 25) self.bprov2.grid(row = 2, column =", "command=self.kalbar2, width = 25, height=3) master0.bprov2.grid(row = 1, column = 6, columnspan =", "image=img) text1 = lst[19] text2 = Text(master2, font = \"Arial 12\") text2.insert(INSERT, text1)", "0, column = 2, columnspan = 4) master0.bprov1 = Button(master0, text='Soto Kudus', command=self.jateng1,", "command=self.__prov33, width = 25) self.bprov33.grid(row = 16, column = 7, columnspan = 4)", "sulgar1(self): master2 = Tk() master2.minsize(width = 800, height = 600) master2.maxsize(width = 800,", 
"master0.bprov2.grid(row = 1, column = 6, columnspan = 4) master0.mainloop() def kalsel1(self): master2", "canvas,file=\"Sambal Colo Colo.png\") canvas.create_image(5,5, anchor=NW, image=img) text1 = lst[62] text2 = Text(master2, font", "2, columnspan = 4) master0.bprov1 = Button(master0, text='Ikan Bakar Manokwari', command=self.pabar1, width =", "12\") text2.insert(INSERT, text1) text2.pack() master2.mainloop() def __prov30(self): master0 = Tk() master0.minsize(width = 450,", "= 4) master0.bprov2 = Button(master0, text='<NAME>', command=self.malut2, width = 25, height=3) master0.bprov2.grid(row =", "= 4) master0.bprov1 = Button(master0, text='Soto Betawi', command=self.dki1, width = 25, height=3) master0.bprov1.grid(row", "= 300, height = 300) canvas.pack() img = PhotoImage(master = canvas,file=\"Soto Betawi.png\") canvas.create_image(5,5,", "canvas.create_image(5,5, anchor=NW, image=img) text1 = lst[62] text2 = Text(master2, font = \"Arial 12\")", "= 300) canvas.pack() img = PhotoImage(master = canvas,file=\"Bolu Kemojo.png\") canvas.create_image(5,5, anchor=NW, image=img) text1", "width = 300, height = 300) canvas.pack() img = PhotoImage(master = canvas,file=\"Lepek Binti.png\")", "master0.bprov1 = Button(master0, text='<NAME>', command=self.jabar1, width = 25, height=3) master0.bprov1.grid(row = 1, column", "= 4) master0.bprov1 = Button(master0, text='Rujak Cingur', command=self.jatim1, width = 25, height=3) master0.bprov1.grid(row", "12\") text2.insert(INSERT, text1) text2.pack() master2.mainloop() def kaltim2(self): master2 = Tk() master2.minsize(width = 800,", "= 0, column = 2, columnspan = 4) master0.bprov1 = Button(master0, text='Sambal Colo-colo',", "Text(master2, font = \"Arial 12\") text2.insert(INSERT, text1) text2.pack() master2.mainloop() def jateng2(self): master2 =", "lst[6] text2 = Text(master2, font = \"Arial 12\") text2.insert(INSERT, text1) text2.pack() master2.mainloop() def", "= 800, height = 600) master2.title(\"Makanan tradisional 
Kalimantan Tengha\") canvas = Canvas(master2, width", "= 6, columnspan = 4) master0.mainloop() def kalbar1(self): master2 = Tk() master2.minsize(width =", "Riau\") canvas = Canvas(master2, width = 300, height = 300) canvas.pack() img =", "Text(master2, font = \"Arial 12\") text2.insert(INSERT, text1) text2.pack() master2.mainloop() def jambi2(self): master2 =", "= 0, column = 2, columnspan = 4) master0.bprov1 = Button(master0, text='Sinonggi', command=self.sulgar1,", "600) master2.maxsize(width = 800, height = 600) master2.title(\"Makanan tradisional Maluku\") canvas = Canvas(master2,", "4) self.bprov4 = Button(self.master, text='Riau', command=self.__prov4, width = 25) self.bprov4.grid(row = 4, column", "300) canvas.pack() img = PhotoImage(master = canvas,file=\"Ikan Bakar Manokwari.png\") canvas.create_image(5,5, anchor=NW, image=img) text1", "4) master0.bprov2 = Button(master0, text='Bagar Hiu', command=self.beng2, width = 25, height=3) master0.bprov2.grid(row =", "= PhotoImage(master = canvas,file=\"Sate Lilit.png\") canvas.create_image(5,5, anchor=NW, image=img) text1 = lst[32] text2 =", "self.bprov33 = Button(self.master, text='Papua Barat', command=self.__prov33, width = 25) self.bprov33.grid(row = 16, column", "master2.maxsize(width = 800, height = 600) master2.title(\"Makanan tradisional Kalimantan Tengha\") canvas = Canvas(master2,", "command=self.__prov28, width = 25) self.bprov28.grid(row = 11, column = 7, columnspan = 4)", "25, height=3) master0.bprov2.grid(row = 1, column = 6, columnspan = 4) master0.mainloop() def", "width = 300, height = 300) canvas.pack() img = PhotoImage(master = canvas,file=\"Gangan Asam", "height = 600) master2.title(\"Makanan tradisional Kalimantan Tengah\") canvas = Canvas(master2, width = 300,", "text2.insert(INSERT, text1) text2.pack() master2.mainloop() def __prov33(self): master0 = Tk() master0.minsize(width = 450, height", "command=self.kbang2, width = 25, height=3) master0.bprov2.grid(row = 1, column = 6, columnspan =", 
"font = \"Arial 12\") text2.insert(INSERT, text1) text2.pack() master2.mainloop() def __prov12(self): master0 = Tk()", "lancar #aminnn from tkinter import * lst = [] def readf(): with open('all.txt',", "text='<NAME>', command=self.ban2, width = 25, height=3) master0.bprov2.grid(row = 1, column = 6, columnspan", "self.bprov21 = Button(self.master, text='Kalimantan Barat', command=self.__prov21, width = 25) self.bprov21.grid(row = 4, column", "font = \"Arial 12\") text2.insert(INSERT, text1) text2.pack() master2.mainloop() def __prov22(self): master0 = Tk()", "4) master0.bprov2 = Button(master0, text='<NAME>', command=self.dki2, width = 25, height=3) master0.bprov2.grid(row = 1,", "master0.bprov1 = Button(master0, text='Lepek Binti', command=self.beng1, width = 25, height=3) master0.bprov1.grid(row = 1,", "kaltim2(self): master2 = Tk() master2.minsize(width = 800, height = 600) master2.maxsize(width = 800,", "= 1, column = 2, columnspan = 4) master0.bprov2 = Button(master0, text='Bika Ambon',", "= \"Arial 12\") text2.insert(INSERT, text1) text2.pack() master2.mainloop() def jabar2(self): master2 = Tk() master2.minsize(width", "tradisional Papua Barat\") canvas = Canvas(master2, width = 300, height = 300) canvas.pack()", "1, column = 2, columnspan = 4) master0.bprov2 = Button(master0, text='<NAME>', command=self.dki2, width", "columnspan = 4) self.bprov10 = Button(self.master, text='Lampung', command=self.__prov10, width = 25) self.bprov10.grid(row =", "= 4) master0.bprov1 = Button(master0, text='Nasi Sumsum', command=self.ban1, width = 25, height=3) master0.bprov1.grid(row", "PhotoImage(master = canvas,file=\"Dorokdok.png\") canvas.create_image(5,5, anchor=NW, image=img) text1 = lst[25] text2 = Text(master2, font", "text1) text2.pack() master2.mainloop() def aceh2(self): master2 = Tk() master2.minsize(width = 800, height =", "text1) text2.pack() master2.mainloop() def jambi2(self): master2 = Tk() master2.minsize(width = 800, height =", "columnspan = 4) master0.bprov2 
= Button(master0, text='Ampiang Dadiah', command=self.sumbar2, width = 25, height=3)", "= 300, height = 300) canvas.pack() img = PhotoImage(master = canvas,file=\"Mie Aceh.png\") canvas.create_image(5,5,", "master2.mainloop() def goron2(self): master2 = Tk() master2.minsize(width = 800, height = 600) master2.maxsize(width", "height=3) master0.bprov2.grid(row = 1, column = 6, columnspan = 4) master0.mainloop() def riau1(self):", "text2.insert(INSERT, text1) text2.pack() master2.mainloop() def kalsel2(self): master2 = Tk() master2.minsize(width = 800, height", "tradisional Sulawesi Barat\") canvas = Canvas(master2, width = 300, height = 300) canvas.pack()", "4) master0.mainloop() def ntb1(self): master2 = Tk() master2.minsize(width = 800, height = 600)", "Kolo.png\") canvas.create_image(5,5, anchor=NW, image=img) text1 = lst[37] text2 = Text(master2, font = \"Arial", "canvas.pack() img = PhotoImage(master = canvas,file=\"Kasoami.png\") canvas.create_image(5,5, anchor=NW, image=img) text1 = lst[59] text2", "lst[20] text2 = Text(master2, font = \"Arial 12\") text2.insert(INSERT, text1) text2.pack() master2.mainloop() def", "master2.title(\"Makanan tradisional Lampung\") canvas = Canvas(master2, width = 300, height = 300) canvas.pack()", "2, columnspan = 4) master0.bprov1 = Button(master0, text='Rendang', command=self.sumbar1, width = 25, height=3)", "= 1, column = 6, columnspan = 4) master0.mainloop() def sumsel1(self): master2 =", "800, height = 600) master2.maxsize(width = 800, height = 600) master2.title(\"Makanan tradisional Aceh\")", "master0.bprov1.grid(row = 1, column = 2, columnspan = 4) master0.bprov2 = Button(master0, text='Ampiang", "canvas.create_image(5,5, anchor=NW, image=img) text1 = lst[54] text2 = Text(master2, font = \"Arial 12\")", "25) self.bprov13.grid(row = 13, column = 3, columnspan = 4) self.bprov14 = Button(self.master,", "25) self.bprov23.grid(row = 6, column = 7, columnspan = 4) self.bprov24 = Button(self.master,", "img = PhotoImage(master = 
canvas,file=\"Rujak Cingur.png\") canvas.create_image(5,5, anchor=NW, image=img) text1 = lst[30] text2", "= PhotoImage(master = canvas,file=\"Udang Selingkuh.png\") canvas.create_image(5,5, anchor=NW, image=img) text1 = lst[66] text2 =", "lst[29] text2 = Text(master2, font = \"Arial 12\") text2.insert(INSERT, text1) text2.pack() master2.mainloop() def", "canvas.create_image(5,5, anchor=NW, image=img) text1 = lst[26] text2 = Text(master2, font = \"Arial 12\")", "master2.title(\"Makanan tradisional Kalimantan Selatan\") canvas = Canvas(master2, width = 300, height = 300)", "1, column = 6, columnspan = 4) master0.mainloop() def sumbar1(self): master2 = Tk()", "width = 300, height = 300) canvas.pack() img = PhotoImage(master = canvas,file=\"Rujak Cingur.png\")", "def sulteng2(self): master2 = Tk() master2.minsize(width = 800, height = 600) master2.maxsize(width =", "2, columnspan = 4) master0.bprov2 = Button(master0, text='<NAME>', command=self.kalteng2, width = 25, height=3)", "def kalteng1(self): master2 = Tk() master2.minsize(width = 800, height = 600) master2.maxsize(width =", "Sulawesi Tengah\") canvas = Canvas(master2, width = 300, height = 300) canvas.pack() img", "= 0, column = 2, columnspan = 4) master0.bprov1 = Button(master0, text='Apang Bugis',", "master2.title(\"Makanan tradisional Maluku Utara\") canvas = Canvas(master2, width = 300, height = 300)", "Text(master2, font = \"Arial 12\") text2.insert(INSERT, text1) text2.pack() master2.mainloop() def __prov11(self): master0 =", "text2.insert(INSERT, text1) text2.pack() master2.mainloop() def __prov25(self): master0 = Tk() master0.minsize(width = 450, height", "600) master2.title(\"Makanan tradisional DKI Jakarta\") canvas = Canvas(master2, width = 300, height =", "def kalbar2(self): master2 = Tk() master2.minsize(width = 800, height = 600) master2.maxsize(width =", "= 4) master0.mainloop() def malut1(self): master2 = Tk() master2.minsize(width = 800, height =", "kriau2(self): master2 = Tk() 
master2.minsize(width = 800, height = 600) master2.maxsize(width = 800,", "Text(master2, font = \"Arial 12\") text2.insert(INSERT, text1) text2.pack() master2.mainloop() def aceh2(self): master2 =", "'' for i in range(68): while('<deskripsi>' not in line): line = f.readline() cmp", "= 800, height = 600) master2.title(\"Makanan tradisional Jawa Barat\") canvas = Canvas(master2, width", "text1) text2.pack() master2.mainloop() def kalut2(self): master2 = Tk() master2.minsize(width = 800, height =", "= 1, column = 6, columnspan = 4) master0.mainloop() def kalteng1(self): master2 =", "master2.maxsize(width = 800, height = 600) master2.title(\"Makanan tradisional Sulawesi Selatan\") canvas = Canvas(master2,", "600) master2.title(\"Makanan tradisional Gorontalo\") canvas = Canvas(master2, width = 300, height = 300)", "ketahui\", font = \"Arial 16 bold\") self.master.judul.grid(row = 0, column = 3, columnspan", "text2.insert(INSERT, text1) text2.pack() master2.mainloop() def __prov32(self): master0 = Tk() master0.minsize(width = 450, height", "lst[19] text2 = Text(master2, font = \"Arial 12\") text2.insert(INSERT, text1) text2.pack() master2.mainloop() def", "columnspan = 4) master0.bprov1 = Button(master0, text='Ikan Bakar Manokwari', command=self.pabar1, width = 25,", "as f: line = '' for i in range(68): while('<deskripsi>' not in line):", "command=self.__prov18, width = 25) self.bprov18.grid(row = 1, column = 7, columnspan = 4)", "text='Kalimantan Tengah', command=self.__prov22, width = 25) self.bprov22.grid(row = 5, column = 7, columnspan", "anchor=NW, image=img) text1 = lst[34] text2 = Text(master2, font = \"Arial 12\") text2.insert(INSERT,", "canvas.pack() img = PhotoImage(master = canvas,file=\"Sate Ulat Sagu.png\") canvas.create_image(5,5, anchor=NW, image=img) text1 =", "tradisional Banten\") canvas = Canvas(master2, width = 300, height = 300) canvas.pack() img", "text2 = Text(master2, font = \"Arial 12\") text2.insert(INSERT, text1) text2.pack() 
master2.mainloop() def jabar2(self):", "self.bprov2.grid(row = 2, column = 3, columnspan = 4) self.bprov3 = Button(self.master, text='Sumatera", "= canvas,file=\"Kaledo.png\") canvas.create_image(5,5, anchor=NW, image=img) text1 = lst[55] text2 = Text(master2, font =", "lst[33] text2 = Text(master2, font = \"Arial 12\") text2.insert(INSERT, text1) text2.pack() master2.mainloop() def", "canvas.create_image(5,5, anchor=NW, image=img) text1 = lst[3] text2 = Text(master2, font = \"Arial 12\")", "text2.insert(INSERT, text1) text2.pack() master2.mainloop() def jateng2(self): master2 = Tk() master2.minsize(width = 800, height", "text1) text2.pack() master2.mainloop() def __prov2(self): master0 = Tk() master0.minsize(width = 450, height =", "= 600) master2.maxsize(width = 800, height = 600) master2.title(\"Makanan tradisional NTT\") canvas =", "self.bprov3.grid(row = 3, column = 3, columnspan = 4) self.bprov4 = Button(self.master, text='Riau',", "self.bprov22 = Button(self.master, text='Kalimantan Tengah', command=self.__prov22, width = 25) self.bprov22.grid(row = 5, column", "def lamp1(self): master2 = Tk() master2.minsize(width = 800, height = 600) master2.maxsize(width =", "columnspan = 4) self.bprov4 = Button(self.master, text='Riau', command=self.__prov4, width = 25) self.bprov4.grid(row =", "Cingur.png\") canvas.create_image(5,5, anchor=NW, image=img) text1 = lst[30] text2 = Text(master2, font = \"Arial", "= 7, column = 3, columnspan = 4) self.bprov8 = Button(self.master, text='Sumatera Selatan',", "= 0, column = 2, columnspan = 4) master0.bprov1 = Button(master0, text='Manday', command=self.kalsel1,", "= 25) self.bprov22.grid(row = 5, column = 7, columnspan = 4) self.bprov23 =", "= \"Arial 12\") text2.insert(INSERT, text1) text2.pack() master2.mainloop() def ntb2(self): master2 = Tk() master2.minsize(width", "def sumsel2(self): master2 = Tk() master2.minsize(width = 800, height = 600) master2.maxsize(width =", "Text(master2, font = \"Arial 12\") text2.insert(INSERT, 
text1) text2.pack() master2.mainloop() def __prov13(self): master0 =", "canvas.pack() img = PhotoImage(master = canvas,file=\"Tapa Kolo.png\") canvas.create_image(5,5, anchor=NW, image=img) text1 = lst[37]", "4) master0.bprov2 = Button(master0, text='Bilenthango', command=self.goron2, width = 25, height=3) master0.bprov2.grid(row = 1,", "4) master0.mainloop() def kalbar1(self): master2 = Tk() master2.minsize(width = 800, height = 600)", "= 300, height = 300) canvas.pack() img = PhotoImage(master = canvas,file=\"Kalumpe.png\") canvas.create_image(5,5, anchor=NW,", "300) canvas.pack() img = PhotoImage(master = canvas,file=\"<NAME>oyak.png\") canvas.create_image(5,5, anchor=NW, image=img) text1 = lst[41]", "def __prov15(self): master0 = Tk() master0.minsize(width = 450, height = 100) master0.maxsize(width =", "12\") text2.insert(INSERT, text1) text2.pack() master2.mainloop() def __prov27(self): master0 = Tk() master0.minsize(width = 450,", "PhotoImage(master = canvas,file=\"Lepek Binti.png\") canvas.create_image(5,5, anchor=NW, image=img) text1 = lst[12] text2 = Text(master2,", "= canvas,file=\"<NAME>oyak.png\") canvas.create_image(5,5, anchor=NW, image=img) text1 = lst[41] text2 = Text(master2, font =", "= PhotoImage(master = canvas,file=\"Coto Makassar.png\") canvas.create_image(5,5, anchor=NW, image=img) text1 = lst[57] text2 =", "column = 2, columnspan = 4) master0.bprov1 = Button(master0, text='<NAME>', command=self.malut1, width =", "master2.mainloop() def __prov32(self): master0 = Tk() master0.minsize(width = 450, height = 100) master0.maxsize(width", "text2 = Text(master2, font = \"Arial 12\") text2.insert(INSERT, text1) text2.pack() master2.mainloop() def ban2(self):", "columnspan = 4) self.bprov26 = Button(self.master, text='Sulawesi Utara', command=self.__prov26, width = 25) self.bprov26.grid(row", "columnspan = 4) master0.bprov2 = Button(master0, text='Tekwan Palembang', command=self.sumsel2, width = 25, height=3)", "text2 = Text(master2, font = \"Arial 
12\") text2.insert(INSERT, text1) text2.pack() master2.mainloop() def __prov23(self):", "6, columnspan = 4) master0.mainloop() def sumbar1(self): master2 = Tk() master2.minsize(width = 800,", "\"Arial 12\") text2.insert(INSERT, text1) text2.pack() master2.mainloop() def sulteng2(self): master2 = Tk() master2.minsize(width =", "canvas.pack() img = PhotoImage(master = canvas,file=\"Belaca Belitung.png\") canvas.create_image(5,5, anchor=NW, image=img) text1 = lst[17]", "4) master0.mainloop() def sulsel1(self): master2 = Tk() master2.minsize(width = 800, height = 600)", "def malu1(self): master2 = Tk() master2.minsize(width = 800, height = 600) master2.maxsize(width =", "text='Kasoami', command=self.sulgar2, width = 25, height=3) master0.bprov2.grid(row = 1, column = 6, columnspan", "300, height = 300) canvas.pack() img = PhotoImage(master = canvas,file=\"Bika Ambon.png\") canvas.create_image(5,5, anchor=NW,", "text2.pack() master2.mainloop() def __prov7(self): master0 = Tk() master0.minsize(width = 450, height = 100)", "anchor=NW, image=img) text1 = lst[14] text2 = Text(master2, font = \"Arial 12\") text2.insert(INSERT,", "cmp cmp = f.readline() lst.append(txt) line = f.readline() class DirektoriMakanan(): def __init__(self, master=Tk()):", "canvas.create_image(5,5, anchor=NW, image=img) text1 = lst[52] text2 = Text(master2, font = \"Arial 12\")", "= \"Arial 12\") text2.insert(INSERT, text1) text2.pack() master2.mainloop() def __prov33(self): master0 = Tk() master0.minsize(width", "text='Gulai Taboh', command=self.lamp2, width = 25, height=3) master0.bprov2.grid(row = 1, column = 6,", "text1 = lst[55] text2 = Text(master2, font = \"Arial 12\") text2.insert(INSERT, text1) text2.pack()", "master2.mainloop() def __prov19(self): master0 = Tk() master0.minsize(width = 450, height = 100) master0.maxsize(width", "text1) text2.pack() master2.mainloop() def __prov16(self): master0 = Tk() master0.minsize(width = 450, height =", "columnspan = 4) master0.mainloop() def 
papua1(self): master2 = Tk() master2.minsize(width = 800, height", "width = 300, height = 300) canvas.pack() img = PhotoImage(master = canvas,file=\"Kalumpe.png\") canvas.create_image(5,5,", "600) master2.title(\"Makanan tradisional Bali\") canvas = Canvas(master2, width = 300, height = 300)", "height=3) master0.bprov2.grid(row = 1, column = 6, columnspan = 4) master0.mainloop() def lamp1(self):", "text2 = Text(master2, font = \"Arial 12\") text2.insert(INSERT, text1) text2.pack() master2.mainloop() def dki2(self):", "self.bprov26.grid(row = 9, column = 7, columnspan = 4) self.bprov27 = Button(self.master, text='Sulawesi", "0, column = 2, columnspan = 4) master0.bprov1 = Button(master0, text=\"Se'i\", command=self.ntt1, width", "6, columnspan = 4) master0.mainloop() def kalteng1(self): master2 = Tk() master2.minsize(width = 800,", "column = 2, columnspan = 4) master0.bprov2 = Button(master0, text='Gangan Asam Banjar', command=self.kalsel2,", "font = \"Arial 12\") text2.insert(INSERT, text1) text2.pack() master2.mainloop() def sulut2(self): master2 = Tk()", "kbang1(self): master2 = Tk() master2.minsize(width = 800, height = 600) master2.maxsize(width = 800,", "canvas.create_image(5,5, anchor=NW, image=img) text1 = lst[42] text2 = Text(master2, font = \"Arial 12\")", "canvas.pack() img = PhotoImage(master = canvas,file=\"Gatang Kenari.png\") canvas.create_image(5,5, anchor=NW, image=img) text1 = lst[60]", "tradisional Maluku Utara\") canvas = Canvas(master2, width = 300, height = 300) canvas.pack()", "4) master0.bprov1 = Button(master0, text='Kelapertaar', command=self.sulut1, width = 25, height=3) master0.bprov1.grid(row = 1,", "PhotoImage(master = canvas,file=\"Ampiang Dadiah.png\") canvas.create_image(5,5, anchor=NW, image=img) text1 = lst[5] text2 = Text(master2,", "def __prov19(self): master0 = Tk() master0.minsize(width = 450, height = 100) master0.maxsize(width =", "= canvas,file=\"Sinonggi.png\") canvas.create_image(5,5, anchor=NW, image=img) text1 = lst[58] 
text2 = Text(master2, font =", "9, column = 3, columnspan = 4) self.bprov10 = Button(self.master, text='Lampung', command=self.__prov10, width", "text2 = Text(master2, font = \"Arial 12\") text2.insert(INSERT, text1) text2.pack() master2.mainloop() if __name__", "300) canvas.pack() img = PhotoImage(master = canvas,file=\"Ampiang Dadiah.png\") canvas.create_image(5,5, anchor=NW, image=img) text1 =", "= 300, height = 300) canvas.pack() img = PhotoImage(master = canvas,file=\"Kohu Kohu.png\") canvas.create_image(5,5,", "image=img) text1 = lst[27] text2 = Text(master2, font = \"Arial 12\") text2.insert(INSERT, text1)", "Asam Banjar.png\") canvas.create_image(5,5, anchor=NW, image=img) text1 = lst[45] text2 = Text(master2, font =", "= 4) master0.bprov2 = Button(master0, text='Sate Ulat Sagu', command=self.pabar2, width = 25, height=3)", "bold\") self.master.judul.grid(row = 0, column = 3, columnspan = 8) self.bprov1 = Button(self.master,", "font = \"Arial 12\") text2.insert(INSERT, text1) text2.pack() master2.mainloop() def __prov3(self): master0 = Tk()", "= 600) master2.maxsize(width = 800, height = 600) master2.title(\"Makanan tradisional Riau\") canvas =", "800, height = 600) master2.maxsize(width = 800, height = 600) master2.title(\"Makanan tradisional Sulawesi", "= 0, column = 2, columnspan = 4) master0.bprov1 = Button(master0, text='Tiwul', command=self.diy1,", "= lst[43] text2 = Text(master2, font = \"Arial 12\") text2.insert(INSERT, text1) text2.pack() master2.mainloop()", "= 300) canvas.pack() img = PhotoImage(master = canvas,file=\"Lepek Binti.png\") canvas.create_image(5,5, anchor=NW, image=img) text1", "Canvas(master2, width = 300, height = 300) canvas.pack() img = PhotoImage(master = canvas,file=\"Papeda.png\")", "columnspan = 4) master0.bprov2 = Button(master0, text='Jadah Tempe', command=self.diy2, width = 25, height=3)", "text='<NAME>u', command=self.bali2, width = 25, height=3) master0.bprov2.grid(row = 1, column = 6, columnspan", "anchor=NW, image=img) 
text1 = lst[67] text2 = Text(master2, font = \"Arial 12\") text2.insert(INSERT,", "jabar2(self): master2 = Tk() master2.minsize(width = 800, height = 600) master2.maxsize(width = 800,", "= 4) master0.mainloop() def kalsel1(self): master2 = Tk() master2.minsize(width = 800, height =", "columnspan = 4) master0.bprov2 = Button(master0, text='Gong gong', command=self.kriau2, width = 25, height=3)", "= Button(self.master, text='Maluku Utara', command=self.__prov31, width = 25) self.bprov31.grid(row = 14, column =", "canvas.pack() img = PhotoImage(master = canvas,file=\"Mie Aceh.png\") canvas.create_image(5,5, anchor=NW, image=img) text1 = lst[0]", "lst[1] text2 = Text(master2, font = \"Arial 12\") text2.insert(INSERT, text1) text2.pack() master2.mainloop() def", "columnspan = 4) master0.bprov1 = Button(master0, text='<NAME>', command=self.kbang1, width = 25, height=3) master0.bprov1.grid(row", "Canvas(master2, width = 300, height = 300) canvas.pack() img = PhotoImage(master = canvas,file=\"Martabak", "text1) text2.pack() master2.mainloop() def __prov10(self): master0 = Tk() master0.minsize(width = 450, height =", "= canvas,file=\"Rawon.png\") canvas.create_image(5,5, anchor=NW, image=img) text1 = lst[31] text2 = Text(master2, font =", "Maluku Utara\") canvas = Canvas(master2, width = 300, height = 300) canvas.pack() img", "width = 300, height = 300) canvas.pack() img = PhotoImage(master = canvas,file=\"Kohu Kohu.png\")", "width = 25) self.bprov16.grid(row = 16, column = 3, columnspan = 4) self.bprov17", "master2.title(\"Makanan tradisional Sulawesi Tenggara\") canvas = Canvas(master2, width = 300, height = 300)", "= lst[20] text2 = Text(master2, font = \"Arial 12\") text2.insert(INSERT, text1) text2.pack() master2.mainloop()", "= canvas,file=\"Rendang.png\") canvas.create_image(5,5, anchor=NW, image=img) text1 = lst[4] text2 = Text(master2, font =", "Text(master2, font = \"Arial 12\") text2.insert(INSERT, text1) text2.pack() master2.mainloop() def __prov9(self): master0 
=", "= 4) self.bprov13 = Button(self.master, text='DKI Jakarta', command=self.__prov13, width = 25) self.bprov13.grid(row =", "text2.pack() master2.mainloop() def beng2(self): master2 = Tk() master2.minsize(width = 800, height = 600)", "img = PhotoImage(master = canvas,file=\"Asidah.png\") canvas.create_image(5,5, anchor=NW, image=img) text1 = lst[7] text2 =", "command=self.kriau1, width = 25, height=3) master0.bprov1.grid(row = 1, column = 2, columnspan =", "\"Arial 12\") text2.insert(INSERT, text1) text2.pack() master2.mainloop() def __prov11(self): master0 = Tk() master0.minsize(width =", "25) self.bprov5.grid(row = 5, column = 3, columnspan = 4) self.bprov6 = Button(self.master,", "tradisional DKI Jakarta\") canvas = Canvas(master2, width = 300, height = 300) canvas.pack()", "= 7, columnspan = 4) self.bprov23 = Button(self.master, text='Kalimantan Selatan', command=self.__prov23, width =", "self.bprov33.grid(row = 16, column = 7, columnspan = 4) self.bprov34 = Button(self.master, text='Papua',", "text2 = Text(master2, font = \"Arial 12\") text2.insert(INSERT, text1) text2.pack() master2.mainloop() def __prov11(self):", "Bangka Belitung', command=self.__prov9, width = 25) self.bprov9.grid(row = 9, column = 3, columnspan", "= 4) master0.mainloop() def ntt1(self): master2 = Tk() master2.minsize(width = 800, height =", "= 300) canvas.pack() img = PhotoImage(master = canvas,file=\"Uta Kelo.png\") canvas.create_image(5,5, anchor=NW, image=img) text1", "height = 300) canvas.pack() img = PhotoImage(master = canvas,file=\"Nasi Sumsum.png\") canvas.create_image(5,5, anchor=NW, image=img)", "= 300, height = 300) canvas.pack() img = PhotoImage(master = canvas,file=\"<NAME>.png\") canvas.create_image(5,5, anchor=NW,", "width = 300, height = 300) canvas.pack() img = PhotoImage(master = canvas,file=\"Dorokdok.png\") canvas.create_image(5,5,", "2, columnspan = 4) master0.bprov1 = Button(master0, text='Kalumpe', command=self.kalteng1, width = 25, height=3)", "17, column = 7, 
columnspan = 4) self.master.mainloop() def __prov1(self): master0 = Tk()", "= \"Arial 12\") text2.insert(INSERT, text1) text2.pack() master2.mainloop() def sulut2(self): master2 = Tk() master2.minsize(width", "lst[51] text2 = Text(master2, font = \"Arial 12\") text2.insert(INSERT, text1) text2.pack() master2.mainloop() def", "4) master0.bprov1 = Button(master0, text='<NAME>', command=self.sumut1, width = 25, height=3) master0.bprov1.grid(row = 1,", "img = PhotoImage(master = canvas,file=\"<NAME>oyak.png\") canvas.create_image(5,5, anchor=NW, image=img) text1 = lst[41] text2 =", "text2.pack() master2.mainloop() def sulteng2(self): master2 = Tk() master2.minsize(width = 800, height = 600)", "master0.bprov1.grid(row = 1, column = 2, columnspan = 4) master0.bprov2 = Button(master0, text='Kue", "Canvas(master2, width = 300, height = 300) canvas.pack() img = PhotoImage(master = canvas,file=\"Rujak", "= 4) master0.bprov2 = Button(master0, text='Tinutuan', command=self.sulut2, width = 25, height=3) master0.bprov2.grid(row =", "Canvas(master2, width = 300, height = 300) canvas.pack() img = PhotoImage(master = canvas,file=\"Ikan", "anchor=NW, image=img) text1 = lst[29] text2 = Text(master2, font = \"Arial 12\") text2.insert(INSERT,", "= 25) self.bprov7.grid(row = 7, column = 3, columnspan = 4) self.bprov8 =", "1, column = 2, columnspan = 4) master0.bprov2 = Button(master0, text='Nasi Gerombyang', command=self.jateng2,", "= Button(master0, text='<NAME>', command=self.sumut1, width = 25, height=3) master0.bprov1.grid(row = 1, column =", "0, column = 2, columnspan = 4) master0.bprov1 = Button(master0, text='<NAME>', command=self.ntb1, width", "Canvas(master2, width = 300, height = 300) canvas.pack() img = PhotoImage(master = canvas,file=\"Padamaran.png\")", "master0.bprov1 = Button(master0, text='<NAME>', command=self.kriau1, width = 25, height=3) master0.bprov1.grid(row = 1, column", "font = \"Arial 12\") text2.insert(INSERT, text1) text2.pack() master2.mainloop() def 
__prov31(self): master0 = Tk()", "25) self.bprov20.grid(row = 3, column = 7, columnspan = 4) self.bprov21 = Button(self.master,", "= \"Arial 12\") text2.insert(INSERT, text1) text2.pack() master2.mainloop() def malut2(self): master2 = Tk() master2.minsize(width", "Betawi', command=self.dki1, width = 25, height=3) master0.bprov1.grid(row = 1, column = 2, columnspan", "command=self.kalsel2, width = 25, height=3) master0.bprov2.grid(row = 1, column = 6, columnspan =", "height=3) master0.bprov2.grid(row = 1, column = 6, columnspan = 4) master0.mainloop() def goron1(self):", "width = 300, height = 300) canvas.pack() img = PhotoImage(master = canvas,file=\"W<NAME>in.png\") canvas.create_image(5,5,", "800, height = 600) master2.maxsize(width = 800, height = 600) master2.title(\"Makanan tradisional Riau\")", "Canvas(master2, width = 300, height = 300) canvas.pack() img = PhotoImage(master = canvas,file=\"Bubur", "10, column = 3, columnspan = 4) self.bprov11 = Button(self.master, text='Banten', command=self.__prov11, width", "column = 2, columnspan = 4) master0.bprov1 = Button(master0, text='<NAME>', command=self.ntb1, width =", "canvas.pack() img = PhotoImage(master = canvas,file=\"Nasi Bekepor.png\") canvas.create_image(5,5, anchor=NW, image=img) text1 = lst[47]", "4) master0.bprov2 = Button(master0, text='Rawon', command=self.jatim2, width = 25, height=3) master0.bprov2.grid(row = 1,", "img = PhotoImage(master = canvas,file=\"Coto Makassar.png\") canvas.create_image(5,5, anchor=NW, image=img) text1 = lst[57] text2", "Button(self.master, text='Sulawesi Barat', command=self.__prov27, width = 25) self.bprov27.grid(row = 10, column = 7,", "text2 = Text(master2, font = \"Arial 12\") text2.insert(INSERT, text1) text2.pack() master2.mainloop() def __prov8(self):", "master2.title(\"Makanan tradisional BAnten\") canvas = Canvas(master2, width = 300, height = 300) canvas.pack()", "= PhotoImage(master = canvas,file=\"Klapertaart.png\") canvas.create_image(5,5, anchor=NW, image=img) 
text1 = lst[50] text2 = Text(master2,", "font = \"Arial 12\") text2.insert(INSERT, text1) text2.pack() master2.mainloop() def __prov27(self): master0 = Tk()", "= Button(master0, text='Jepa', command=self.sulbar2, width = 25, height=3) master0.bprov2.grid(row = 1, column =", "= Button(master0, text='Soto Kudus', command=self.jateng1, width = 25, height=3) master0.bprov1.grid(row = 1, column", "anchor=NW, image=img) text1 = lst[3] text2 = Text(master2, font = \"Arial 12\") text2.insert(INSERT,", "kalbar2(self): master2 = Tk() master2.minsize(width = 800, height = 600) master2.maxsize(width = 800,", "width = 300, height = 300) canvas.pack() img = PhotoImage(master = canvas,file=\"Jadah Tempe.png\")", "columnspan = 4) master0.mainloop() def aceh1(self): master2 = Tk() master2.minsize(width = 800, height", "master0.bprov2.grid(row = 1, column = 6, columnspan = 4) master0.mainloop() def aceh1(self): master2", "Text(master2, font = \"Arial 12\") text2.insert(INSERT, text1) text2.pack() master2.mainloop() def malut2(self): master2 =", "canvas.create_image(5,5, anchor=NW, image=img) text1 = lst[6] text2 = Text(master2, font = \"Arial 12\")", "command=self.sumbar2, width = 25, height=3) master0.bprov2.grid(row = 1, column = 6, columnspan =", "malu2(self): master2 = Tk() master2.minsize(width = 800, height = 600) master2.maxsize(width = 800,", "master2.mainloop() def sumsel2(self): master2 = Tk() master2.minsize(width = 800, height = 600) master2.maxsize(width", "height = 600) master2.maxsize(width = 800, height = 600) master2.title(\"Makanan tradisional Kepulauan Riau\")", "Makassar', command=self.sulsel2, width = 25, height=3) master0.bprov2.grid(row = 1, column = 6, columnspan", "canvas,file=\"Jadah Tempe.png\") canvas.create_image(5,5, anchor=NW, image=img) text1 = lst[27] text2 = Text(master2, font =", "4) self.bprov22 = Button(self.master, text='Kalimantan Tengah', command=self.__prov22, width = 25) self.bprov22.grid(row = 5,", "height = 300) canvas.pack() img = 
PhotoImage(master = canvas,file=\"Bilenthango.png\") canvas.create_image(5,5, anchor=NW, image=img) text1", "master2.title(\"Makanan tradisional Kalimantan Utara\") canvas = Canvas(master2, width = 300, height = 300)", "height = 300) canvas.pack() img = PhotoImage(master = canvas,file=\"Lepek Binti.png\") canvas.create_image(5,5, anchor=NW, image=img)", "anchor=NW, image=img) text1 = lst[47] text2 = Text(master2, font = \"Arial 12\") text2.insert(INSERT,", "master2.maxsize(width = 800, height = 600) master2.title(\"Makanan tradisional Sumatera Selatan\") canvas = Canvas(master2,", "canvas.create_image(5,5, anchor=NW, image=img) text1 = lst[31] text2 = Text(master2, font = \"Arial 12\")", "= 3, columnspan = 4) self.bprov9 = Button(self.master, text='Kepulauan Bangka Belitung', command=self.__prov9, width", "4) master0.bprov1 = Button(master0, text='Sop Konro', command=self.sulsel1, width = 25, height=3) master0.bprov1.grid(row =", "Manokwari.png\") canvas.create_image(5,5, anchor=NW, image=img) text1 = lst[64] text2 = Text(master2, font = \"Arial", "text='Bika Ambon', command=self.sumut2, width = 25, height=3) master0.bprov2.grid(row = 1, column = 6,", "Paddas Sambas.png\") canvas.create_image(5,5, anchor=NW, image=img) text1 = lst[40] text2 = Text(master2, font =", "anchor=NW, image=img) text1 = lst[46] text2 = Text(master2, font = \"Arial 12\") text2.insert(INSERT,", "\"Arial 12\") text2.insert(INSERT, text1) text2.pack() master2.mainloop() def goron2(self): master2 = Tk() master2.minsize(width =", "= \"Arial 12\") text2.insert(INSERT, text1) text2.pack() master2.mainloop() def __prov11(self): master0 = Tk() master0.minsize(width", "columnspan = 4) master0.bprov1 = Button(master0, text='Seruit Lampung', command=self.lamp1, width = 25, height=3)", "self.bprov4.grid(row = 4, column = 3, columnspan = 4) self.bprov5 = Button(self.master, text='Kepulauan", "= canvas,file=\"Bolu Kemojo.png\") canvas.create_image(5,5, anchor=NW, image=img) text1 = lst[6] text2 = 
Text(master2, font", "width = 300, height = 300) canvas.pack() img = PhotoImage(master = canvas,file=\"Luti Gendang.png\")", "= canvas,file=\"Ampiang Dadiah.png\") canvas.create_image(5,5, anchor=NW, image=img) text1 = lst[5] text2 = Text(master2, font", "= 600) master2.title(\"Makanan tradisional Sumatera Utara\") canvas = Canvas(master2, width = 300, height", "4) master0.mainloop() def dki1(self): master2 = Tk() master2.minsize(width = 800, height = 600)", "text1 = lst[58] text2 = Text(master2, font = \"Arial 12\") text2.insert(INSERT, text1) text2.pack()", "master0.bprov1 = Button(master0, text='<NAME>', command=self.malut1, width = 25, height=3) master0.bprov1.grid(row = 1, column", "columnspan = 4) self.master.mainloop() def __prov1(self): master0 = Tk() master0.minsize(width = 450, height", "300, height = 300) canvas.pack() img = PhotoImage(master = canvas,file=\"Soto Betawi.png\") canvas.create_image(5,5, anchor=NW,", "lst[27] text2 = Text(master2, font = \"Arial 12\") text2.insert(INSERT, text1) text2.pack() master2.mainloop() def", "= 600) master2.maxsize(width = 800, height = 600) master2.title(\"Makanan tradisional Sulawesi Barat\") canvas", "font = \"Arial 12\") text2.insert(INSERT, text1) text2.pack() master2.mainloop() def jambi2(self): master2 = Tk()", "Button(self.master, text='DKI Jakarta', command=self.__prov13, width = 25) self.bprov13.grid(row = 13, column = 3,", "= 4) master0.mainloop() def kalbar1(self): master2 = Tk() master2.minsize(width = 800, height =", "text='Sulawesi Utara', command=self.__prov26, width = 25) self.bprov26.grid(row = 9, column = 7, columnspan", "canvas.pack() img = PhotoImage(master = canvas,file=\"Coto Makassar.png\") canvas.create_image(5,5, anchor=NW, image=img) text1 = lst[57]", "def __prov9(self): master0 = Tk() master0.minsize(width = 450, height = 100) master0.maxsize(width =", "= Text(master2, font = \"Arial 12\") text2.insert(INSERT, text1) text2.pack() master2.mainloop() def kalut2(self): master2", "800, height 
= 600) master2.title(\"Makanan tradisional Kalimantan Selatan\") canvas = Canvas(master2, width =", "text='Asidah', command=self.riau2, width = 25, height=3) master0.bprov2.grid(row = 1, column = 6, columnspan", "= 500, height = 600) master.maxsize(width = 500, height = 600) self.master.title(\"Selamat Datang", "= Button(master0, text='Ikan Bakar Manokwari', command=self.pabar1, width = 25, height=3) master0.bprov1.grid(row = 1,", "300) canvas.pack() img = PhotoImage(master = canvas,file=\"Padamaran.png\") canvas.create_image(5,5, anchor=NW, image=img) text1 = lst[11]", "4) master0.bprov1 = Button(master0, text='Lepek Binti', command=self.beng1, width = 25, height=3) master0.bprov1.grid(row =", "Lampung.png\") canvas.create_image(5,5, anchor=NW, image=img) text1 = lst[18] text2 = Text(master2, font = \"Arial", "width = 25) self.bprov33.grid(row = 16, column = 7, columnspan = 4) self.bprov34", "Gerombyang', command=self.jateng2, width = 25, height=3) master0.bprov2.grid(row = 1, column = 6, columnspan", "\"Arial 12\") text2.insert(INSERT, text1) text2.pack() master2.mainloop() def __prov19(self): master0 = Tk() master0.minsize(width =", "__prov15(self): master0 = Tk() master0.minsize(width = 450, height = 100) master0.maxsize(width = 450,", "= 1, column = 6, columnspan = 4) master0.mainloop() def jateng1(self): master2 =", "6, columnspan = 4) master0.mainloop() def kalbar1(self): master2 = Tk() master2.minsize(width = 800,", "300, height = 300) canvas.pack() img = PhotoImage(master = canvas,file=\"Luti Gendang.png\") canvas.create_image(5,5, anchor=NW,", "Text(master2, font = \"Arial 12\") text2.insert(INSERT, text1) text2.pack() master2.mainloop() def papua2(self): master2 =", "= 300, height = 300) canvas.pack() img = PhotoImage(master = canvas,file=\"Sate Bandeng.png\") canvas.create_image(5,5,", "= 2, columnspan = 4) master0.bprov2 = Button(master0, text='Gulai Taboh', command=self.lamp2, width =", "= 4) master0.bprov1 = Button(master0, text='Binte Biluhuta', 
command=self.goron1, width = 25, height=3) master0.bprov1.grid(row", "master0.bprov2 = Button(master0, text='<NAME>', command=self.jambi2, width = 25, height=3) master0.bprov2.grid(row = 1, column", "= 1, column = 2, columnspan = 4) master0.bprov2 = Button(master0, text='Tinutuan', command=self.sulut2,", "= canvas,file=\"Nasi Lapola.png\") canvas.create_image(5,5, anchor=NW, image=img) text1 = lst[61] text2 = Text(master2, font", "master0.bprov2.grid(row = 1, column = 6, columnspan = 4) master0.mainloop() def kalteng1(self): master2", "image=img) text1 = lst[53] text2 = Text(master2, font = \"Arial 12\") text2.insert(INSERT, text1)", "canvas.create_image(5,5, anchor=NW, image=img) text1 = lst[43] text2 = Text(master2, font = \"Arial 12\")", "column = 6, columnspan = 4) master0.mainloop() def sumsel1(self): master2 = Tk() master2.minsize(width", "column = 3, columnspan = 4) self.bprov5 = Button(self.master, text='Kepulauan Riau', command=self.__prov5, width", "1, column = 2, columnspan = 4) master0.bprov2 = Button(master0, text='Bilenthango', command=self.goron2, width", "300) canvas.pack() img = PhotoImage(master = canvas,file=\"Sinonggi.png\") canvas.create_image(5,5, anchor=NW, image=img) text1 = lst[58]", "text='Seruit Lampung', command=self.lamp1, width = 25, height=3) master0.bprov1.grid(row = 1, column = 2,", "= canvas,file=\"Soto Kudus.png\") canvas.create_image(5,5, anchor=NW, image=img) text1 = lst[28] text2 = Text(master2, font", "= 300) canvas.pack() img = PhotoImage(master = canvas,file=\"Tiwul.png\") canvas.create_image(5,5, anchor=NW, image=img) text1 =", "image=img) text1 = lst[63] text2 = Text(master2, font = \"Arial 12\") text2.insert(INSERT, text1)", "img = PhotoImage(master = canvas,file=\"Soto Kudus.png\") canvas.create_image(5,5, anchor=NW, image=img) text1 = lst[28] text2", "column = 3, columnspan = 4) self.bprov14 = Button(self.master, text='Jawa Tengah', command=self.__prov14, width", "column = 6, columnspan = 4) master0.mainloop() def 
lamp1(self): master2 = Tk() master2.minsize(width", "1, column = 6, columnspan = 4) master0.mainloop() def kaltim1(self): master2 = Tk()", "text2.pack() master2.mainloop() def sulbar2(self): master2 = Tk() master2.minsize(width = 800, height = 600)", "width = 300, height = 300) canvas.pack() img = PhotoImage(master = canvas,file=\"Sambal Colo", "= 6, columnspan = 4) master0.mainloop() def sumut1(self): master2 = Tk() master2.minsize(width =", "= 4) master0.bprov1 = Button(master0, text='Tiwul', command=self.diy1, width = 25, height=3) master0.bprov1.grid(row =", "Button(master0, text='<NAME>', command=self.kalut2, width = 25, height=3) master0.bprov2.grid(row = 1, column = 6,", "self.bprov5.grid(row = 5, column = 3, columnspan = 4) self.bprov6 = Button(self.master, text='Jambi',", "ban2(self): master2 = Tk() master2.minsize(width = 800, height = 600) master2.maxsize(width = 800,", "= lst[0] text2 = Text(master2, font = \"Arial 12\") text2.insert(INSERT, text1) text2.pack() master2.mainloop()", "= 1, column = 2, columnspan = 4) master0.bprov2 = Button(master0, text='<NAME>u', command=self.bali2,", "command=self.jabar1, width = 25, height=3) master0.bprov1.grid(row = 1, column = 2, columnspan =", "def ntt1(self): master2 = Tk() master2.minsize(width = 800, height = 600) master2.maxsize(width =", "anchor=NW, image=img) text1 = lst[23] text2 = Text(master2, font = \"Arial 12\") text2.insert(INSERT,", "= 6, columnspan = 4) master0.mainloop() def lamp1(self): master2 = Tk() master2.minsize(width =", "text2.pack() master2.mainloop() def sulsel2(self): master2 = Tk() master2.minsize(width = 800, height = 600)", "= 300, height = 300) canvas.pack() img = PhotoImage(master = canvas,file=\"Coto Makassar.png\") canvas.create_image(5,5,", "Button(self.master, text='Banten', command=self.__prov11, width = 25) self.bprov11.grid(row = 11, column = 3, columnspan", "4) master0.mainloop() def sumsel1(self): master2 = Tk() master2.minsize(width = 800, height = 600)", "height=3) 
master0.bprov2.grid(row = 1, column = 6, columnspan = 4) master0.mainloop() def kalsel1(self):", "= \"Arial 12\") text2.insert(INSERT, text1) text2.pack() master2.mainloop() def bali2(self): master2 = Tk() master2.minsize(width", "text1 = lst[7] text2 = Text(master2, font = \"Arial 12\") text2.insert(INSERT, text1) text2.pack()", "25) self.bprov14.grid(row = 14, column = 3, columnspan = 4) self.bprov15 = Button(self.master,", "6, columnspan = 4) master0.mainloop() def jatim1(self): master2 = Tk() master2.minsize(width = 800,", "width = 300, height = 300) canvas.pack() img = PhotoImage(master = canvas,file=\"Sate Bulayak.png\")", "17, column = 3, columnspan = 4) self.bprov18 = Button(self.master, text='NTB', command=self.__prov18, width", "= \"Arial 12\") text2.insert(INSERT, text1) text2.pack() master2.mainloop() def __prov8(self): master0 = Tk() master0.minsize(width", "= PhotoImage(master = canvas,file=\"Tempoyak.png\") canvas.create_image(5,5, anchor=NW, image=img) text1 = lst[10] text2 = Text(master2,", "= 1, column = 6, columnspan = 4) master0.mainloop() def kalsel1(self): master2 =", "text1 = lst[46] text2 = Text(master2, font = \"Arial 12\") text2.insert(INSERT, text1) text2.pack()", "width = 25) self.bprov24.grid(row = 7, column = 7, columnspan = 4) self.bprov25", "Button(self.master, text='Sulawesi Utara', command=self.__prov26, width = 25) self.bprov26.grid(row = 9, column = 7,", "command=self.goron2, width = 25, height=3) master0.bprov2.grid(row = 1, column = 6, columnspan =", "3, columnspan = 4) self.bprov6 = Button(self.master, text='Jambi', command=self.__prov6, width = 25) self.bprov6.grid(row", "= 600) master2.maxsize(width = 800, height = 600) master2.title(\"Makanan tradisional Papua\") canvas =", "width = 25) self.bprov2.grid(row = 2, column = 3, columnspan = 4) self.bprov3", "600) master2.title(\"Makanan tradisional Sulawesi Barat \") canvas = Canvas(master2, width = 300, height", "self.bprov28.grid(row = 11, column = 7, columnspan = 4) 
self.bprov29 = Button(self.master, text='Sulawesi", "height = 100) master0.maxsize(width = 450, height = 100) master0.title(\"Direktori Makanan Tradisional Nusantara\")", "Gong.png\") canvas.create_image(5,5, anchor=NW, image=img) text1 = lst[9] text2 = Text(master2, font = \"Arial", "600) master2.title(\"Makanan tradisional Kalimantan Timur\") canvas = Canvas(master2, width = 300, height =", "= Button(master0, text='Manday', command=self.kalsel1, width = 25, height=3) master0.bprov1.grid(row = 1, column =", "canvas.pack() img = PhotoImage(master = canvas,file=\"Sei.png\") canvas.create_image(5,5, anchor=NW, image=img) text1 = lst[36] text2", "master0.mainloop() def bali1(self): master2 = Tk() master2.minsize(width = 800, height = 600) master2.maxsize(width", "canvas,file=\"Dekke Na Niura.png\") canvas.create_image(5,5, anchor=NW, image=img) text1 = lst[2] text2 = Text(master2, font", "img = PhotoImage(master = canvas,file=\"Sate Bulayak.png\") canvas.create_image(5,5, anchor=NW, image=img) text1 = lst[35] text2", "Text(master2, font = \"Arial 12\") text2.insert(INSERT, text1) text2.pack() master2.mainloop() def __prov25(self): master0 =", "tradisional Kepulauan Riau\") canvas = Canvas(master2, width = 300, height = 300) canvas.pack()", "self.bprov6 = Button(self.master, text='Jambi', command=self.__prov6, width = 25) self.bprov6.grid(row = 6, column =", "text2.insert(INSERT, text1) text2.pack() master2.mainloop() def __prov10(self): master0 = Tk() master0.minsize(width = 450, height", "image=img) text1 = lst[33] text2 = Text(master2, font = \"Arial 12\") text2.insert(INSERT, text1)", "= 2, columnspan = 4) master0.bprov1 = Button(master0, text='Mie Aceh', command=self.aceh1, width =", "PhotoImage(master = canvas,file=\"Sei.png\") canvas.create_image(5,5, anchor=NW, image=img) text1 = lst[36] text2 = Text(master2, font", "= 800, height = 600) master2.title(\"Makanan tradisional Kalimantan Selatan\") canvas = Canvas(master2, width", "4) master0.mainloop() def 
kaltim1(self): master2 = Tk() master2.minsize(width = 800, height = 600)", "column = 6, columnspan = 4) master0.mainloop() def kbang1(self): master2 = Tk() master2.minsize(width", "PhotoImage(master = canvas,file=\"Binte Biluhuta.png\") canvas.create_image(5,5, anchor=NW, image=img) text1 = lst[48] text2 = Text(master2,", "= 1, column = 2, columnspan = 4) master0.bprov2 = Button(master0, text='Jepa', command=self.sulbar2,", "image=img) text1 = lst[28] text2 = Text(master2, font = \"Arial 12\") text2.insert(INSERT, text1)", "= \"Arial 12\") text2.insert(INSERT, text1) text2.pack() master2.mainloop() def __prov10(self): master0 = Tk() master0.minsize(width", "text1) text2.pack() master2.mainloop() def __prov13(self): master0 = Tk() master0.minsize(width = 450, height =", "12\") text2.insert(INSERT, text1) text2.pack() master2.mainloop() def kalut2(self): master2 = Tk() master2.minsize(width = 800,", "columnspan = 4) self.bprov32 = Button(self.master, text='Maluku', command=self.__prov32, width = 25) self.bprov32.grid(row =", "Canvas(master2, width = 300, height = 300) canvas.pack() img = PhotoImage(master = canvas,file=\"Jadah", "img = PhotoImage(master = canvas,file=\"Tapa Kolo.png\") canvas.create_image(5,5, anchor=NW, image=img) text1 = lst[37] text2", "text2.pack() master2.mainloop() def __prov28(self): master0 = Tk() master0.minsize(width = 450, height = 100)", "command=self.papua1, width = 25, height=3) master0.bprov1.grid(row = 1, column = 2, columnspan =", "Manokwari', command=self.pabar1, width = 25, height=3) master0.bprov1.grid(row = 1, column = 2, columnspan", "master2.mainloop() def __prov2(self): master0 = Tk() master0.minsize(width = 450, height = 100) master0.maxsize(width", "= 3, columnspan = 4) self.bprov18 = Button(self.master, text='NTB', command=self.__prov18, width = 25)", "3, columnspan = 4) self.bprov16 = Button(self.master, text='Jawa Timur', command=self.__prov16, width = 25)", "= 2, columnspan = 4) master0.bprov2 = Button(master0, 
text='Tinutuan', command=self.sulut2, width = 25,", "800, height = 600) master2.title(\"Makanan tradisional Sumatera Utara\") canvas = Canvas(master2, width =", "= Button(self.master, text='Riau', command=self.__prov4, width = 25) self.bprov4.grid(row = 4, column = 3,", "text1) text2.pack() master2.mainloop() def __prov15(self): master0 = Tk() master0.minsize(width = 450, height =", "text2.insert(INSERT, text1) text2.pack() master2.mainloop() def __prov13(self): master0 = Tk() master0.minsize(width = 450, height", "height = 300) canvas.pack() img = PhotoImage(master = canvas,file=\"Tempoyak.png\") canvas.create_image(5,5, anchor=NW, image=img) text1", "image=img) text1 = lst[10] text2 = Text(master2, font = \"Arial 12\") text2.insert(INSERT, text1)", "Hiu.png\") canvas.create_image(5,5, anchor=NW, image=img) text1 = lst[13] text2 = Text(master2, font = \"Arial", "Button(master0, text='Papeda', command=self.papua2, width = 25, height=3) master0.bprov2.grid(row = 1, column = 6,", "600) master2.title(\"Makanan tradisional Papua\") canvas = Canvas(master2, width = 300, height = 300)", "= 300, height = 300) canvas.pack() img = PhotoImage(master = canvas,file=\"Kue Timpan.png\") canvas.create_image(5,5,", "600) master2.maxsize(width = 800, height = 600) master2.title(\"Makanan tradisional Sulawesi Tengah\") canvas =", "master0.bprov1.grid(row = 1, column = 2, columnspan = 4) master0.bprov2 = Button(master0, text='Gangan", "Canvas(master2, width = 300, height = 300) canvas.pack() img = PhotoImage(master = canvas,file=\"Tekwan", "text2 = Text(master2, font = \"Arial 12\") text2.insert(INSERT, text1) text2.pack() master2.mainloop() def __prov15(self):", "master0.bprov1 = Button(master0, text='Manday', command=self.kalsel1, width = 25, height=3) master0.bprov1.grid(row = 1, column", "Bakar Manokwari.png\") canvas.create_image(5,5, anchor=NW, image=img) text1 = lst[64] text2 = Text(master2, font =", "tradisional Gorontalo\") canvas = Canvas(master2, width = 300, height = 300) 
canvas.pack() img", "master2.maxsize(width = 800, height = 600) master2.title(\"Makanan tradisional Aceh\") canvas = Canvas(master2, width", "text1 = lst[67] text2 = Text(master2, font = \"Arial 12\") text2.insert(INSERT, text1) text2.pack()", "4) master0.bprov1 = Button(master0, text='Binte Biluhuta', command=self.goron1, width = 25, height=3) master0.bprov1.grid(row =", "7, columnspan = 4) self.bprov27 = Button(self.master, text='Sulawesi Barat', command=self.__prov27, width = 25)", "Text(master2, font = \"Arial 12\") text2.insert(INSERT, text1) text2.pack() master2.mainloop() def __prov6(self): master0 =", "command=self.ban1, width = 25, height=3) master0.bprov1.grid(row = 1, column = 2, columnspan =", "6, columnspan = 4) master0.mainloop() def ban1(self): master2 = Tk() master2.minsize(width = 800,", "columnspan = 4) master0.mainloop() def goron1(self): master2 = Tk() master2.minsize(width = 800, height", "= 6, columnspan = 4) master0.mainloop() def beng1(self): master2 = Tk() master2.minsize(width =", "= 6, columnspan = 4) master0.mainloop() def dki1(self): master2 = Tk() master2.minsize(width =", "tradisional Bangka Belitung\") canvas = Canvas(master2, width = 300, height = 300) canvas.pack()", "4) master0.bprov1 = Button(master0, text='Sambal Colo-colo', command=self.malu1, width = 25, height=3) master0.bprov1.grid(row =", "command=self.__prov26, width = 25) self.bprov26.grid(row = 9, column = 7, columnspan = 4)", "text2.pack() master2.mainloop() def kaltim2(self): master2 = Tk() master2.minsize(width = 800, height = 600)", "tradisional Bengkulu\") canvas = Canvas(master2, width = 300, height = 300) canvas.pack() img", "canvas.create_image(5,5, anchor=NW, image=img) text1 = lst[30] text2 = Text(master2, font = \"Arial 12\")", "= Button(master0, text='Kue Timpan', command=self.aceh2, width = 25, height=3) master0.bprov2.grid(row = 1, column", "height=3) master0.bprov2.grid(row = 1, column = 6, columnspan = 4) master0.mainloop() def sulteng1(self):", "4) 
self.bprov24 = Button(self.master, text='Kalimantan Timur', command=self.__prov24, width = 25) self.bprov24.grid(row = 7,", "font = \"Arial 12\") text2.insert(INSERT, text1) text2.pack() master2.mainloop() def kalbar2(self): master2 = Tk()", "= canvas,file=\"Jepa.png\") canvas.create_image(5,5, anchor=NW, image=img) text1 = lst[53] text2 = Text(master2, font =", "600) master2.title(\"Makanan tradisional Sulawesi Selatan\") canvas = Canvas(master2, width = 300, height =", "Datang di McDones (Direktori Macanan Tradisional)\") self.master.judul = Label(self.master, text = \"Pilih provinsi", "Nusantara\") master0.judul = Label(master0, text = \"Pilih makanan :)\", font = \"Arial 16", "width = 300, height = 300) canvas.pack() img = PhotoImage(master = canvas,file=\"Sei.png\") canvas.create_image(5,5,", "master0.judul.grid(row = 0, column = 2, columnspan = 4) master0.bprov1 = Button(master0, text='Manday',", "anchor=NW, image=img) text1 = lst[20] text2 = Text(master2, font = \"Arial 12\") text2.insert(INSERT,", "master0.judul.grid(row = 0, column = 2, columnspan = 4) master0.bprov1 = Button(master0, text='Sinonggi',", "= Button(master0, text='Gangan Asam Banjar', command=self.kalsel2, width = 25, height=3) master0.bprov2.grid(row = 1,", "25) self.bprov4.grid(row = 4, column = 3, columnspan = 4) self.bprov5 = Button(self.master,", "25) self.bprov25.grid(row = 8, column = 7, columnspan = 4) self.bprov26 = Button(self.master,", "DI Yogyakarta\") canvas = Canvas(master2, width = 300, height = 300) canvas.pack() img", "PhotoImage(master = canvas,file=\"Kalumpe.png\") canvas.create_image(5,5, anchor=NW, image=img) text1 = lst[42] text2 = Text(master2, font", "300, height = 300) canvas.pack() img = PhotoImage(master = canvas,file=\"Tapa Kolo.png\") canvas.create_image(5,5, anchor=NW,", "\"Arial 12\") text2.insert(INSERT, text1) text2.pack() master2.mainloop() def bali2(self): master2 = Tk() master2.minsize(width =", "= 4) master0.mainloop() def malu1(self): master2 = Tk() 
master2.minsize(width = 800, height =", "= 3, columnspan = 4) self.bprov14 = Button(self.master, text='Jawa Tengah', command=self.__prov14, width =", "master0.bprov1 = Button(master0, text='Nasi Sumsum', command=self.ban1, width = 25, height=3) master0.bprov1.grid(row = 1,", "master2.mainloop() def malu2(self): master2 = Tk() master2.minsize(width = 800, height = 600) master2.maxsize(width", "600) master2.title(\"Makanan tradisional Sumatera Barat\") canvas = Canvas(master2, width = 300, height =", "beng1(self): master2 = Tk() master2.minsize(width = 800, height = 600) master2.maxsize(width = 800,", "3, columnspan = 4) self.bprov8 = Button(self.master, text='Sumatera Selatan', command=self.__prov8, width = 25)", "command=self.__prov20, width = 25) self.bprov20.grid(row = 3, column = 7, columnspan = 4)", "Button(self.master, text='NTB', command=self.__prov18, width = 25) self.bprov18.grid(row = 1, column = 7, columnspan", "command=self.__prov21, width = 25) self.bprov21.grid(row = 4, column = 7, columnspan = 4)", "master2.title(\"Makanan tradisional Riau\") canvas = Canvas(master2, width = 300, height = 300) canvas.pack()", "= 300, height = 300) canvas.pack() img = PhotoImage(master = canvas,file=\"Bolu Kemojo.png\") canvas.create_image(5,5,", "text1 = lst[52] text2 = Text(master2, font = \"Arial 12\") text2.insert(INSERT, text1) text2.pack()", "height = 600) master2.title(\"Makanan tradisional Kalimnantan Timur\") canvas = Canvas(master2, width = 300,", "lst[13] text2 = Text(master2, font = \"Arial 12\") text2.insert(INSERT, text1) text2.pack() master2.mainloop() def", "img = PhotoImage(master = canvas,file=\"Kepiting Soka.png\") canvas.create_image(5,5, anchor=NW, image=img) text1 = lst[39] text2", "malut1(self): master2 = Tk() master2.minsize(width = 800, height = 600) master2.maxsize(width = 800,", "Tk() master0.minsize(width = 450, height = 100) master0.maxsize(width = 450, height = 100)", "self.bprov30.grid(row = 13, column = 7, columnspan = 4) self.bprov31 = 
Button(self.master, text='Maluku", "800, height = 600) master2.title(\"Makanan tradisional Kalimantan Tengha\") canvas = Canvas(master2, width =", "canvas.pack() img = PhotoImage(master = canvas,file=\"Tiwul.png\") canvas.create_image(5,5, anchor=NW, image=img) text1 = lst[26] text2", "canvas.create_image(5,5, anchor=NW, image=img) text1 = lst[48] text2 = Text(master2, font = \"Arial 12\")", "image=img) text1 = lst[2] text2 = Text(master2, font = \"Arial 12\") text2.insert(INSERT, text1)", "canvas.create_image(5,5, anchor=NW, image=img) text1 = lst[45] text2 = Text(master2, font = \"Arial 12\")", "self.bprov25 = Button(self.master, text='Gorontalo', command=self.__prov25, width = 25) self.bprov25.grid(row = 8, column =", "= 300, height = 300) canvas.pack() img = PhotoImage(master = canvas,file=\"Ayam Cincane.png\") canvas.create_image(5,5,", "height = 300) canvas.pack() img = PhotoImage(master = canvas,file=\"Lawa.png\") canvas.create_image(5,5, anchor=NW, image=img) text1", "sulbar1(self): master2 = Tk() master2.minsize(width = 800, height = 600) master2.maxsize(width = 800,", "Tengah', command=self.__prov28, width = 25) self.bprov28.grid(row = 11, column = 7, columnspan =", "= 300, height = 300) canvas.pack() img = PhotoImage(master = canvas,file=\"Nasi Grombyang.png\") canvas.create_image(5,5,", "800, height = 600) master2.maxsize(width = 800, height = 600) master2.title(\"Makanan tradisional Bali\")", "Text(master2, font = \"Arial 12\") text2.insert(INSERT, text1) text2.pack() master2.mainloop() def kalsel2(self): master2 =", "def aceh2(self): master2 = Tk() master2.minsize(width = 800, height = 600) master2.maxsize(width =", "= 600) master2.title(\"Makanan tradisional Kalimantan Barat\") canvas = Canvas(master2, width = 300, height", "= 2, columnspan = 4) master0.bprov2 = Button(master0, text='Kasoami', command=self.sulgar2, width = 25,", "img = PhotoImage(master = canvas,file=\"Gong Gong.png\") canvas.create_image(5,5, anchor=NW, image=img) text1 = lst[9] 
text2", "text1 = lst[19] text2 = Text(master2, font = \"Arial 12\") text2.insert(INSERT, text1) text2.pack()", "master0.bprov1 = Button(master0, text='Lawa', command=self.kalut1, width = 25, height=3) master0.bprov1.grid(row = 1, column", "300, height = 300) canvas.pack() img = PhotoImage(master = canvas,file=\"Gulai Taboh.png\") canvas.create_image(5,5, anchor=NW,", "= \"Arial 12\") text2.insert(INSERT, text1) text2.pack() master2.mainloop() def kalbar2(self): master2 = Tk() master2.minsize(width", "= 600) master2.maxsize(width = 800, height = 600) master2.title(\"Makanan tradisional Maluku\") canvas =", "text2.pack() master2.mainloop() def __prov22(self): master0 = Tk() master0.minsize(width = 450, height = 100)", "f.readline() class DirektoriMakanan(): def __init__(self, master=Tk()): self.master = master master.minsize(width = 500, height", "Canvas(master2, width = 300, height = 300) canvas.pack() img = PhotoImage(master = canvas,file=\"Kasoami.png\")", "= 600) master2.title(\"Makanan tradisional NTB\") canvas = Canvas(master2, width = 300, height =", "= 4) master0.mainloop() def sulgar1(self): master2 = Tk() master2.minsize(width = 800, height =", "img = PhotoImage(master = canvas,file=\"Binte Biluhuta.png\") canvas.create_image(5,5, anchor=NW, image=img) text1 = lst[48] text2", "Button(self.master, text='Sumatera Selatan', command=self.__prov8, width = 25) self.bprov8.grid(row = 8, column = 3,", "= 300, height = 300) canvas.pack() img = PhotoImage(master = canvas,file=\"Kasoami.png\") canvas.create_image(5,5, anchor=NW,", "img = PhotoImage(master = canvas,file=\"Kue Timpan.png\") canvas.create_image(5,5, anchor=NW, image=img) text1 = lst[1] text2", "= canvas,file=\"Bubur Paddas Sambas.png\") canvas.create_image(5,5, anchor=NW, image=img) text1 = lst[40] text2 = Text(master2,", "pabar1(self): master2 = Tk() master2.minsize(width = 800, height = 600) master2.maxsize(width = 800,", "300, height = 300) canvas.pack() img = PhotoImage(master = 
canvas,file=\"Dorokdok.png\") canvas.create_image(5,5, anchor=NW, image=img)", "command=self.sulsel2, width = 25, height=3) master0.bprov2.grid(row = 1, column = 6, columnspan =", "command=self.jambi2, width = 25, height=3) master0.bprov2.grid(row = 1, column = 6, columnspan =", "jateng2(self): master2 = Tk() master2.minsize(width = 800, height = 600) master2.maxsize(width = 800,", "self.bprov4 = Button(self.master, text='Riau', command=self.__prov4, width = 25) self.bprov4.grid(row = 4, column =", "800, height = 600) master2.title(\"Makanan tradisional Sulawesi Tengah\") canvas = Canvas(master2, width =", "not in cmp): txt += cmp cmp = f.readline() lst.append(txt) line = f.readline()", "def jateng2(self): master2 = Tk() master2.minsize(width = 800, height = 600) master2.maxsize(width =", "text='Maluku', command=self.__prov32, width = 25) self.bprov32.grid(row = 15, column = 7, columnspan =", "= lst[32] text2 = Text(master2, font = \"Arial 12\") text2.insert(INSERT, text1) text2.pack() master2.mainloop()", "text1) text2.pack() master2.mainloop() def __prov29(self): master0 = Tk() master0.minsize(width = 450, height =", "= PhotoImage(master = canvas,file=\"Sambal Colo Colo.png\") canvas.create_image(5,5, anchor=NW, image=img) text1 = lst[62] text2", "height = 300) canvas.pack() img = PhotoImage(master = canvas,file=\"Bika Ambon.png\") canvas.create_image(5,5, anchor=NW, image=img)", "= 2, columnspan = 4) master0.bprov1 = Button(master0, text='Tempoyak', command=self.jambi1, width = 25,", "800, height = 600) master2.title(\"Makanan tradisional Papua Barat\") canvas = Canvas(master2, width =", "\"Arial 12\") text2.insert(INSERT, text1) text2.pack() master2.mainloop() def __prov21(self): master0 = Tk() master0.minsize(width =", "3, columnspan = 4) self.bprov9 = Button(self.master, text='Kepulauan Bangka Belitung', command=self.__prov9, width =", "= 4) master0.bprov1 = Button(master0, text='Lepek Binti', command=self.beng1, width = 25, height=3) 
master0.bprov1.grid(row", "= 4) master0.bprov1 = Button(master0, text='Apang Bugis', command=self.sulbar1, width = 25, height=3) master0.bprov1.grid(row", "command=self.goron1, width = 25, height=3) master0.bprov1.grid(row = 1, column = 2, columnspan =", "= 25) self.bprov8.grid(row = 8, column = 3, columnspan = 4) self.bprov9 =", "text2 = Text(master2, font = \"Arial 12\") text2.insert(INSERT, text1) text2.pack() master2.mainloop() def __prov30(self):", "Palembang', command=self.sumsel1, width = 25, height=3) master0.bprov1.grid(row = 1, column = 2, columnspan", "= 2, columnspan = 4) master0.bprov2 = Button(master0, text='<NAME>', command=self.kalbar2, width = 25,", "7, columnspan = 4) self.bprov32 = Button(self.master, text='Maluku', command=self.__prov32, width = 25) self.bprov32.grid(row", "= PhotoImage(master = canvas,file=\"Ayam Cincane.png\") canvas.create_image(5,5, anchor=NW, image=img) text1 = lst[46] text2 =", "300, height = 300) canvas.pack() img = PhotoImage(master = canvas,file=\"Sate Ulat Sagu.png\") canvas.create_image(5,5,", "column = 6, columnspan = 4) master0.mainloop() def jateng1(self): master2 = Tk() master2.minsize(width", "300, height = 300) canvas.pack() img = PhotoImage(master = canvas,file=\"Bubur Paddas Sambas.png\") canvas.create_image(5,5,", "image=img) text1 = lst[43] text2 = Text(master2, font = \"Arial 12\") text2.insert(INSERT, text1)", "300, height = 300) canvas.pack() img = PhotoImage(master = canvas,file=\"Bolu Kemojo.png\") canvas.create_image(5,5, anchor=NW,", "def __prov10(self): master0 = Tk() master0.minsize(width = 450, height = 100) master0.maxsize(width =", "= \"Arial 12\") text2.insert(INSERT, text1) text2.pack() master2.mainloop() def __prov7(self): master0 = Tk() master0.minsize(width", "text2.pack() master2.mainloop() def diy2(self): master2 = Tk() master2.minsize(width = 800, height = 600)", "12\") text2.insert(INSERT, text1) text2.pack() master2.mainloop() def jateng2(self): master2 = Tk() master2.minsize(width = 
800,", "text1 = lst[40] text2 = Text(master2, font = \"Arial 12\") text2.insert(INSERT, text1) text2.pack()", "= canvas,file=\"Klapertaart.png\") canvas.create_image(5,5, anchor=NW, image=img) text1 = lst[50] text2 = Text(master2, font =", "= 25) self.bprov16.grid(row = 16, column = 3, columnspan = 4) self.bprov17 =", "\"Arial 12\") text2.insert(INSERT, text1) text2.pack() master2.mainloop() def __prov28(self): master0 = Tk() master0.minsize(width =", "300) canvas.pack() img = PhotoImage(master = canvas,file=\"Uta Kelo.png\") canvas.create_image(5,5, anchor=NW, image=img) text1 =", "= 300) canvas.pack() img = PhotoImage(master = canvas,file=\"Coto Makassar.png\") canvas.create_image(5,5, anchor=NW, image=img) text1", "= 4) master0.bprov1 = Button(master0, text='<NAME>', command=self.ntb1, width = 25, height=3) master0.bprov1.grid(row =", "= lst[47] text2 = Text(master2, font = \"Arial 12\") text2.insert(INSERT, text1) text2.pack() master2.mainloop()", "anchor=NW, image=img) text1 = lst[13] text2 = Text(master2, font = \"Arial 12\") text2.insert(INSERT,", "sulut2(self): master2 = Tk() master2.minsize(width = 800, height = 600) master2.maxsize(width = 800,", "column = 2, columnspan = 4) master0.bprov1 = Button(master0, text='Sinonggi', command=self.sulgar1, width =", "1, column = 2, columnspan = 4) master0.bprov2 = Button(master0, text='Tekwan Palembang', command=self.sumsel2,", "text2.pack() master2.mainloop() def __prov18(self): master0 = Tk() master0.minsize(width = 450, height = 100)", "di McDones (Direktori Macanan Tradisional)\") self.master.judul = Label(self.master, text = \"Pilih provinsi yang", "lst[15] text2 = Text(master2, font = \"Arial 12\") text2.insert(INSERT, text1) text2.pack() master2.mainloop() def", "= 6, columnspan = 4) master0.mainloop() def jateng1(self): master2 = Tk() master2.minsize(width =", "= 300, height = 300) canvas.pack() img = PhotoImage(master = canvas,file=\"Ayam Betutu.png\") canvas.create_image(5,5,", "Canvas(master2, width = 
300, height = 300) canvas.pack() img = PhotoImage(master = canvas,file=\"Bika", "\"Arial 12\") text2.insert(INSERT, text1) text2.pack() master2.mainloop() def ntt2(self): master2 = Tk() master2.minsize(width =", "= Text(master2, font = \"Arial 12\") text2.insert(INSERT, text1) text2.pack() master2.mainloop() def __prov15(self): master0", "= '' txt = '' while('<end>' not in cmp): txt += cmp cmp", "= 1, column = 6, columnspan = 4) master0.mainloop() def kaltim1(self): master2 =", "column = 2, columnspan = 4) master0.bprov2 = Button(master0, text='Kaledo', command=self.sulteng2, width =", "Button(master0, text='Jepa', command=self.sulbar2, width = 25, height=3) master0.bprov2.grid(row = 1, column = 6,", "= 600) master2.maxsize(width = 800, height = 600) master2.title(\"Makanan tradisional Jawa TEngah\") canvas", "= \"Arial 12\") text2.insert(INSERT, text1) text2.pack() master2.mainloop() def __prov28(self): master0 = Tk() master0.minsize(width", "800, height = 600) master2.title(\"Makanan tradisional Kalimantan Barat\") canvas = Canvas(master2, width =", "= lst[40] text2 = Text(master2, font = \"Arial 12\") text2.insert(INSERT, text1) text2.pack() master2.mainloop()", "width = 25) self.bprov12.grid(row = 12, column = 3, columnspan = 4) self.bprov13", "= PhotoImage(master = canvas,file=\"Mie Aceh.png\") canvas.create_image(5,5, anchor=NW, image=img) text1 = lst[0] text2 =", "canvas.pack() img = PhotoImage(master = canvas,file=\"Binte Biluhuta.png\") canvas.create_image(5,5, anchor=NW, image=img) text1 = lst[48]", "text2.pack() master2.mainloop() def __prov10(self): master0 = Tk() master0.minsize(width = 450, height = 100)", "= Text(master2, font = \"Arial 12\") text2.insert(INSERT, text1) text2.pack() master2.mainloop() def sumbar2(self): master2", "text1) text2.pack() master2.mainloop() def lamp2(self): master2 = Tk() master2.minsize(width = 800, height =", "12\") text2.insert(INSERT, text1) text2.pack() master2.mainloop() def sulteng2(self): master2 = Tk() 
master2.minsize(width = 800,", "height = 300) canvas.pack() img = PhotoImage(master = canvas,file=\"Sinonggi.png\") canvas.create_image(5,5, anchor=NW, image=img) text1", "command=self.__prov12, width = 25) self.bprov12.grid(row = 12, column = 3, columnspan = 4)", "column = 6, columnspan = 4) master0.mainloop() def bali1(self): master2 = Tk() master2.minsize(width", "master0.bprov1 = Button(master0, text='Binte Biluhuta', command=self.goron1, width = 25, height=3) master0.bprov1.grid(row = 1,", "= 300, height = 300) canvas.pack() img = PhotoImage(master = canvas,file=\"Soto Kudus.png\") canvas.create_image(5,5,", "canvas.pack() img = PhotoImage(master = canvas,file=\"Kalumpe.png\") canvas.create_image(5,5, anchor=NW, image=img) text1 = lst[42] text2", "anchor=NW, image=img) text1 = lst[21] text2 = Text(master2, font = \"Arial 12\") text2.insert(INSERT,", "Canvas(master2, width = 300, height = 300) canvas.pack() img = PhotoImage(master = canvas,file=\"Rendang.png\")", "= 4) master0.bprov2 = Button(master0, text='Bika Ambon', command=self.sumut2, width = 25, height=3) master0.bprov2.grid(row", "= lst[50] text2 = Text(master2, font = \"Arial 12\") text2.insert(INSERT, text1) text2.pack() master2.mainloop()", "= 4, column = 7, columnspan = 4) self.bprov22 = Button(self.master, text='Kalimantan Tengah',", "= canvas,file=\"Dekke Na Niura.png\") canvas.create_image(5,5, anchor=NW, image=img) text1 = lst[2] text2 = Text(master2,", "text2.insert(INSERT, text1) text2.pack() master2.mainloop() def sulsel2(self): master2 = Tk() master2.minsize(width = 800, height", "text1 = lst[5] text2 = Text(master2, font = \"Arial 12\") text2.insert(INSERT, text1) text2.pack()", "= Button(master0, text='<NAME>', command=self.ban2, width = 25, height=3) master0.bprov2.grid(row = 1, column =", "= 300) canvas.pack() img = PhotoImage(master = canvas,file=\"Binte Biluhuta.png\") canvas.create_image(5,5, anchor=NW, image=img) text1", "0, column = 2, columnspan = 4) master0.bprov1 = 
Button(master0, text='Sinonggi', command=self.sulgar1, width", "text='Jawa Barat', command=self.__prov12, width = 25) self.bprov12.grid(row = 12, column = 3, columnspan", "= 2, columnspan = 4) master0.bprov1 = Button(master0, text='Kelapertaar', command=self.sulut1, width = 25,", "= 4) self.bprov33 = Button(self.master, text='Papua Barat', command=self.__prov33, width = 25) self.bprov33.grid(row =", ":)\", font = \"Arial 16 bold\") master0.judul.grid(row = 0, column = 2, columnspan", "2, columnspan = 4) master0.bprov1 = Button(master0, text='<NAME>', command=self.papua1, width = 25, height=3)", "width = 25) self.bprov27.grid(row = 10, column = 7, columnspan = 4) self.bprov28", "master0.mainloop() def sumut1(self): master2 = Tk() master2.minsize(width = 800, height = 600) master2.maxsize(width", "4) master0.bprov1 = Button(master0, text='Uta Kelo', command=self.sulteng1, width = 25, height=3) master0.bprov1.grid(row =", "columnspan = 4) master0.mainloop() def sulut1(self): master2 = Tk() master2.minsize(width = 800, height", "= 4) master0.bprov1 = Button(master0, text='Rendang', command=self.sumbar1, width = 25, height=3) master0.bprov1.grid(row =", "text2.insert(INSERT, text1) text2.pack() master2.mainloop() def __prov29(self): master0 = Tk() master0.minsize(width = 450, height", "ingin anda ketahui\", font = \"Arial 16 bold\") self.master.judul.grid(row = 0, column =", "def ntb1(self): master2 = Tk() master2.minsize(width = 800, height = 600) master2.maxsize(width =", "anchor=NW, image=img) text1 = lst[18] text2 = Text(master2, font = \"Arial 12\") text2.insert(INSERT,", "master2.mainloop() def kriau2(self): master2 = Tk() master2.minsize(width = 800, height = 600) master2.maxsize(width", "Button(master0, text='Asidah', command=self.riau2, width = 25, height=3) master0.bprov2.grid(row = 1, column = 6,", "= \"Arial 12\") text2.insert(INSERT, text1) text2.pack() master2.mainloop() def kalsel2(self): master2 = Tk() master2.minsize(width", "text2.pack() 
master2.mainloop() def __prov33(self): master0 = Tk() master0.minsize(width = 450, height = 100)", "300, height = 300) canvas.pack() img = PhotoImage(master = canvas,file=\"Martabak Bangka.png\") canvas.create_image(5,5, anchor=NW,", "Bekekpor', command=self.kaltim2, width = 25, height=3) master0.bprov2.grid(row = 1, column = 6, columnspan", "Sulawesi Selatan\") canvas = Canvas(master2, width = 300, height = 300) canvas.pack() img", "= Text(master2, font = \"Arial 12\") text2.insert(INSERT, text1) text2.pack() master2.mainloop() if __name__ ==", "= Button(master0, text='Kaledo', command=self.sulteng2, width = 25, height=3) master0.bprov2.grid(row = 1, column =", "= Button(self.master, text='Jawa Barat', command=self.__prov12, width = 25) self.bprov12.grid(row = 12, column =", "canvas,file=\"Manday.png\") canvas.create_image(5,5, anchor=NW, image=img) text1 = lst[44] text2 = Text(master2, font = \"Arial", "line = f.readline() cmp = '' txt = '' while('<end>' not in cmp):", "= 4) self.bprov15 = Button(self.master, text='DI Yogyakarta', command=self.__prov15, width = 25) self.bprov15.grid(row =", "= 4) master0.bprov2 = Button(master0, text='Ampiang Dadiah', command=self.sumbar2, width = 25, height=3) master0.bprov2.grid(row", "column = 2, columnspan = 4) master0.bprov2 = Button(master0, text='<NAME>', command=self.dki2, width =", "font = \"Arial 12\") text2.insert(INSERT, text1) text2.pack() master2.mainloop() def jateng2(self): master2 = Tk()", "column = 3, columnspan = 4) self.bprov12 = Button(self.master, text='Jawa Barat', command=self.__prov12, width", "3, columnspan = 4) self.bprov2 = Button(self.master, text='Sumatera Utara', command=self.__prov2, width = 25)", "img = PhotoImage(master = canvas,file=\"Kaledo.png\") canvas.create_image(5,5, anchor=NW, image=img) text1 = lst[55] text2 =", "600) master2.maxsize(width = 800, height = 600) master2.title(\"Makanan tradisional Sumatera Barat\") canvas =", "= 2, columnspan = 4) master0.bprov2 = Button(master0, 
text='Tapa Kolo', command=self.ntt2, width =", "Button(master0, text='<NAME>', command=self.malut2, width = 25, height=3) master0.bprov2.grid(row = 1, column = 6,", "master2.maxsize(width = 800, height = 600) master2.title(\"Makanan tradisional Maluku\") canvas = Canvas(master2, width", "450, height = 100) master0.title(\"Direktori Makanan Tradisional Nusantara\") master0.judul = Label(master0, text =", "PhotoImage(master = canvas,file=\"Rawon.png\") canvas.create_image(5,5, anchor=NW, image=img) text1 = lst[31] text2 = Text(master2, font", "= lst[42] text2 = Text(master2, font = \"Arial 12\") text2.insert(INSERT, text1) text2.pack() master2.mainloop()", "Tenggara\") canvas = Canvas(master2, width = 300, height = 300) canvas.pack() img =", "= 300) canvas.pack() img = PhotoImage(master = canvas,file=\"Mie Aceh.png\") canvas.create_image(5,5, anchor=NW, image=img) text1", "1, column = 2, columnspan = 4) master0.bprov2 = Button(master0, text='Jepa', command=self.sulbar2, width", "master0.bprov1.grid(row = 1, column = 2, columnspan = 4) master0.bprov2 = Button(master0, text='<NAME>u',", "= 300, height = 300) canvas.pack() img = PhotoImage(master = canvas,file=\"Manday.png\") canvas.create_image(5,5, anchor=NW,", "= 600) master2.title(\"Makanan tradisional Sulawesi Utara\") canvas = Canvas(master2, width = 300, height", "master0.bprov1 = Button(master0, text='Uta Kelo', command=self.sulteng1, width = 25, height=3) master0.bprov1.grid(row = 1,", "self.master.judul.grid(row = 0, column = 3, columnspan = 8) self.bprov1 = Button(self.master, text='Aceh',", "column = 7, columnspan = 4) self.bprov23 = Button(self.master, text='Kalimantan Selatan', command=self.__prov23, width", "4) master0.mainloop() def beng1(self): master2 = Tk() master2.minsize(width = 800, height = 600)", "= canvas,file=\"Apang Bugis.png\") canvas.create_image(5,5, anchor=NW, image=img) text1 = lst[52] text2 = Text(master2, font", "__prov22(self): master0 = Tk() master0.minsize(width = 450, height = 100) 
master0.maxsize(width = 450,", "img = PhotoImage(master = canvas,file=\"Bika Ambon.png\") canvas.create_image(5,5, anchor=NW, image=img) text1 = lst[3] text2", "= 300, height = 300) canvas.pack() img = PhotoImage(master = canvas,file=\"Rawon.png\") canvas.create_image(5,5, anchor=NW,", "text1) text2.pack() master2.mainloop() def ntt2(self): master2 = Tk() master2.minsize(width = 800, height =", "canvas,file=\"Jepa.png\") canvas.create_image(5,5, anchor=NW, image=img) text1 = lst[53] text2 = Text(master2, font = \"Arial", "master0.judul.grid(row = 0, column = 2, columnspan = 4) master0.bprov1 = Button(master0, text='Sambal", "canvas,file=\"Sinonggi.png\") canvas.create_image(5,5, anchor=NW, image=img) text1 = lst[58] text2 = Text(master2, font = \"Arial", "= 600) master2.maxsize(width = 800, height = 600) master2.title(\"Makanan tradisional Sulawesi Barat \")", "McDones (Direktori Macanan Tradisional)\") self.master.judul = Label(self.master, text = \"Pilih provinsi yang ingin", "= 4) master0.bprov1 = Button(master0, text='Ikan Bakar Manokwari', command=self.pabar1, width = 25, height=3)", "Button(master0, text='Kelapertaar', command=self.sulut1, width = 25, height=3) master0.bprov1.grid(row = 1, column = 2,", "master2.mainloop() def bali2(self): master2 = Tk() master2.minsize(width = 800, height = 600) master2.maxsize(width", "= 25) self.bprov20.grid(row = 3, column = 7, columnspan = 4) self.bprov21 =", "= 300, height = 300) canvas.pack() img = PhotoImage(master = canvas,file=\"Kepiting Soka.png\") canvas.create_image(5,5,", "= 4) master0.mainloop() def kalteng1(self): master2 = Tk() master2.minsize(width = 800, height =", "text2 = Text(master2, font = \"Arial 12\") text2.insert(INSERT, text1) text2.pack() master2.mainloop() def sumbar2(self):", "= 25) self.bprov28.grid(row = 11, column = 7, columnspan = 4) self.bprov29 =", "master2.title(\"Makanan tradisional Banten\") canvas = Canvas(master2, width = 300, height = 300) canvas.pack()", "lst[23] text2 = 
Text(master2, font = \"Arial 12\") text2.insert(INSERT, text1) text2.pack() master2.mainloop() def", "Text(master2, font = \"Arial 12\") text2.insert(INSERT, text1) text2.pack() master2.mainloop() def __prov18(self): master0 =", "Button(self.master, text='Kalimantan Barat', command=self.__prov21, width = 25) self.bprov21.grid(row = 4, column = 7,", "lst[37] text2 = Text(master2, font = \"Arial 12\") text2.insert(INSERT, text1) text2.pack() master2.mainloop() def", "master2.mainloop() def sulut2(self): master2 = Tk() master2.minsize(width = 800, height = 600) master2.maxsize(width", "4) master0.bprov1 = Button(master0, text='Sinonggi', command=self.sulgar1, width = 25, height=3) master0.bprov1.grid(row = 1,", "= lst[64] text2 = Text(master2, font = \"Arial 12\") text2.insert(INSERT, text1) text2.pack() master2.mainloop()", "column = 2, columnspan = 4) master0.bprov2 = Button(master0, text='Tinutuan', command=self.sulut2, width =", "columnspan = 4) master0.bprov2 = Button(master0, text='Papeda', command=self.papua2, width = 25, height=3) master0.bprov2.grid(row", "readf(): with open('all.txt', 'r') as f: line = '' for i in range(68):", "= 450, height = 100) master0.title(\"Direktori Makanan Tradisional Nusantara\") master0.judul = Label(master0, text", "Button(self.master, text='Kepulauan Riau', command=self.__prov5, width = 25) self.bprov5.grid(row = 5, column = 3,", "= 4) master0.mainloop() def sumut1(self): master2 = Tk() master2.minsize(width = 800, height =", "Button(master0, text='<NAME>', command=self.dki2, width = 25, height=3) master0.bprov2.grid(row = 1, column = 6,", "width = 300, height = 300) canvas.pack() img = PhotoImage(master = canvas,file=\"Dekke Na", "1, column = 2, columnspan = 4) master0.bprov2 = Button(master0, text='Gong gong', command=self.kriau2,", "column = 2, columnspan = 4) master0.bprov2 = Button(master0, text='<NAME>u', command=self.bali2, width =", "= 4) master0.mainloop() def sulbar1(self): master2 = Tk() master2.minsize(width = 800, 
height =", "4) self.bprov8 = Button(self.master, text='Sumatera Selatan', command=self.__prov8, width = 25) self.bprov8.grid(row = 8,", "= 300) canvas.pack() img = PhotoImage(master = canvas,file=\"Papeda.png\") canvas.create_image(5,5, anchor=NW, image=img) text1 =", "= 25) self.bprov19.grid(row = 2, column = 7, columnspan = 4) self.bprov20 =", "img = PhotoImage(master = canvas,file=\"Ayam Betutu.png\") canvas.create_image(5,5, anchor=NW, image=img) text1 = lst[33] text2", "= 600) master2.title(\"Makanan tradisional Papua\") canvas = Canvas(master2, width = 300, height =", "600) self.master.title(\"Selamat Datang di McDones (Direktori Macanan Tradisional)\") self.master.judul = Label(self.master, text =", "canvas.pack() img = PhotoImage(master = canvas,file=\"Tinutuan.png\") canvas.create_image(5,5, anchor=NW, image=img) text1 = lst[51] text2", "= 300, height = 300) canvas.pack() img = PhotoImage(master = canvas,file=\"Dorokdok.png\") canvas.create_image(5,5, anchor=NW,", "Aceh\") canvas = Canvas(master2, width = 300, height = 300) canvas.pack() img =", "12\") text2.insert(INSERT, text1) text2.pack() master2.mainloop() def sulgar2(self): master2 = Tk() master2.minsize(width = 800,", "lst[52] text2 = Text(master2, font = \"Arial 12\") text2.insert(INSERT, text1) text2.pack() master2.mainloop() def", "= 1, column = 6, columnspan = 4) master0.mainloop() def riau1(self): master2 =", "Sumatera Selatan\") canvas = Canvas(master2, width = 300, height = 300) canvas.pack() img", "text='Ikan Bakar Manokwari', command=self.pabar1, width = 25, height=3) master0.bprov1.grid(row = 1, column =", "master0.bprov1.grid(row = 1, column = 2, columnspan = 4) master0.bprov2 = Button(master0, text='Belacan", "= PhotoImage(master = canvas,file=\"Bilenthango.png\") canvas.create_image(5,5, anchor=NW, image=img) text1 = lst[49] text2 = Text(master2,", "Sulawesi Barat \") canvas = Canvas(master2, width = 300, height = 300) canvas.pack()", "= 300) canvas.pack() img = PhotoImage(master = 
canvas,file=\"Nasi Bekepor.png\") canvas.create_image(5,5, anchor=NW, image=img) text1", "text2.insert(INSERT, text1) text2.pack() master2.mainloop() def sulgar2(self): master2 = Tk() master2.minsize(width = 800, height", "canvas.pack() img = PhotoImage(master = canvas,file=\"Luti Gendang.png\") canvas.create_image(5,5, anchor=NW, image=img) text1 = lst[8]", "= 300) canvas.pack() img = PhotoImage(master = canvas,file=\"Kaledo.png\") canvas.create_image(5,5, anchor=NW, image=img) text1 =", "height = 300) canvas.pack() img = PhotoImage(master = canvas,file=\"Uta Kelo.png\") canvas.create_image(5,5, anchor=NW, image=img)", "300, height = 300) canvas.pack() img = PhotoImage(master = canvas,file=\"Mie Aceh.png\") canvas.create_image(5,5, anchor=NW,", "canvas,file=\"Pempek Palembang.png\") canvas.create_image(5,5, anchor=NW, image=img) text1 = lst[14] text2 = Text(master2, font =", "2, columnspan = 4) master0.bprov2 = Button(master0, text='Bagar Hiu', command=self.beng2, width = 25,", "sumsel1(self): master2 = Tk() master2.minsize(width = 800, height = 600) master2.maxsize(width = 800,", "__prov10(self): master0 = Tk() master0.minsize(width = 450, height = 100) master0.maxsize(width = 450,", "Jawa Timur\") canvas = Canvas(master2, width = 300, height = 300) canvas.pack() img", "Text(master2, font = \"Arial 12\") text2.insert(INSERT, text1) text2.pack() master2.mainloop() def ntb2(self): master2 =", "height = 600) master2.title(\"Makanan tradisional Lampung\") canvas = Canvas(master2, width = 300, height", "text1) text2.pack() master2.mainloop() def __prov31(self): master0 = Tk() master0.minsize(width = 450, height =", "font = \"Arial 12\") text2.insert(INSERT, text1) text2.pack() master2.mainloop() def __prov29(self): master0 = Tk()", "= 2, columnspan = 4) master0.bprov1 = Button(master0, text='Sinonggi', command=self.sulgar1, width = 25,", "4) master0.mainloop() def aceh1(self): master2 = Tk() master2.minsize(width = 800, height = 600)", "Canvas(master2, width = 300, 
height = 300) canvas.pack() img = PhotoImage(master = canvas,file=\"Klapertaart.png\")", "canvas.pack() img = PhotoImage(master = canvas,file=\"Kepiting Soka.png\") canvas.create_image(5,5, anchor=NW, image=img) text1 = lst[39]", "master2.mainloop() def kalut2(self): master2 = Tk() master2.minsize(width = 800, height = 600) master2.maxsize(width", "master2.maxsize(width = 800, height = 600) master2.title(\"Makanan tradisional Lampung\") canvas = Canvas(master2, width", "= PhotoImage(master = canvas,file=\"Soto Betawi.png\") canvas.create_image(5,5, anchor=NW, image=img) text1 = lst[20] text2 =", "img = PhotoImage(master = canvas,file=\"Padamaran.png\") canvas.create_image(5,5, anchor=NW, image=img) text1 = lst[11] text2 =", "master0.bprov2 = Button(master0, text='Bilenthango', command=self.goron2, width = 25, height=3) master0.bprov2.grid(row = 1, column", "1, column = 6, columnspan = 4) master0.mainloop() def sulbar1(self): master2 = Tk()", "font = \"Arial 12\") text2.insert(INSERT, text1) text2.pack() master2.mainloop() def __prov15(self): master0 = Tk()", "text1 = lst[48] text2 = Text(master2, font = \"Arial 12\") text2.insert(INSERT, text1) text2.pack()", "def __prov32(self): master0 = Tk() master0.minsize(width = 450, height = 100) master0.maxsize(width =", "def __prov30(self): master0 = Tk() master0.minsize(width = 450, height = 100) master0.maxsize(width =", "def jambi1(self): master2 = Tk() master2.minsize(width = 800, height = 600) master2.maxsize(width =", "25) self.bprov19.grid(row = 2, column = 7, columnspan = 4) self.bprov20 = Button(self.master,", "text1 = lst[39] text2 = Text(master2, font = \"Arial 12\") text2.insert(INSERT, text1) text2.pack()", "Utara', command=self.__prov2, width = 25) self.bprov2.grid(row = 2, column = 3, columnspan =", "Canvas(master2, width = 300, height = 300) canvas.pack() img = PhotoImage(master = canvas,file=\"Kohu", "= 2, columnspan = 4) master0.bprov2 = Button(master0, text='Sate Ulat Sagu', command=self.pabar2, width", 
"Sulawesi Tenggara\") canvas = Canvas(master2, width = 300, height = 300) canvas.pack() img", "= 2, columnspan = 4) master0.bprov1 = Button(master0, text='<NAME>', command=self.ntb1, width = 25,", "width = 25) self.bprov4.grid(row = 4, column = 3, columnspan = 4) self.bprov5", "12\") text2.insert(INSERT, text1) text2.pack() master2.mainloop() def __prov13(self): master0 = Tk() master0.minsize(width = 450,", "__prov21(self): master0 = Tk() master0.minsize(width = 450, height = 100) master0.maxsize(width = 450,", "def sumbar2(self): master2 = Tk() master2.minsize(width = 800, height = 600) master2.maxsize(width =", "Selatan', command=self.__prov29, width = 25) self.bprov29.grid(row = 12, column = 7, columnspan =", "text2.insert(INSERT, text1) text2.pack() master2.mainloop() def __prov31(self): master0 = Tk() master0.minsize(width = 450, height", "Text(master2, font = \"Arial 12\") text2.insert(INSERT, text1) text2.pack() master2.mainloop() def __prov28(self): master0 =", "canvas.create_image(5,5, anchor=NW, image=img) text1 = lst[41] text2 = Text(master2, font = \"Arial 12\")", "lst[46] text2 = Text(master2, font = \"Arial 12\") text2.insert(INSERT, text1) text2.pack() master2.mainloop() def", "text2 = Text(master2, font = \"Arial 12\") text2.insert(INSERT, text1) text2.pack() master2.mainloop() def __prov18(self):", "4) self.bprov19 = Button(self.master, text='NTT', command=self.__prov19, width = 25) self.bprov19.grid(row = 2, column", "= 4) master0.mainloop() def sulut1(self): master2 = Tk() master2.minsize(width = 800, height =", "= Button(master0, text='Bika Ambon', command=self.sumut2, width = 25, height=3) master0.bprov2.grid(row = 1, column", "text1) text2.pack() master2.mainloop() def malut2(self): master2 = Tk() master2.minsize(width = 800, height =", "= 600) master2.maxsize(width = 800, height = 600) master2.title(\"Makanan tradisional Sulawesi Tengah\") canvas", "master2.maxsize(width = 800, height = 600) master2.title(\"Makanan tradisional Sulawesi 
Utara\") canvas = Canvas(master2,", "master2.mainloop() def __prov11(self): master0 = Tk() master0.minsize(width = 450, height = 100) master0.maxsize(width", "500, height = 600) master.maxsize(width = 500, height = 600) self.master.title(\"Selamat Datang di", "text1 = lst[25] text2 = Text(master2, font = \"Arial 12\") text2.insert(INSERT, text1) text2.pack()", "= 300) canvas.pack() img = PhotoImage(master = canvas,file=\"Sinonggi.png\") canvas.create_image(5,5, anchor=NW, image=img) text1 =", "1, column = 6, columnspan = 4) master0.mainloop() def jateng1(self): master2 = Tk()", "= Text(master2, font = \"Arial 12\") text2.insert(INSERT, text1) text2.pack() master2.mainloop() def kalteng2(self): master2", "column = 2, columnspan = 4) master0.bprov2 = Button(master0, text='Belacan Belitung', command=self.kbang2, width", "text2.insert(INSERT, text1) text2.pack() master2.mainloop() def papua2(self): master2 = Tk() master2.minsize(width = 800, height", "= Button(master0, text='<NAME>', command=self.riau1, width = 25, height=3) master0.bprov1.grid(row = 1, column =", "text2.insert(INSERT, text1) text2.pack() master2.mainloop() def __prov2(self): master0 = Tk() master0.minsize(width = 450, height", "4) self.bprov21 = Button(self.master, text='Kalimantan Barat', command=self.__prov21, width = 25) self.bprov21.grid(row = 4,", "= 2, columnspan = 4) master0.bprov1 = Button(master0, text='Bubur Paddas Sambas', command=self.kalbar1, width", "NTB\") canvas = Canvas(master2, width = 300, height = 300) canvas.pack() img =", "def dki2(self): master2 = Tk() master2.minsize(width = 800, height = 600) master2.maxsize(width =", "canvas.pack() img = PhotoImage(master = canvas,file=\"Ikan Bakar Manokwari.png\") canvas.create_image(5,5, anchor=NW, image=img) text1 =", "Button(master0, text='Soto Kudus', command=self.jateng1, width = 25, height=3) master0.bprov1.grid(row = 1, column =", "\"Arial 12\") text2.insert(INSERT, text1) text2.pack() master2.mainloop() def __prov15(self): master0 = 
Tk() master0.minsize(width =", "300) canvas.pack() img = PhotoImage(master = canvas,file=\"Jepa.png\") canvas.create_image(5,5, anchor=NW, image=img) text1 = lst[53]", "img = PhotoImage(master = canvas,file=\"Bilenthango.png\") canvas.create_image(5,5, anchor=NW, image=img) text1 = lst[49] text2 =", "= Button(master0, text='<NAME>', command=self.malut1, width = 25, height=3) master0.bprov1.grid(row = 1, column =", "column = 2, columnspan = 4) master0.bprov2 = Button(master0, text='Bagar Hiu', command=self.beng2, width", "canvas.create_image(5,5, anchor=NW, image=img) text1 = lst[63] text2 = Text(master2, font = \"Arial 12\")", "master2.mainloop() def __prov5(self): master0 = Tk() master0.minsize(width = 450, height = 100) master0.maxsize(width", "text2.insert(INSERT, text1) text2.pack() master2.mainloop() def __prov20(self): master0 = Tk() master0.minsize(width = 450, height", "height = 300) canvas.pack() img = PhotoImage(master = canvas,file=\"Papeda.png\") canvas.create_image(5,5, anchor=NW, image=img) text1", "sumbar1(self): master2 = Tk() master2.minsize(width = 800, height = 600) master2.maxsize(width = 800,", "text='Papua Barat', command=self.__prov33, width = 25) self.bprov33.grid(row = 16, column = 7, columnspan", "columnspan = 4) master0.bprov2 = Button(master0, text='Dorokdok', command=self.jabar2, width = 25, height=3) master0.bprov2.grid(row", "2, columnspan = 4) master0.bprov1 = Button(master0, text='<NAME>', command=self.kaltim1, width = 25, height=3)", "column = 7, columnspan = 4) self.bprov21 = Button(self.master, text='Kalimantan Barat', command=self.__prov21, width", "= 600) master2.title(\"Makanan tradisional Kalimantan Tengha\") canvas = Canvas(master2, width = 300, height", "= Button(self.master, text='Lampung', command=self.__prov10, width = 25) self.bprov10.grid(row = 10, column = 3,", "= 6, columnspan = 4) master0.mainloop() def kbang1(self): master2 = Tk() master2.minsize(width =", "width = 25) self.bprov9.grid(row = 9, column = 3, 
columnspan = 4) self.bprov10", "def malut2(self): master2 = Tk() master2.minsize(width = 800, height = 600) master2.maxsize(width =", "height = 300) canvas.pack() img = PhotoImage(master = canvas,file=\"Kue Timpan.png\") canvas.create_image(5,5, anchor=NW, image=img)", "text1) text2.pack() master2.mainloop() def __prov25(self): master0 = Tk() master0.minsize(width = 450, height =", "600) master2.title(\"Makanan tradisional Kepulauan Riau\") canvas = Canvas(master2, width = 300, height =", "300, height = 300) canvas.pack() img = PhotoImage(master = canvas,file=\"Sei.png\") canvas.create_image(5,5, anchor=NW, image=img)", "= 100) master0.maxsize(width = 450, height = 100) master0.title(\"Direktori Makanan Tradisional Nusantara\") master0.judul", "Button(master0, text='Seruit Lampung', command=self.lamp1, width = 25, height=3) master0.bprov1.grid(row = 1, column =", "height = 600) master2.maxsize(width = 800, height = 600) master2.title(\"Makanan tradisional NTT\") canvas", "master0.judul.grid(row = 0, column = 2, columnspan = 4) master0.bprov1 = Button(master0, text='Ikan", "master2.mainloop() def sumbar2(self): master2 = Tk() master2.minsize(width = 800, height = 600) master2.maxsize(width", "master0.bprov1 = Button(master0, text='Soto Betawi', command=self.dki1, width = 25, height=3) master0.bprov1.grid(row = 1,", "800, height = 600) master2.title(\"Makanan tradisional DI Yogyakarta\") canvas = Canvas(master2, width =", "= 25) self.bprov23.grid(row = 6, column = 7, columnspan = 4) self.bprov24 =", "text='Dorokdok', command=self.jabar2, width = 25, height=3) master0.bprov2.grid(row = 1, column = 6, columnspan", "12\") text2.insert(INSERT, text1) text2.pack() master2.mainloop() def __prov20(self): master0 = Tk() master0.minsize(width = 450,", "0, column = 2, columnspan = 4) master0.bprov1 = Button(master0, text='Lawa', command=self.kalut1, width", "12\") text2.insert(INSERT, text1) text2.pack() master2.mainloop() def bali2(self): master2 = Tk() master2.minsize(width 
= 800,", "Sulawesi Utara\") canvas = Canvas(master2, width = 300, height = 300) canvas.pack() img", "\"Arial 12\") text2.insert(INSERT, text1) text2.pack() master2.mainloop() def __prov27(self): master0 = Tk() master0.minsize(width =", "image=img) text1 = lst[57] text2 = Text(master2, font = \"Arial 12\") text2.insert(INSERT, text1)", "master2.title(\"Makanan tradisional Sumatera Utara\") canvas = Canvas(master2, width = 300, height = 300)", "= PhotoImage(master = canvas,file=\"Rendang.png\") canvas.create_image(5,5, anchor=NW, image=img) text1 = lst[4] text2 = Text(master2,", "self.bprov13 = Button(self.master, text='DKI Jakarta', command=self.__prov13, width = 25) self.bprov13.grid(row = 13, column", "= lst[24] text2 = Text(master2, font = \"Arial 12\") text2.insert(INSERT, text1) text2.pack() master2.mainloop()", "master2.mainloop() def __prov26(self): master0 = Tk() master0.minsize(width = 450, height = 100) master0.maxsize(width", "Na Niura.png\") canvas.create_image(5,5, anchor=NW, image=img) text1 = lst[2] text2 = Text(master2, font =", "= 300) canvas.pack() img = PhotoImage(master = canvas,file=\"Ikan Bakar Manokwari.png\") canvas.create_image(5,5, anchor=NW, image=img)", "master0.bprov1.grid(row = 1, column = 2, columnspan = 4) master0.bprov2 = Button(master0, text='<NAME>',", "0, column = 2, columnspan = 4) master0.bprov1 = Button(master0, text='Kelapertaar', command=self.sulut1, width", "Kolo', command=self.ntt2, width = 25, height=3) master0.bprov2.grid(row = 1, column = 6, columnspan", "= 600) master2.title(\"Makanan tradisional Kalimantan Timur\") canvas = Canvas(master2, width = 300, height", "1, column = 6, columnspan = 4) master0.mainloop() def kalsel1(self): master2 = Tk()", "master0.judul.grid(row = 0, column = 2, columnspan = 4) master0.bprov1 = Button(master0, text='Pempek", "master2.maxsize(width = 800, height = 600) master2.title(\"Makanan tradisional Riau\") canvas = Canvas(master2, width", "master2.maxsize(width = 800, height = 600) 
master2.title(\"Makanan tradisional Sulawesi Barat\") canvas = Canvas(master2,", "canvas.create_image(5,5, anchor=NW, image=img) text1 = lst[61] text2 = Text(master2, font = \"Arial 12\")", "master0.maxsize(width = 450, height = 100) master0.title(\"Direktori Makanan Tradisional Nusantara\") master0.judul = Label(master0,", "width = 300, height = 300) canvas.pack() img = PhotoImage(master = canvas,file=\"Bika Ambon.png\")", "text='Tekwan Palembang', command=self.sumsel2, width = 25, height=3) master0.bprov2.grid(row = 1, column = 6,", "self.bprov7.grid(row = 7, column = 3, columnspan = 4) self.bprov8 = Button(self.master, text='Sumatera", "1, column = 2, columnspan = 4) master0.bprov2 = Button(master0, text='Sate Bulayak', command=self.ntb2,", "columnspan = 4) master0.mainloop() def lamp1(self): master2 = Tk() master2.minsize(width = 800, height", "600) master2.maxsize(width = 800, height = 600) master2.title(\"Makanan tradisional Jawa Barat\") canvas =", "= lst[49] text2 = Text(master2, font = \"Arial 12\") text2.insert(INSERT, text1) text2.pack() master2.mainloop()", "text2.insert(INSERT, text1) text2.pack() master2.mainloop() def jabar2(self): master2 = Tk() master2.minsize(width = 800, height", "master0.bprov1.grid(row = 1, column = 2, columnspan = 4) master0.bprov2 = Button(master0, text='Bilenthango',", "image=img) text1 = lst[8] text2 = Text(master2, font = \"Arial 12\") text2.insert(INSERT, text1)", "\"Arial 12\") text2.insert(INSERT, text1) text2.pack() master2.mainloop() def __prov10(self): master0 = Tk() master0.minsize(width =", "2, columnspan = 4) master0.bprov1 = Button(master0, text='Sambal Colo-colo', command=self.malu1, width = 25,", "= Text(master2, font = \"Arial 12\") text2.insert(INSERT, text1) text2.pack() master2.mainloop() def malu2(self): master2", "master2.maxsize(width = 800, height = 600) master2.title(\"Makanan tradisional Jambi\") canvas = Canvas(master2, width", "\"Arial 12\") text2.insert(INSERT, text1) text2.pack() 
master2.mainloop() def lamp2(self): master2 = Tk() master2.minsize(width =", "300, height = 300) canvas.pack() img = PhotoImage(master = canvas,file=\"Padamaran.png\") canvas.create_image(5,5, anchor=NW, image=img)", "dki1(self): master2 = Tk() master2.minsize(width = 800, height = 600) master2.maxsize(width = 800,", "column = 2, columnspan = 4) master0.bprov2 = Button(master0, text='Rawon', command=self.jatim2, width =", "columnspan = 4) master0.mainloop() def malu1(self): master2 = Tk() master2.minsize(width = 800, height", "'r') as f: line = '' for i in range(68): while('<deskripsi>' not in", "master0.bprov2.grid(row = 1, column = 6, columnspan = 4) master0.mainloop() def kalbar1(self): master2", "jambi1(self): master2 = Tk() master2.minsize(width = 800, height = 600) master2.maxsize(width = 800,", "1, column = 6, columnspan = 4) master0.mainloop() def sulsel1(self): master2 = Tk()", "1, column = 6, columnspan = 4) master0.mainloop() def kbang1(self): master2 = Tk()", "canvas,file=\"Martabak Bangka.png\") canvas.create_image(5,5, anchor=NW, image=img) text1 = lst[16] text2 = Text(master2, font =", "image=img) text1 = lst[59] text2 = Text(master2, font = \"Arial 12\") text2.insert(INSERT, text1)", "Yogyakarta\") canvas = Canvas(master2, width = 300, height = 300) canvas.pack() img =", "text1 = lst[49] text2 = Text(master2, font = \"Arial 12\") text2.insert(INSERT, text1) text2.pack()", "7, columnspan = 4) self.bprov30 = Button(self.master, text='Sulawesi Tenggara', command=self.__prov30, width = 25)", "= 16, column = 7, columnspan = 4) self.bprov34 = Button(self.master, text='Papua', command=self.__prov34,", "master.maxsize(width = 500, height = 600) self.master.title(\"Selamat Datang di McDones (Direktori Macanan Tradisional)\")", "column = 3, columnspan = 8) self.bprov1 = Button(self.master, text='Aceh', command=self.__prov1, width =", "text2.pack() master2.mainloop() def __prov9(self): master0 = Tk() master0.minsize(width = 450, height = 100)", "def 
bali2(self): master2 = Tk() master2.minsize(width = 800, height = 600) master2.maxsize(width =", "PhotoImage(master = canvas,file=\"Jadah Tempe.png\") canvas.create_image(5,5, anchor=NW, image=img) text1 = lst[27] text2 = Text(master2,", "master2.mainloop() def diy2(self): master2 = Tk() master2.minsize(width = 800, height = 600) master2.maxsize(width", "column = 6, columnspan = 4) master0.mainloop() def ntb1(self): master2 = Tk() master2.minsize(width", "= 25) self.bprov11.grid(row = 11, column = 3, columnspan = 4) self.bprov12 =", "text='Jepa', command=self.sulbar2, width = 25, height=3) master0.bprov2.grid(row = 1, column = 6, columnspan", "= 600) master2.maxsize(width = 800, height = 600) master2.title(\"Makanan tradisional Kalimantan Tengah\") canvas", "img = PhotoImage(master = canvas,file=\"<NAME>.png\") canvas.create_image(5,5, anchor=NW, image=img) text1 = lst[21] text2 =", "3, columnspan = 4) self.bprov18 = Button(self.master, text='NTB', command=self.__prov18, width = 25) self.bprov18.grid(row", "text2 = Text(master2, font = \"Arial 12\") text2.insert(INSERT, text1) text2.pack() master2.mainloop() def __prov17(self):", "= lst[46] text2 = Text(master2, font = \"Arial 12\") text2.insert(INSERT, text1) text2.pack() master2.mainloop()", "lst[48] text2 = Text(master2, font = \"Arial 12\") text2.insert(INSERT, text1) text2.pack() master2.mainloop() def", "= Button(master0, text='Seruit Lampung', command=self.lamp1, width = 25, height=3) master0.bprov1.grid(row = 1, column", "lst[59] text2 = Text(master2, font = \"Arial 12\") text2.insert(INSERT, text1) text2.pack() master2.mainloop() def", "command=self.__prov4, width = 25) self.bprov4.grid(row = 4, column = 3, columnspan = 4)", "25) self.bprov18.grid(row = 1, column = 7, columnspan = 4) self.bprov19 = Button(self.master,", "width = 25) self.bprov22.grid(row = 5, column = 7, columnspan = 4) self.bprov23", "tradisional Riau\") canvas = Canvas(master2, width = 300, height = 300) canvas.pack() img", 
"text2.insert(INSERT, text1) text2.pack() master2.mainloop() def jambi2(self): master2 = Tk() master2.minsize(width = 800, height", "text1 = lst[11] text2 = Text(master2, font = \"Arial 12\") text2.insert(INSERT, text1) text2.pack()", "4) master0.mainloop() def kalut1(self): master2 = Tk() master2.minsize(width = 800, height = 600)", "PhotoImage(master = canvas,file=\"Tempoyak.png\") canvas.create_image(5,5, anchor=NW, image=img) text1 = lst[10] text2 = Text(master2, font", "= 15, column = 7, columnspan = 4) self.bprov33 = Button(self.master, text='Papua Barat',", "= Text(master2, font = \"Arial 12\") text2.insert(INSERT, text1) text2.pack() master2.mainloop() def diy2(self): master2", "def lamp2(self): master2 = Tk() master2.minsize(width = 800, height = 600) master2.maxsize(width =", "text='<NAME>', command=self.malut2, width = 25, height=3) master0.bprov2.grid(row = 1, column = 6, columnspan", "12\") text2.insert(INSERT, text1) text2.pack() master2.mainloop() def sumut2(self): master2 = Tk() master2.minsize(width = 800,", "master0.bprov2 = Button(master0, text='Sate Ulat Sagu', command=self.pabar2, width = 25, height=3) master0.bprov2.grid(row =", "Tengah', command=self.__prov22, width = 25) self.bprov22.grid(row = 5, column = 7, columnspan =", "column = 2, columnspan = 4) master0.bprov1 = Button(master0, text='Mie Aceh', command=self.aceh1, width", "25) self.bprov11.grid(row = 11, column = 3, columnspan = 4) self.bprov12 = Button(self.master,", "image=img) text1 = lst[3] text2 = Text(master2, font = \"Arial 12\") text2.insert(INSERT, text1)", "= 3, columnspan = 4) self.bprov8 = Button(self.master, text='Sumatera Selatan', command=self.__prov8, width =", "\"Arial 12\") text2.insert(INSERT, text1) text2.pack() master2.mainloop() def kriau2(self): master2 = Tk() master2.minsize(width =", "= 1, column = 6, columnspan = 4) master0.mainloop() def lamp1(self): master2 =", "12\") text2.insert(INSERT, text1) text2.pack() master2.mainloop() def __prov34(self): master0 = 
Tk() master0.minsize(width = 450,", "Canvas(master2, width = 300, height = 300) canvas.pack() img = PhotoImage(master = canvas,file=\"Lepek", "text2.pack() master2.mainloop() def __prov3(self): master0 = Tk() master0.minsize(width = 450, height = 100)", "command=self.kalteng1, width = 25, height=3) master0.bprov1.grid(row = 1, column = 2, columnspan =", "300) canvas.pack() img = PhotoImage(master = canvas,file=\"Coto Makassar.png\") canvas.create_image(5,5, anchor=NW, image=img) text1 =", "lst[58] text2 = Text(master2, font = \"Arial 12\") text2.insert(INSERT, text1) text2.pack() master2.mainloop() def", "4) self.bprov5 = Button(self.master, text='Kepulauan Riau', command=self.__prov5, width = 25) self.bprov5.grid(row = 5,", "columnspan = 4) master0.bprov2 = Button(master0, text='Gangan Asam Banjar', command=self.kalsel2, width = 25,", "Text(master2, font = \"Arial 12\") text2.insert(INSERT, text1) text2.pack() master2.mainloop() def __prov19(self): master0 =", "Button(master0, text='<NAME>', command=self.kaltim1, width = 25, height=3) master0.bprov1.grid(row = 1, column = 2,", "7, columnspan = 4) self.bprov24 = Button(self.master, text='Kalimantan Timur', command=self.__prov24, width = 25)", "lst[66] text2 = Text(master2, font = \"Arial 12\") text2.insert(INSERT, text1) text2.pack() master2.mainloop() def", "text='Bagar Hiu', command=self.beng2, width = 25, height=3) master0.bprov2.grid(row = 1, column = 6,", "= 800, height = 600) master2.title(\"Makanan tradisional Lampung\") canvas = Canvas(master2, width =", "600) master2.maxsize(width = 800, height = 600) master2.title(\"Makanan tradisional DKI Jakarta\") canvas =", "= Text(master2, font = \"Arial 12\") text2.insert(INSERT, text1) text2.pack() master2.mainloop() def ban2(self): master2", "master0.bprov1 = Button(master0, text='<NAME>', command=self.kbang1, width = 25, height=3) master0.bprov1.grid(row = 1, column", "master2.mainloop() def lamp2(self): master2 = Tk() master2.minsize(width = 800, height = 600) 
master2.maxsize(width", "1, column = 6, columnspan = 4) master0.mainloop() def bali1(self): master2 = Tk()", "Canvas(master2, width = 300, height = 300) canvas.pack() img = PhotoImage(master = canvas,file=\"Dorokdok.png\")", "text2 = Text(master2, font = \"Arial 12\") text2.insert(INSERT, text1) text2.pack() master2.mainloop() def __prov34(self):", "300, height = 300) canvas.pack() img = PhotoImage(master = canvas,file=\"Asidah.png\") canvas.create_image(5,5, anchor=NW, image=img)", "= Text(master2, font = \"Arial 12\") text2.insert(INSERT, text1) text2.pack() master2.mainloop() def __prov4(self): master0", "Text(master2, font = \"Arial 12\") text2.insert(INSERT, text1) text2.pack() master2.mainloop() def ban2(self): master2 =", "= 2, columnspan = 4) master0.bprov1 = Button(master0, text=\"Se'i\", command=self.ntt1, width = 25,", "canvas,file=\"<NAME>oyak.png\") canvas.create_image(5,5, anchor=NW, image=img) text1 = lst[41] text2 = Text(master2, font = \"Arial", "canvas,file=\"Gangan Asam Banjar.png\") canvas.create_image(5,5, anchor=NW, image=img) text1 = lst[45] text2 = Text(master2, font", "font = \"Arial 12\") text2.insert(INSERT, text1) text2.pack() master2.mainloop() def __prov2(self): master0 = Tk()", "Text(master2, font = \"Arial 12\") text2.insert(INSERT, text1) text2.pack() master2.mainloop() def sumbar2(self): master2 =", "self.bprov24.grid(row = 7, column = 7, columnspan = 4) self.bprov25 = Button(self.master, text='Gorontalo',", "column = 2, columnspan = 4) master0.bprov2 = Button(master0, text='Nasi Gerombyang', command=self.jateng2, width", "800, height = 600) master2.title(\"Makanan tradisional Jawa Timur\") canvas = Canvas(master2, width =", "ntb1(self): master2 = Tk() master2.minsize(width = 800, height = 600) master2.maxsize(width = 800,", "= 300) canvas.pack() img = PhotoImage(master = canvas,file=\"Dorokdok.png\") canvas.create_image(5,5, anchor=NW, image=img) text1 =", "column = 6, columnspan = 4) master0.mainloop() def aceh1(self): master2 = 
Tk() master2.minsize(width", "Text(master2, font = \"Arial 12\") text2.insert(INSERT, text1) text2.pack() master2.mainloop() def pabar2(self): master2 =", "2, columnspan = 4) master0.bprov2 = Button(master0, text='<NAME>', command=self.malut2, width = 25, height=3)", "column = 3, columnspan = 4) self.bprov2 = Button(self.master, text='Sumatera Utara', command=self.__prov2, width", "= 800, height = 600) master2.title(\"Makanan tradisional Papua Barat\") canvas = Canvas(master2, width", "PhotoImage(master = canvas,file=\"Kaledo.png\") canvas.create_image(5,5, anchor=NW, image=img) text1 = lst[55] text2 = Text(master2, font", "width = 25) self.bprov18.grid(row = 1, column = 7, columnspan = 4) self.bprov19", "0, column = 2, columnspan = 4) master0.bprov1 = Button(master0, text='Soto Betawi', command=self.dki1,", "PhotoImage(master = canvas,file=\"Martabak Bangka.png\") canvas.create_image(5,5, anchor=NW, image=img) text1 = lst[16] text2 = Text(master2,", "Canvas(master2, width = 300, height = 300) canvas.pack() img = PhotoImage(master = canvas,file=\"Tinutuan.png\")", "self.bprov11 = Button(self.master, text='Banten', command=self.__prov11, width = 25) self.bprov11.grid(row = 11, column =", "lst[5] text2 = Text(master2, font = \"Arial 12\") text2.insert(INSERT, text1) text2.pack() master2.mainloop() def", "height = 300) canvas.pack() img = PhotoImage(master = canvas,file=\"Ayam Taliwang.png\") canvas.create_image(5,5, anchor=NW, image=img)", "master2.maxsize(width = 800, height = 600) master2.title(\"Makanan tradisional DKI Jakarta\") canvas = Canvas(master2,", "4) self.bprov3 = Button(self.master, text='Sumatera Barat', command=self.__prov3, width = 25) self.bprov3.grid(row = 3,", "= 4) self.bprov18 = Button(self.master, text='NTB', command=self.__prov18, width = 25) self.bprov18.grid(row = 1,", "master0.bprov2.grid(row = 1, column = 6, columnspan = 4) master0.mainloop() def diy1(self): master2", "\"Arial 12\") text2.insert(INSERT, text1) text2.pack() 
master2.mainloop() def kalbar2(self): master2 = Tk() master2.minsize(width =", "2, columnspan = 4) master0.bprov1 = Button(master0, text='<NAME>', command=self.ntb1, width = 25, height=3)", "1, column = 6, columnspan = 4) master0.mainloop() def beng1(self): master2 = Tk()", "2, columnspan = 4) master0.bprov2 = Button(master0, text='<NAME>u', command=self.bali2, width = 25, height=3)", "text1 = lst[56] text2 = Text(master2, font = \"Arial 12\") text2.insert(INSERT, text1) text2.pack()", "canvas.pack() img = PhotoImage(master = canvas,file=\"Lawa.png\") canvas.create_image(5,5, anchor=NW, image=img) text1 = lst[38] text2", "font = \"Arial 12\") text2.insert(INSERT, text1) text2.pack() master2.mainloop() def __prov11(self): master0 = Tk()", "= 4) self.bprov23 = Button(self.master, text='Kalimantan Selatan', command=self.__prov23, width = 25) self.bprov23.grid(row =", "height = 600) master2.maxsize(width = 800, height = 600) master2.title(\"Makanan tradisional Kalimantan Barat\")", "Timur\") canvas = Canvas(master2, width = 300, height = 300) canvas.pack() img =", "= Tk() master2.minsize(width = 800, height = 600) master2.maxsize(width = 800, height =", "text2.pack() master2.mainloop() def kalteng2(self): master2 = Tk() master2.minsize(width = 800, height = 600)", "1, column = 2, columnspan = 4) master0.bprov2 = Button(master0, text='Jadah Tempe', command=self.diy2,", "PhotoImage(master = canvas,file=\"Tekwan Palembang.png\") canvas.create_image(5,5, anchor=NW, image=img) text1 = lst[15] text2 = Text(master2,", "img = PhotoImage(master = canvas,file=\"Manday.png\") canvas.create_image(5,5, anchor=NW, image=img) text1 = lst[44] text2 =", "= \"Pilih provinsi yang ingin anda ketahui\", font = \"Arial 16 bold\") self.master.judul.grid(row", "height=3) master0.bprov2.grid(row = 1, column = 6, columnspan = 4) master0.mainloop() def aceh1(self):", "Button(master0, text='<NAME>', command=self.jabar1, width = 25, height=3) master0.bprov1.grid(row = 1, column = 2,", "= 0, column 
= 2, columnspan = 4) master0.bprov1 = Button(master0, text='Ikan Bakar", "def ban1(self): master2 = Tk() master2.minsize(width = 800, height = 600) master2.maxsize(width =", "= 1, column = 6, columnspan = 4) master0.mainloop() def kbang1(self): master2 =", "text2.insert(INSERT, text1) text2.pack() master2.mainloop() def riau2(self): master2 = Tk() master2.minsize(width = 800, height", "= 4) master0.mainloop() def ban1(self): master2 = Tk() master2.minsize(width = 800, height =", "300) canvas.pack() img = PhotoImage(master = canvas,file=\"Ayam Betutu.png\") canvas.create_image(5,5, anchor=NW, image=img) text1 =", "= Text(master2, font = \"Arial 12\") text2.insert(INSERT, text1) text2.pack() master2.mainloop() def goron2(self): master2", "= 300, height = 300) canvas.pack() img = PhotoImage(master = canvas,file=\"Pempek Palembang.png\") canvas.create_image(5,5,", "= 2, columnspan = 4) master0.bprov2 = Button(master0, text='Tekwan Palembang', command=self.sumsel2, width =", "columnspan = 4) self.bprov17 = Button(self.master, text='Bali', command=self.__prov17, width = 25) self.bprov17.grid(row =", "anchor=NW, image=img) text1 = lst[7] text2 = Text(master2, font = \"Arial 12\") text2.insert(INSERT,", "12\") text2.insert(INSERT, text1) text2.pack() master2.mainloop() def beng2(self): master2 = Tk() master2.minsize(width = 800,", "master2.maxsize(width = 800, height = 600) master2.title(\"Makanan tradisional Jawa TEngah\") canvas = Canvas(master2,", "columnspan = 4) master0.mainloop() def bali1(self): master2 = Tk() master2.minsize(width = 800, height", "Bandeng.png\") canvas.create_image(5,5, anchor=NW, image=img) text1 = lst[23] text2 = Text(master2, font = \"Arial", "= canvas,file=\"Kue Timpan.png\") canvas.create_image(5,5, anchor=NW, image=img) text1 = lst[1] text2 = Text(master2, font", "canvas.pack() img = PhotoImage(master = canvas,file=\"Nasi Sumsum.png\") canvas.create_image(5,5, anchor=NW, image=img) text1 = lst[22]", "= PhotoImage(master = canvas,file=\"Apang 
Bugis.png\") canvas.create_image(5,5, anchor=NW, image=img) text1 = lst[52] text2 =", "= 0, column = 2, columnspan = 4) master0.bprov1 = Button(master0, text='Soto Betawi',", "command=self.sulut2, width = 25, height=3) master0.bprov2.grid(row = 1, column = 6, columnspan =", "height = 600) master2.title(\"Makanan tradisional Jambi\") canvas = Canvas(master2, width = 300, height", "font = \"Arial 12\") text2.insert(INSERT, text1) text2.pack() master2.mainloop() def ntb2(self): master2 = Tk()", "jatim1(self): master2 = Tk() master2.minsize(width = 800, height = 600) master2.maxsize(width = 800,", "= 6, columnspan = 4) master0.mainloop() def aceh1(self): master2 = Tk() master2.minsize(width =", "PhotoImage(master = canvas,file=\"Soto Kudus.png\") canvas.create_image(5,5, anchor=NW, image=img) text1 = lst[28] text2 = Text(master2,", "0, column = 2, columnspan = 4) master0.bprov1 = Button(master0, text='Nasi Sumsum', command=self.ban1,", "4) master0.bprov2 = Button(master0, text='<NAME>u', command=self.bali2, width = 25, height=3) master0.bprov2.grid(row = 1,", "= Button(master0, text='Ampiang Dadiah', command=self.sumbar2, width = 25, height=3) master0.bprov2.grid(row = 1, column", "#aminnn from tkinter import * lst = [] def readf(): with open('all.txt', 'r')", "def __prov24(self): master0 = Tk() master0.minsize(width = 450, height = 100) master0.maxsize(width =", "Text(master2, font = \"Arial 12\") text2.insert(INSERT, text1) text2.pack() master2.mainloop() def sulteng2(self): master2 =", "master0.mainloop() def sulbar1(self): master2 = Tk() master2.minsize(width = 800, height = 600) master2.maxsize(width", "Text(master2, font = \"Arial 12\") text2.insert(INSERT, text1) text2.pack() master2.mainloop() def sulut2(self): master2 =", "lst[61] text2 = Text(master2, font = \"Arial 12\") text2.insert(INSERT, text1) text2.pack() master2.mainloop() def", "master0.bprov1.grid(row = 1, column = 2, columnspan = 4) master0.bprov2 = Button(master0, text='Coto", "Canvas(master2, 
width = 300, height = 300) canvas.pack() img = PhotoImage(master = canvas,file=\"Soto", "= 800, height = 600) master2.title(\"Makanan tradisional NTB\") canvas = Canvas(master2, width =", "anda ketahui\", font = \"Arial 16 bold\") self.master.judul.grid(row = 0, column = 3,", "width = 300, height = 300) canvas.pack() img = PhotoImage(master = canvas,file=\"Kue Timpan.png\")", "4) master0.bprov1 = Button(master0, text='Bubur Paddas Sambas', command=self.kalbar1, width = 25, height=3) master0.bprov1.grid(row", "= 300, height = 300) canvas.pack() img = PhotoImage(master = canvas,file=\"Tinutuan.png\") canvas.create_image(5,5, anchor=NW,", "master2.title(\"Makanan tradisional Sulawesi Barat\") canvas = Canvas(master2, width = 300, height = 300)", "Utara', command=self.__prov26, width = 25) self.bprov26.grid(row = 9, column = 7, columnspan =", "self.bprov16 = Button(self.master, text='Jawa Timur', command=self.__prov16, width = 25) self.bprov16.grid(row = 16, column", "master2.title(\"Makanan tradisional DI Yogyakarta\") canvas = Canvas(master2, width = 300, height = 300)", "600) master2.maxsize(width = 800, height = 600) master2.title(\"Makanan tradisional Kalimantan Utara\") canvas =", "def riau1(self): master2 = Tk() master2.minsize(width = 800, height = 600) master2.maxsize(width =", "canvas,file=\"<NAME>.png\") canvas.create_image(5,5, anchor=NW, image=img) text1 = lst[24] text2 = Text(master2, font = \"Arial", "= 1, column = 6, columnspan = 4) master0.mainloop() def goron1(self): master2 =", "= '' while('<end>' not in cmp): txt += cmp cmp = f.readline() lst.append(txt)", "master0.bprov2.grid(row = 1, column = 6, columnspan = 4) master0.mainloop() def sulut1(self): master2", "= 2, columnspan = 4) master0.bprov2 = Button(master0, text='Kaledo', command=self.sulteng2, width = 25,", "= 0, column = 2, columnspan = 4) master0.bprov1 = Button(master0, text='<NAME>', command=self.riau1,", "Text(master2, font = \"Arial 12\") text2.insert(INSERT, text1) text2.pack() 
master2.mainloop() def __prov3(self): master0 =", "master0.mainloop() def sumsel1(self): master2 = Tk() master2.minsize(width = 800, height = 600) master2.maxsize(width", "font = \"Arial 12\") text2.insert(INSERT, text1) text2.pack() master2.mainloop() def __prov13(self): master0 = Tk()", "anchor=NW, image=img) text1 = lst[12] text2 = Text(master2, font = \"Arial 12\") text2.insert(INSERT,", "master0.bprov1 = Button(master0, text='<NAME>', command=self.papua1, width = 25, height=3) master0.bprov1.grid(row = 1, column", "column = 7, columnspan = 4) self.bprov19 = Button(self.master, text='NTT', command=self.__prov19, width =", "master0.bprov2.grid(row = 1, column = 6, columnspan = 4) master0.mainloop() def kalut1(self): master2", "1, column = 2, columnspan = 4) master0.bprov2 = Button(master0, text='Dorokdok', command=self.jabar2, width", "12\") text2.insert(INSERT, text1) text2.pack() master2.mainloop() def __prov4(self): master0 = Tk() master0.minsize(width = 450,", "master2.title(\"Makanan tradisional DKI Jakarta\") canvas = Canvas(master2, width = 300, height = 300)", "= PhotoImage(master = canvas,file=\"Gatang Kenari.png\") canvas.create_image(5,5, anchor=NW, image=img) text1 = lst[60] text2 =", "= 25) self.bprov32.grid(row = 15, column = 7, columnspan = 4) self.bprov33 =", "columnspan = 4) master0.bprov1 = Button(master0, text='<NAME>', command=self.papua1, width = 25, height=3) master0.bprov1.grid(row", "= canvas,file=\"Tinutuan.png\") canvas.create_image(5,5, anchor=NW, image=img) text1 = lst[51] text2 = Text(master2, font =", "25) self.bprov9.grid(row = 9, column = 3, columnspan = 4) self.bprov10 = Button(self.master,", "\"Arial 12\") text2.insert(INSERT, text1) text2.pack() master2.mainloop() def __prov4(self): master0 = Tk() master0.minsize(width =", "master2.maxsize(width = 800, height = 600) master2.title(\"Makanan tradisional Jawa Barat\") canvas = Canvas(master2,", "1, column = 2, columnspan = 4) master0.bprov2 = Button(master0, text='Nasi Bekekpor', 
command=self.kaltim2,", "800, height = 600) master2.title(\"Makanan tradisional Kepulauan Riau\") canvas = Canvas(master2, width =", "canvas.pack() img = PhotoImage(master = canvas,file=\"Ayam Betutu.png\") canvas.create_image(5,5, anchor=NW, image=img) text1 = lst[33]", "text1 = lst[6] text2 = Text(master2, font = \"Arial 12\") text2.insert(INSERT, text1) text2.pack()", "Text(master2, font = \"Arial 12\") text2.insert(INSERT, text1) text2.pack() master2.mainloop() def sumsel2(self): master2 =", "columnspan = 4) master0.bprov2 = Button(master0, text='Coto Makassar', command=self.sulsel2, width = 25, height=3)", "text='<NAME>', command=self.ntb1, width = 25, height=3) master0.bprov1.grid(row = 1, column = 2, columnspan", "4) master0.mainloop() def riau1(self): master2 = Tk() master2.minsize(width = 800, height = 600)", "= 6, columnspan = 4) master0.mainloop() def ban1(self): master2 = Tk() master2.minsize(width =", "= 300, height = 300) canvas.pack() img = PhotoImage(master = canvas,file=\"Rujak Cingur.png\") canvas.create_image(5,5,", "= canvas,file=\"Nasi Bekepor.png\") canvas.create_image(5,5, anchor=NW, image=img) text1 = lst[47] text2 = Text(master2, font", "width = 300, height = 300) canvas.pack() img = PhotoImage(master = canvas,file=\"Binte Biluhuta.png\")", "master2.maxsize(width = 800, height = 600) master2.title(\"Makanan tradisional Sulawesi Tengah\") canvas = Canvas(master2,", "= Text(master2, font = \"Arial 12\") text2.insert(INSERT, text1) text2.pack() master2.mainloop() def malut2(self): master2", "= lst[58] text2 = Text(master2, font = \"Arial 12\") text2.insert(INSERT, text1) text2.pack() master2.mainloop()", "300) canvas.pack() img = PhotoImage(master = canvas,file=\"Bika Ambon.png\") canvas.create_image(5,5, anchor=NW, image=img) text1 =", "text1 = lst[66] text2 = Text(master2, font = \"Arial 12\") text2.insert(INSERT, text1) text2.pack()", "800, height = 600) master2.maxsize(width = 800, height = 600) master2.title(\"Makanan tradisional Bangka", 
"1, column = 2, columnspan = 4) master0.bprov2 = Button(master0, text='Kaledo', command=self.sulteng2, width", "= 600) master2.maxsize(width = 800, height = 600) master2.title(\"Makanan tradisional Kepulauan Riau\") canvas", "self.bprov20.grid(row = 3, column = 7, columnspan = 4) self.bprov21 = Button(self.master, text='Kalimantan", "anchor=NW, image=img) text1 = lst[53] text2 = Text(master2, font = \"Arial 12\") text2.insert(INSERT,", "= 4) self.bprov28 = Button(self.master, text='Sulawesi Tengah', command=self.__prov28, width = 25) self.bprov28.grid(row =", "Canvas(master2, width = 300, height = 300) canvas.pack() img = PhotoImage(master = canvas,file=\"Pempek", "3, columnspan = 4) self.bprov17 = Button(self.master, text='Bali', command=self.__prov17, width = 25) self.bprov17.grid(row", "= 600) master2.title(\"Makanan tradisional Jambi\") canvas = Canvas(master2, width = 300, height =", "tradisional Kalimantan Tengah\") canvas = Canvas(master2, width = 300, height = 300) canvas.pack()", "= 4) master0.mainloop() def kbang1(self): master2 = Tk() master2.minsize(width = 800, height =", "600) master2.title(\"Makanan tradisional Sulawesi Utara\") canvas = Canvas(master2, width = 300, height =", "master0.bprov2 = Button(master0, text='Jepa', command=self.sulbar2, width = 25, height=3) master0.bprov2.grid(row = 1, column", "tradisional Jambi\") canvas = Canvas(master2, width = 300, height = 300) canvas.pack() img", "= canvas,file=\"Ikan Bakar Manokwari.png\") canvas.create_image(5,5, anchor=NW, image=img) text1 = lst[64] text2 = Text(master2,", "command=self.kriau2, width = 25, height=3) master0.bprov2.grid(row = 1, column = 6, columnspan =", "text='Sambal Colo-colo', command=self.malu1, width = 25, height=3) master0.bprov1.grid(row = 1, column = 2,", "canvas,file=\"Kepiting Soka.png\") canvas.create_image(5,5, anchor=NW, image=img) text1 = lst[39] text2 = Text(master2, font =", "master0.mainloop() def jabar1(self): master2 = Tk() master2.minsize(width = 800, height = 
600) master2.maxsize(width", "columnspan = 4) self.bprov14 = Button(self.master, text='Jawa Tengah', command=self.__prov14, width = 25) self.bprov14.grid(row", "column = 6, columnspan = 4) master0.mainloop() def sumut1(self): master2 = Tk() master2.minsize(width", "height = 600) master2.maxsize(width = 800, height = 600) master2.title(\"Makanan tradisional Kalimantan Tengha\")", "Kepulauan Riau\") canvas = Canvas(master2, width = 300, height = 300) canvas.pack() img", "= 1, column = 2, columnspan = 4) master0.bprov2 = Button(master0, text='Sate Ulat", "font = \"Arial 12\") text2.insert(INSERT, text1) text2.pack() master2.mainloop() def jabar2(self): master2 = Tk()", "columnspan = 4) master0.mainloop() def ban1(self): master2 = Tk() master2.minsize(width = 800, height", "= canvas,file=\"W<NAME>in.png\") canvas.create_image(5,5, anchor=NW, image=img) text1 = lst[43] text2 = Text(master2, font =", "def __prov26(self): master0 = Tk() master0.minsize(width = 450, height = 100) master0.maxsize(width =", "def sumbar1(self): master2 = Tk() master2.minsize(width = 800, height = 600) master2.maxsize(width =", "= 6, columnspan = 4) master0.mainloop() def ntb1(self): master2 = Tk() master2.minsize(width =", "columnspan = 4) self.bprov22 = Button(self.master, text='Kalimantan Tengah', command=self.__prov22, width = 25) self.bprov22.grid(row", "= Button(self.master, text='Kalimantan Timur', command=self.__prov24, width = 25) self.bprov24.grid(row = 7, column =", "= PhotoImage(master = canvas,file=\"Luti Gendang.png\") canvas.create_image(5,5, anchor=NW, image=img) text1 = lst[8] text2 =", "Canvas(master2, width = 300, height = 300) canvas.pack() img = PhotoImage(master = canvas,file=\"Ayam", "Button(master0, text='Bilenthango', command=self.goron2, width = 25, height=3) master0.bprov2.grid(row = 1, column = 6,", "300, height = 300) canvas.pack() img = PhotoImage(master = canvas,file=\"Sate Bandeng.png\") canvas.create_image(5,5, anchor=NW,", "text='<NAME>', command=self.jabar1, 
width = 25, height=3) master0.bprov1.grid(row = 1, column = 2, columnspan", "Palembang.png\") canvas.create_image(5,5, anchor=NW, image=img) text1 = lst[15] text2 = Text(master2, font = \"Arial", "4) master0.bprov2 = Button(master0, text='<NAME>', command=self.kalbar2, width = 25, height=3) master0.bprov2.grid(row = 1,", "text='<NAME>', command=self.kalteng2, width = 25, height=3) master0.bprov2.grid(row = 1, column = 6, columnspan", "__prov34(self): master0 = Tk() master0.minsize(width = 450, height = 100) master0.maxsize(width = 450,", "master2 = Tk() master2.minsize(width = 800, height = 600) master2.maxsize(width = 800, height", "column = 6, columnspan = 4) master0.mainloop() def sulut1(self): master2 = Tk() master2.minsize(width", "= 4) master0.bprov1 = Button(master0, text='Sinonggi', command=self.sulgar1, width = 25, height=3) master0.bprov1.grid(row =", "master0.mainloop() def riau1(self): master2 = Tk() master2.minsize(width = 800, height = 600) master2.maxsize(width", "= Button(master0, text='Nasi Sumsum', command=self.ban1, width = 25, height=3) master0.bprov1.grid(row = 1, column", "Biluhuta.png\") canvas.create_image(5,5, anchor=NW, image=img) text1 = lst[48] text2 = Text(master2, font = \"Arial", "height=3) master0.bprov2.grid(row = 1, column = 6, columnspan = 4) master0.mainloop() def sulsel1(self):", "height = 300) canvas.pack() img = PhotoImage(master = canvas,file=\"Rendang.png\") canvas.create_image(5,5, anchor=NW, image=img) text1", "= 300) canvas.pack() img = PhotoImage(master = canvas,file=\"Jepa.png\") canvas.create_image(5,5, anchor=NW, image=img) text1 =", "= 4) self.bprov22 = Button(self.master, text='Kalimantan Tengah', command=self.__prov22, width = 25) self.bprov22.grid(row =", "master2.title(\"Makanan tradisional Bali\") canvas = Canvas(master2, width = 300, height = 300) canvas.pack()", "300, height = 300) canvas.pack() img = PhotoImage(master = canvas,file=\"Rawon.png\") canvas.create_image(5,5, anchor=NW, image=img)", "4) 
master0.mainloop() def lamp1(self): master2 = Tk() master2.minsize(width = 800, height = 600)", "text2.pack() master2.mainloop() def dki2(self): master2 = Tk() master2.minsize(width = 800, height = 600)", "= lst[44] text2 = Text(master2, font = \"Arial 12\") text2.insert(INSERT, text1) text2.pack() master2.mainloop()", "tradisional Sulawesi Selatan\") canvas = Canvas(master2, width = 300, height = 300) canvas.pack()", "text1) text2.pack() master2.mainloop() def __prov5(self): master0 = Tk() master0.minsize(width = 450, height =", "canvas,file=\"Padamaran.png\") canvas.create_image(5,5, anchor=NW, image=img) text1 = lst[11] text2 = Text(master2, font = \"Arial", "height = 600) master2.maxsize(width = 800, height = 600) master2.title(\"Makanan tradisional BAnten\") canvas", "height = 300) canvas.pack() img = PhotoImage(master = canvas,file=\"Manday.png\") canvas.create_image(5,5, anchor=NW, image=img) text1", "columnspan = 4) master0.bprov2 = Button(master0, text='Sate Bulayak', command=self.ntb2, width = 25, height=3)", "= 500, height = 600) self.master.title(\"Selamat Datang di McDones (Direktori Macanan Tradisional)\") self.master.judul", "= 4) master0.bprov2 = Button(master0, text='Rawon', command=self.jatim2, width = 25, height=3) master0.bprov2.grid(row =", "column = 2, columnspan = 4) master0.bprov1 = Button(master0, text='Manday', command=self.kalsel1, width =", "master0.bprov1 = Button(master0, text='Tempoyak', command=self.jambi1, width = 25, height=3) master0.bprov1.grid(row = 1, column", "text1 = lst[22] text2 = Text(master2, font = \"Arial 12\") text2.insert(INSERT, text1) text2.pack()", "anchor=NW, image=img) text1 = lst[5] text2 = Text(master2, font = \"Arial 12\") text2.insert(INSERT,", "= lst[36] text2 = Text(master2, font = \"Arial 12\") text2.insert(INSERT, text1) text2.pack() master2.mainloop()", "= 600) master2.title(\"Makanan tradisional Jawa Barat\") canvas = Canvas(master2, width = 300, height", "= 300) canvas.pack() img = PhotoImage(master = 
canvas,file=\"Rendang.png\") canvas.create_image(5,5, anchor=NW, image=img) text1 =", "800, height = 600) master2.title(\"Makanan tradisional Jambi\") canvas = Canvas(master2, width = 300,", "= PhotoImage(master = canvas,file=\"Tekwan Palembang.png\") canvas.create_image(5,5, anchor=NW, image=img) text1 = lst[15] text2 =", "= Text(master2, font = \"Arial 12\") text2.insert(INSERT, text1) text2.pack() master2.mainloop() def __prov21(self): master0", "column = 2, columnspan = 4) master0.bprov1 = Button(master0, text='<NAME>', command=self.kriau1, width =", "= 7, columnspan = 4) self.bprov20 = Button(self.master, text='Kalimantan Utara', command=self.__prov20, width =", "master0.bprov1 = Button(master0, text=\"Se'i\", command=self.ntt1, width = 25, height=3) master0.bprov1.grid(row = 1, column", "height = 300) canvas.pack() img = PhotoImage(master = canvas,file=\"Dorokdok.png\") canvas.create_image(5,5, anchor=NW, image=img) text1", "PhotoImage(master = canvas,file=\"Sate Bulayak.png\") canvas.create_image(5,5, anchor=NW, image=img) text1 = lst[35] text2 = Text(master2,", "0, column = 2, columnspan = 4) master0.bprov1 = Button(master0, text='Apang Bugis', command=self.sulbar1,", "text='NTB', command=self.__prov18, width = 25) self.bprov18.grid(row = 1, column = 7, columnspan =", "600) master2.maxsize(width = 800, height = 600) master2.title(\"Makanan tradisional DI Yogyakarta\") canvas =", "= Button(master0, text='Bubur Paddas Sambas', command=self.kalbar1, width = 25, height=3) master0.bprov1.grid(row = 1,", "6, columnspan = 4) master0.mainloop() def jabar1(self): master2 = Tk() master2.minsize(width = 800,", "img = PhotoImage(master = canvas,file=\"Kalumpe.png\") canvas.create_image(5,5, anchor=NW, image=img) text1 = lst[42] text2 =", "600) master2.maxsize(width = 800, height = 600) master2.title(\"Makanan tradisional Sulawesi Selatan\") canvas =", "= PhotoImage(master = canvas,file=\"Gong Gong.png\") canvas.create_image(5,5, anchor=NW, image=img) text1 = lst[9] 
text2 =", "text2.insert(INSERT, text1) text2.pack() master2.mainloop() def diy2(self): master2 = Tk() master2.minsize(width = 800, height", "12\") text2.insert(INSERT, text1) text2.pack() master2.mainloop() def __prov32(self): master0 = Tk() master0.minsize(width = 450,", "text1) text2.pack() master2.mainloop() def jateng2(self): master2 = Tk() master2.minsize(width = 800, height =", "anchor=NW, image=img) text1 = lst[32] text2 = Text(master2, font = \"Arial 12\") text2.insert(INSERT,", "master2.mainloop() def __prov6(self): master0 = Tk() master0.minsize(width = 450, height = 100) master0.maxsize(width", "300, height = 300) canvas.pack() img = PhotoImage(master = canvas,file=\"Ayam Betutu.png\") canvas.create_image(5,5, anchor=NW,", "text1 = lst[4] text2 = Text(master2, font = \"Arial 12\") text2.insert(INSERT, text1) text2.pack()", "= Text(master2, font = \"Arial 12\") text2.insert(INSERT, text1) text2.pack() master2.mainloop() def __prov3(self): master0", "canvas,file=\"Sate Bulayak.png\") canvas.create_image(5,5, anchor=NW, image=img) text1 = lst[35] text2 = Text(master2, font =", "= 600) master2.maxsize(width = 800, height = 600) master2.title(\"Makanan tradisional Maluku Utara\") canvas", "Button(self.master, text='Kalimantan Tengah', command=self.__prov22, width = 25) self.bprov22.grid(row = 5, column = 7,", "= 4) master0.mainloop() def papua1(self): master2 = Tk() master2.minsize(width = 800, height =", "1, column = 6, columnspan = 4) master0.mainloop() def ban1(self): master2 = Tk()", "canvas,file=\"Lepek Binti.png\") canvas.create_image(5,5, anchor=NW, image=img) text1 = lst[12] text2 = Text(master2, font =", "6, columnspan = 4) master0.mainloop() def malu1(self): master2 = Tk() master2.minsize(width = 800,", "= 1, column = 6, columnspan = 4) master0.mainloop() def jatim1(self): master2 =", "command=self.sumsel1, width = 25, height=3) master0.bprov1.grid(row = 1, column = 2, columnspan =", "text2.pack() master2.mainloop() def __prov27(self): master0 = 
Tk() master0.minsize(width = 450, height = 100)", "tradisional NTT\") canvas = Canvas(master2, width = 300, height = 300) canvas.pack() img", "img = PhotoImage(master = canvas,file=\"Bagar Hiu.png\") canvas.create_image(5,5, anchor=NW, image=img) text1 = lst[13] text2", "= 4) master0.bprov2 = Button(master0, text='Dorokdok', command=self.jabar2, width = 25, height=3) master0.bprov2.grid(row =", "= \"Arial 12\") text2.insert(INSERT, text1) text2.pack() master2.mainloop() def __prov17(self): master0 = Tk() master0.minsize(width", "\"Arial 12\") text2.insert(INSERT, text1) text2.pack() master2.mainloop() def kalut2(self): master2 = Tk() master2.minsize(width =", "ntb2(self): master2 = Tk() master2.minsize(width = 800, height = 600) master2.maxsize(width = 800,", "= Button(master0, text='Binte Biluhuta', command=self.goron1, width = 25, height=3) master0.bprov1.grid(row = 1, column", "text='NTT', command=self.__prov19, width = 25) self.bprov19.grid(row = 2, column = 7, columnspan =", "columnspan = 4) master0.bprov1 = Button(master0, text='Sinonggi', command=self.sulgar1, width = 25, height=3) master0.bprov1.grid(row", "columnspan = 4) master0.bprov1 = Button(master0, text='<NAME>', command=self.sumut1, width = 25, height=3) master0.bprov1.grid(row", "import * lst = [] def readf(): with open('all.txt', 'r') as f: line", "f.readline() cmp = '' txt = '' while('<end>' not in cmp): txt +=", "Riau', command=self.__prov5, width = 25) self.bprov5.grid(row = 5, column = 3, columnspan =", "= 1, column = 2, columnspan = 4) master0.bprov2 = Button(master0, text='Kaledo', command=self.sulteng2,", "anchor=NW, image=img) text1 = lst[27] text2 = Text(master2, font = \"Arial 12\") text2.insert(INSERT,", "= canvas,file=\"Udang Selingkuh.png\") canvas.create_image(5,5, anchor=NW, image=img) text1 = lst[66] text2 = Text(master2, font", "= 3, columnspan = 4) self.bprov3 = Button(self.master, text='Sumatera Barat', command=self.__prov3, width =", "jambi2(self): master2 = Tk() 
master2.minsize(width = 800, height = 600) master2.maxsize(width = 800,", "font = \"Arial 12\") text2.insert(INSERT, text1) text2.pack() master2.mainloop() def papua2(self): master2 = Tk()", "4) master0.mainloop() def ban1(self): master2 = Tk() master2.minsize(width = 800, height = 600)", "2, columnspan = 4) master0.bprov1 = Button(master0, text='<NAME>', command=self.kriau1, width = 25, height=3)", "text1 = lst[44] text2 = Text(master2, font = \"Arial 12\") text2.insert(INSERT, text1) text2.pack()", "columnspan = 4) self.bprov33 = Button(self.master, text='Papua Barat', command=self.__prov33, width = 25) self.bprov33.grid(row", "image=img) text1 = lst[58] text2 = Text(master2, font = \"Arial 12\") text2.insert(INSERT, text1)", "300, height = 300) canvas.pack() img = PhotoImage(master = canvas,file=\"Apang Bugis.png\") canvas.create_image(5,5, anchor=NW,", "= 600) master2.maxsize(width = 800, height = 600) master2.title(\"Makanan tradisional Bangka Belitung\") canvas", "width = 300, height = 300) canvas.pack() img = PhotoImage(master = canvas,file=\"Nasi Bekepor.png\")", "= 9, column = 3, columnspan = 4) self.bprov10 = Button(self.master, text='Lampung', command=self.__prov10,", "300, height = 300) canvas.pack() img = PhotoImage(master = canvas,file=\"Nasi Grombyang.png\") canvas.create_image(5,5, anchor=NW,", "text2 = Text(master2, font = \"Arial 12\") text2.insert(INSERT, text1) text2.pack() master2.mainloop() def kriau2(self):", "= lst[22] text2 = Text(master2, font = \"Arial 12\") text2.insert(INSERT, text1) text2.pack() master2.mainloop()", "= 4) master0.mainloop() def riau1(self): master2 = Tk() master2.minsize(width = 800, height =", "0, column = 2, columnspan = 4) master0.bprov1 = Button(master0, text='<NAME>', command=self.sumut1, width", "font = \"Arial 12\") text2.insert(INSERT, text1) text2.pack() master2.mainloop() def bali2(self): master2 = Tk()", "1, column = 2, columnspan = 4) master0.bprov2 = Button(master0, text='<NAME>', command=self.kalbar2, 
width", "= Button(self.master, text='Kalimantan Barat', command=self.__prov21, width = 25) self.bprov21.grid(row = 4, column =", "= 4) master0.bprov1 = Button(master0, text='Tempoyak', command=self.jambi1, width = 25, height=3) master0.bprov1.grid(row =", "2, columnspan = 4) master0.bprov2 = Button(master0, text='Jepa', command=self.sulbar2, width = 25, height=3)", "600) master2.maxsize(width = 800, height = 600) master2.title(\"Makanan tradisional Sulawesi Utara\") canvas =", "= PhotoImage(master = canvas,file=\"Kue Timpan.png\") canvas.create_image(5,5, anchor=NW, image=img) text1 = lst[1] text2 =", "Belitung', command=self.__prov9, width = 25) self.bprov9.grid(row = 9, column = 3, columnspan =", "column = 3, columnspan = 4) self.bprov6 = Button(self.master, text='Jambi', command=self.__prov6, width =", "Lampung\") canvas = Canvas(master2, width = 300, height = 300) canvas.pack() img =", "= \"Arial 12\") text2.insert(INSERT, text1) text2.pack() master2.mainloop() def __prov31(self): master0 = Tk() master0.minsize(width", "master2.mainloop() def beng2(self): master2 = Tk() master2.minsize(width = 800, height = 600) master2.maxsize(width", "master2.mainloop() def kaltim2(self): master2 = Tk() master2.minsize(width = 800, height = 600) master2.maxsize(width", "canvas.create_image(5,5, anchor=NW, image=img) text1 = lst[8] text2 = Text(master2, font = \"Arial 12\")", "def dki1(self): master2 = Tk() master2.minsize(width = 800, height = 600) master2.maxsize(width =", "column = 2, columnspan = 4) master0.bprov1 = Button(master0, text='Soto Kudus', command=self.jateng1, width", "= 4) self.bprov3 = Button(self.master, text='Sumatera Barat', command=self.__prov3, width = 25) self.bprov3.grid(row =", "def bali1(self): master2 = Tk() master2.minsize(width = 800, height = 600) master2.maxsize(width =", "columnspan = 4) self.bprov15 = Button(self.master, text='DI Yogyakarta', command=self.__prov15, width = 25) self.bprov15.grid(row", "300) canvas.pack() img = PhotoImage(master 
= canvas,file=\"Dekke Na Niura.png\") canvas.create_image(5,5, anchor=NW, image=img) text1", "canvas,file=\"Rawon.png\") canvas.create_image(5,5, anchor=NW, image=img) text1 = lst[31] text2 = Text(master2, font = \"Arial", "300) canvas.pack() img = PhotoImage(master = canvas,file=\"Pempek Palembang.png\") canvas.create_image(5,5, anchor=NW, image=img) text1 =", "= 4) master0.bprov1 = Button(master0, text='Kalumpe', command=self.kalteng1, width = 25, height=3) master0.bprov1.grid(row =", "Button(self.master, text='Bengukulu', command=self.__prov7, width = 25) self.bprov7.grid(row = 7, column = 3, columnspan", "Timur', command=self.__prov16, width = 25) self.bprov16.grid(row = 16, column = 3, columnspan =", "Button(self.master, text='Kalimantan Timur', command=self.__prov24, width = 25) self.bprov24.grid(row = 7, column = 7,", "Canvas(master2, width = 300, height = 300) canvas.pack() img = PhotoImage(master = canvas,file=\"Sate", "text2.insert(INSERT, text1) text2.pack() master2.mainloop() def kalut2(self): master2 = Tk() master2.minsize(width = 800, height", "= 1, column = 2, columnspan = 4) master0.bprov2 = Button(master0, text='Kasoami', command=self.sulgar2,", "= canvas,file=\"<NAME>.png\") canvas.create_image(5,5, anchor=NW, image=img) text1 = lst[21] text2 = Text(master2, font =", "columnspan = 4) master0.bprov2 = Button(master0, text='Asidah', command=self.riau2, width = 25, height=3) master0.bprov2.grid(row", "command=self.kaltim2, width = 25, height=3) master0.bprov2.grid(row = 1, column = 6, columnspan =", "= PhotoImage(master = canvas,file=\"Belaca Belitung.png\") canvas.create_image(5,5, anchor=NW, image=img) text1 = lst[17] text2 =", "Text(master2, font = \"Arial 12\") text2.insert(INSERT, text1) text2.pack() master2.mainloop() def __prov34(self): master0 =", "kalteng2(self): master2 = Tk() master2.minsize(width = 800, height = 600) master2.maxsize(width = 800,", "text1) text2.pack() master2.mainloop() def __prov24(self): master0 = Tk() 
master0.minsize(width = 450, height =", "column = 2, columnspan = 4) master0.bprov1 = Button(master0, text='Seruit Lampung', command=self.lamp1, width", "= 25) self.bprov18.grid(row = 1, column = 7, columnspan = 4) self.bprov19 =", "def __prov2(self): master0 = Tk() master0.minsize(width = 450, height = 100) master0.maxsize(width =", "self.bprov16.grid(row = 16, column = 3, columnspan = 4) self.bprov17 = Button(self.master, text='Bali',", "for i in range(68): while('<deskripsi>' not in line): line = f.readline() cmp =", "canvas,file=\"Nasi Sumsum.png\") canvas.create_image(5,5, anchor=NW, image=img) text1 = lst[22] text2 = Text(master2, font =", "text2.pack() master2.mainloop() def riau2(self): master2 = Tk() master2.minsize(width = 800, height = 600)", "column = 2, columnspan = 4) master0.bprov2 = Button(master0, text='Jepa', command=self.sulbar2, width =", "= Button(master0, text='Tapa Kolo', command=self.ntt2, width = 25, height=3) master0.bprov2.grid(row = 1, column", "lst[44] text2 = Text(master2, font = \"Arial 12\") text2.insert(INSERT, text1) text2.pack() master2.mainloop() def", "= '' for i in range(68): while('<deskripsi>' not in line): line = f.readline()", "= Button(self.master, text='DI Yogyakarta', command=self.__prov15, width = 25) self.bprov15.grid(row = 15, column =", "\"Arial 12\") text2.insert(INSERT, text1) text2.pack() master2.mainloop() def kaltim2(self): master2 = Tk() master2.minsize(width =", "font = \"Arial 12\") text2.insert(INSERT, text1) text2.pack() master2.mainloop() def __prov16(self): master0 = Tk()", "800, height = 600) master2.maxsize(width = 800, height = 600) master2.title(\"Makanan tradisional Gorontalo\")", "width = 300, height = 300) canvas.pack() img = PhotoImage(master = canvas,file=\"Seruit Lampung.png\")", "3, columnspan = 4) self.bprov4 = Button(self.master, text='Riau', command=self.__prov4, width = 25) self.bprov4.grid(row", "2, columnspan = 4) master0.bprov1 = Button(master0, text='Soto Betawi', command=self.dki1, 
width = 25,", "= 0, column = 2, columnspan = 4) master0.bprov1 = Button(master0, text='Tempoyak', command=self.jambi1,", "lst[31] text2 = Text(master2, font = \"Arial 12\") text2.insert(INSERT, text1) text2.pack() master2.mainloop() def", "= 2, columnspan = 4) master0.bprov1 = Button(master0, text='Lepek Binti', command=self.beng1, width =", "height = 600) master2.title(\"Makanan tradisional Sumatera Selatan\") canvas = Canvas(master2, width = 300,", "PhotoImage(master = canvas,file=\"Sop Konro.png\") canvas.create_image(5,5, anchor=NW, image=img) text1 = lst[56] text2 = Text(master2,", "Button(master0, text='Ampiang Dadiah', command=self.sumbar2, width = 25, height=3) master0.bprov2.grid(row = 1, column =", "master0.bprov1 = Button(master0, text='Sinonggi', command=self.sulgar1, width = 25, height=3) master0.bprov1.grid(row = 1, column", "canvas,file=\"Bubur Paddas Sambas.png\") canvas.create_image(5,5, anchor=NW, image=img) text1 = lst[40] text2 = Text(master2, font", "2, columnspan = 4) master0.bprov1 = Button(master0, text='Tempoyak', command=self.jambi1, width = 25, height=3)", "command=self.malu2, width = 25, height=3) master0.bprov2.grid(row = 1, column = 6, columnspan =", "4) master0.mainloop() def kriau1(self): master2 = Tk() master2.minsize(width = 800, height = 600)", "text2.pack() master2.mainloop() def __prov13(self): master0 = Tk() master0.minsize(width = 450, height = 100)", "PhotoImage(master = canvas,file=\"Padamaran.png\") canvas.create_image(5,5, anchor=NW, image=img) text1 = lst[11] text2 = Text(master2, font", "= 12, column = 7, columnspan = 4) self.bprov30 = Button(self.master, text='Sulawesi Tenggara',", "height=3) master0.bprov2.grid(row = 1, column = 6, columnspan = 4) master0.mainloop() def papua1(self):", "width = 300, height = 300) canvas.pack() img = PhotoImage(master = canvas,file=\"Belaca Belitung.png\")", "self.bprov1 = Button(self.master, text='Aceh', command=self.__prov1, width = 25) self.bprov1.grid(row = 1, column =", "= 4) 
self.bprov31 = Button(self.master, text='Maluku Utara', command=self.__prov31, width = 25) self.bprov31.grid(row =", "text='Kue Timpan', command=self.aceh2, width = 25, height=3) master0.bprov2.grid(row = 1, column = 6,", "= lst[15] text2 = Text(master2, font = \"Arial 12\") text2.insert(INSERT, text1) text2.pack() master2.mainloop()", "canvas.create_image(5,5, anchor=NW, image=img) text1 = lst[20] text2 = Text(master2, font = \"Arial 12\")", "= Button(master0, text='<NAME>', command=self.kalteng2, width = 25, height=3) master0.bprov2.grid(row = 1, column =", "self.bprov15.grid(row = 15, column = 3, columnspan = 4) self.bprov16 = Button(self.master, text='Jawa", "300, height = 300) canvas.pack() img = PhotoImage(master = canvas,file=\"Sate Bulayak.png\") canvas.create_image(5,5, anchor=NW,", "\"Arial 12\") text2.insert(INSERT, text1) text2.pack() master2.mainloop() def diy2(self): master2 = Tk() master2.minsize(width =", "height = 300) canvas.pack() img = PhotoImage(master = canvas,file=\"Bolu Kemojo.png\") canvas.create_image(5,5, anchor=NW, image=img)", "PhotoImage(master = canvas,file=\"Pempek Palembang.png\") canvas.create_image(5,5, anchor=NW, image=img) text1 = lst[14] text2 = Text(master2,", "= 2, columnspan = 4) master0.bprov2 = Button(master0, text='Gong gong', command=self.kriau2, width =", "= 800, height = 600) master2.title(\"Makanan tradisional Jawa TEngah\") canvas = Canvas(master2, width", "master2.maxsize(width = 800, height = 600) master2.title(\"Makanan tradisional Sumatera Barat\") canvas = Canvas(master2,", "= Button(master0, text='Gong gong', command=self.kriau2, width = 25, height=3) master0.bprov2.grid(row = 1, column", "\"Arial 12\") text2.insert(INSERT, text1) text2.pack() master2.mainloop() def ntb2(self): master2 = Tk() master2.minsize(width =", "4) master0.bprov2 = Button(master0, text='Jadah Tempe', command=self.diy2, width = 25, height=3) master0.bprov2.grid(row =", "canvas.create_image(5,5, anchor=NW, image=img) text1 = lst[29] text2 
= Text(master2, font = \"Arial 12\")", "kalut2(self): master2 = Tk() master2.minsize(width = 800, height = 600) master2.maxsize(width = 800,", "column = 6, columnspan = 4) master0.mainloop() def papua1(self): master2 = Tk() master2.minsize(width", "font = \"Arial 12\") text2.insert(INSERT, text1) text2.pack() master2.mainloop() def __prov14(self): master0 = Tk()", "lst[42] text2 = Text(master2, font = \"Arial 12\") text2.insert(INSERT, text1) text2.pack() master2.mainloop() def", "Tk() master2.minsize(width = 800, height = 600) master2.maxsize(width = 800, height = 600)", "4) master0.mainloop() def malu1(self): master2 = Tk() master2.minsize(width = 800, height = 600)", "= canvas,file=\"Ayam Betutu.png\") canvas.create_image(5,5, anchor=NW, image=img) text1 = lst[33] text2 = Text(master2, font", "master0.bprov1 = Button(master0, text='<NAME>', command=self.kaltim1, width = 25, height=3) master0.bprov1.grid(row = 1, column", "= lst[62] text2 = Text(master2, font = \"Arial 12\") text2.insert(INSERT, text1) text2.pack() master2.mainloop()", "3, columnspan = 4) self.bprov10 = Button(self.master, text='Lampung', command=self.__prov10, width = 25) self.bprov10.grid(row", "800, height = 600) master2.maxsize(width = 800, height = 600) master2.title(\"Makanan tradisional Papua", "height=3) master0.bprov2.grid(row = 1, column = 6, columnspan = 4) master0.mainloop() def kaltim1(self):", "canvas.pack() img = PhotoImage(master = canvas,file=\"Udang Selingkuh.png\") canvas.create_image(5,5, anchor=NW, image=img) text1 = lst[66]", "= 2, columnspan = 4) master0.bprov1 = Button(master0, text='Rendang', command=self.sumbar1, width = 25,", "text2 = Text(master2, font = \"Arial 12\") text2.insert(INSERT, text1) text2.pack() master2.mainloop() def riau2(self):", "= 4) self.bprov25 = Button(self.master, text='Gorontalo', command=self.__prov25, width = 25) self.bprov25.grid(row = 8,", "4) master0.bprov1 = Button(master0, text='<NAME>', command=self.jabar1, width = 25, height=3) 
master0.bprov1.grid(row = 1,", "\"Arial 12\") text2.insert(INSERT, text1) text2.pack() master2.mainloop() def __prov5(self): master0 = Tk() master0.minsize(width =", "= 6, columnspan = 4) master0.mainloop() def sulgar1(self): master2 = Tk() master2.minsize(width =", "= 4) self.bprov34 = Button(self.master, text='Papua', command=self.__prov34, width = 25) self.bprov34.grid(row = 17,", "width = 25) self.bprov21.grid(row = 4, column = 7, columnspan = 4) self.bprov22", "tradisional Aceh\") canvas = Canvas(master2, width = 300, height = 300) canvas.pack() img", "command=self.beng1, width = 25, height=3) master0.bprov1.grid(row = 1, column = 2, columnspan =", "= lst[37] text2 = Text(master2, font = \"Arial 12\") text2.insert(INSERT, text1) text2.pack() master2.mainloop()", "0, column = 2, columnspan = 4) master0.bprov1 = Button(master0, text='Bubur Paddas Sambas',", "= Text(master2, font = \"Arial 12\") text2.insert(INSERT, text1) text2.pack() master2.mainloop() def kalbar2(self): master2", "300, height = 300) canvas.pack() img = PhotoImage(master = canvas,file=\"Soto Kudus.png\") canvas.create_image(5,5, anchor=NW,", "= 300, height = 300) canvas.pack() img = PhotoImage(master = canvas,file=\"Luti Gendang.png\") canvas.create_image(5,5,", "anchor=NW, image=img) text1 = lst[40] text2 = Text(master2, font = \"Arial 12\") text2.insert(INSERT,", "Button(master0, text='Tiwul', command=self.diy1, width = 25, height=3) master0.bprov1.grid(row = 1, column = 2,", "\"Arial 12\") text2.insert(INSERT, text1) text2.pack() master2.mainloop() def __prov24(self): master0 = Tk() master0.minsize(width =", "= 300) canvas.pack() img = PhotoImage(master = canvas,file=\"Klapertaart.png\") canvas.create_image(5,5, anchor=NW, image=img) text1 =", "0, column = 2, columnspan = 4) master0.bprov1 = Button(master0, text='Uta Kelo', command=self.sulteng1,", "= Text(master2, font = \"Arial 12\") text2.insert(INSERT, text1) text2.pack() master2.mainloop() def jambi2(self): master2", "1, column = 2, 
columnspan = 4) master0.bprov2 = Button(master0, text='Sate Ulat Sagu',", "Button(master0, text='Gulai Taboh', command=self.lamp2, width = 25, height=3) master0.bprov2.grid(row = 1, column =", "command=self.__prov22, width = 25) self.bprov22.grid(row = 5, column = 7, columnspan = 4)", "= PhotoImage(master = canvas,file=\"Jadah Tempe.png\") canvas.create_image(5,5, anchor=NW, image=img) text1 = lst[27] text2 =", "12\") text2.insert(INSERT, text1) text2.pack() master2.mainloop() def sumsel2(self): master2 = Tk() master2.minsize(width = 800,", "= 3, columnspan = 4) self.bprov2 = Button(self.master, text='Sumatera Utara', command=self.__prov2, width =", "master0.bprov2.grid(row = 1, column = 6, columnspan = 4) master0.mainloop() def pabar1(self): master2", "4) master0.mainloop() def pabar1(self): master2 = Tk() master2.minsize(width = 800, height = 600)", "column = 7, columnspan = 4) self.bprov30 = Button(self.master, text='Sulawesi Tenggara', command=self.__prov30, width", "text2.insert(INSERT, text1) text2.pack() master2.mainloop() def aceh2(self): master2 = Tk() master2.minsize(width = 800, height", "command=self.__prov31, width = 25) self.bprov31.grid(row = 14, column = 7, columnspan = 4)", "anchor=NW, image=img) text1 = lst[10] text2 = Text(master2, font = \"Arial 12\") text2.insert(INSERT,", "text2.insert(INSERT, text1) text2.pack() master2.mainloop() def __prov12(self): master0 = Tk() master0.minsize(width = 450, height", "2, columnspan = 4) master0.bprov1 = Button(master0, text='Nasi Sumsum', command=self.ban1, width = 25,", "Barat', command=self.__prov33, width = 25) self.bprov33.grid(row = 16, column = 7, columnspan =", "= Text(master2, font = \"Arial 12\") text2.insert(INSERT, text1) text2.pack() master2.mainloop() def __prov7(self): master0", "text='Kalimantan Barat', command=self.__prov21, width = 25) self.bprov21.grid(row = 4, column = 7, columnspan", "master0.mainloop() def jambi1(self): master2 = Tk() master2.minsize(width = 800, height = 600) 
master2.maxsize(width", "canvas.pack() img = PhotoImage(master = canvas,file=\"Martabak Bangka.png\") canvas.create_image(5,5, anchor=NW, image=img) text1 = lst[16]", "master0.bprov2.grid(row = 1, column = 6, columnspan = 4) master0.mainloop() def lamp1(self): master2", "Sagu', command=self.pabar2, width = 25, height=3) master0.bprov2.grid(row = 1, column = 6, columnspan", "sumut2(self): master2 = Tk() master2.minsize(width = 800, height = 600) master2.maxsize(width = 800,", "800, height = 600) master2.title(\"Makanan tradisional Bangka Belitung\") canvas = Canvas(master2, width =", "canvas,file=\"Rujak Cingur.png\") canvas.create_image(5,5, anchor=NW, image=img) text1 = lst[30] text2 = Text(master2, font =", "= \"Arial 12\") text2.insert(INSERT, text1) text2.pack() master2.mainloop() def jateng2(self): master2 = Tk() master2.minsize(width", "height=3) master0.bprov2.grid(row = 1, column = 6, columnspan = 4) master0.mainloop() def kalut1(self):", "Button(self.master, text='Jawa Tengah', command=self.__prov14, width = 25) self.bprov14.grid(row = 14, column = 3,", "= Text(master2, font = \"Arial 12\") text2.insert(INSERT, text1) text2.pack() master2.mainloop() def __prov26(self): master0", "4) master0.mainloop() def sulut1(self): master2 = Tk() master2.minsize(width = 800, height = 600)", "= canvas,file=\"Sate Lilit.png\") canvas.create_image(5,5, anchor=NW, image=img) text1 = lst[32] text2 = Text(master2, font", "master0.mainloop() def papua1(self): master2 = Tk() master2.minsize(width = 800, height = 600) master2.maxsize(width", "self.bprov12 = Button(self.master, text='Jawa Barat', command=self.__prov12, width = 25) self.bprov12.grid(row = 12, column", "Sumsum.png\") canvas.create_image(5,5, anchor=NW, image=img) text1 = lst[22] text2 = Text(master2, font = \"Arial", "1, column = 7, columnspan = 4) self.bprov19 = Button(self.master, text='NTT', command=self.__prov19, width", "master2.mainloop() def __prov4(self): master0 = Tk() master0.minsize(width = 450, height 
= 100) master0.maxsize(width", "= canvas,file=\"Kasoami.png\") canvas.create_image(5,5, anchor=NW, image=img) text1 = lst[59] text2 = Text(master2, font =", "master2.maxsize(width = 800, height = 600) master2.title(\"Makanan tradisional Gorontalo\") canvas = Canvas(master2, width", "columnspan = 4) self.bprov29 = Button(self.master, text='Sulawesi Selatan', command=self.__prov29, width = 25) self.bprov29.grid(row", "height = 600) master2.title(\"Makanan tradisional Bangka Belitung\") canvas = Canvas(master2, width = 300,", "columnspan = 4) master0.bprov2 = Button(master0, text='Jepa', command=self.sulbar2, width = 25, height=3) master0.bprov2.grid(row", "300) canvas.pack() img = PhotoImage(master = canvas,file=\"Nasi Sumsum.png\") canvas.create_image(5,5, anchor=NW, image=img) text1 =", "= 1, column = 2, columnspan = 4) master0.bprov2 = Button(master0, text='Kohu-kohu', command=self.malu2,", "4) master0.bprov2 = Button(master0, text='<NAME>', command=self.malut2, width = 25, height=3) master0.bprov2.grid(row = 1,", "= PhotoImage(master = canvas,file=\"Seruit Lampung.png\") canvas.create_image(5,5, anchor=NW, image=img) text1 = lst[18] text2 =", "columnspan = 4) master0.bprov1 = Button(master0, text='Manday', command=self.kalsel1, width = 25, height=3) master0.bprov1.grid(row", "6, columnspan = 4) master0.mainloop() def sumsel1(self): master2 = Tk() master2.minsize(width = 800,", "= 600) master2.title(\"Makanan tradisional Jawa TEngah\") canvas = Canvas(master2, width = 300, height", "6, columnspan = 4) master0.mainloop() def sulbar1(self): master2 = Tk() master2.minsize(width = 800,", "anchor=NW, image=img) text1 = lst[19] text2 = Text(master2, font = \"Arial 12\") text2.insert(INSERT,", "= 4) self.bprov21 = Button(self.master, text='Kalimantan Barat', command=self.__prov21, width = 25) self.bprov21.grid(row =", "= 25) self.bprov27.grid(row = 10, column = 7, columnspan = 4) self.bprov28 =", "= 1, column = 6, columnspan = 4) master0.mainloop() def sulbar1(self): 
master2 =", "= 2, columnspan = 4) master0.bprov1 = Button(master0, text='Manday', command=self.kalsel1, width = 25,", "text1) text2.pack() master2.mainloop() def __prov34(self): master0 = Tk() master0.minsize(width = 450, height =", "__prov19(self): master0 = Tk() master0.minsize(width = 450, height = 100) master0.maxsize(width = 450,", "Tradisional)\") self.master.judul = Label(self.master, text = \"Pilih provinsi yang ingin anda ketahui\", font", "text='Gorontalo', command=self.__prov25, width = 25) self.bprov25.grid(row = 8, column = 7, columnspan =", "= 6, columnspan = 4) master0.mainloop() def sumsel1(self): master2 = Tk() master2.minsize(width =", "6, columnspan = 4) master0.mainloop() def sulsel1(self): master2 = Tk() master2.minsize(width = 800,", "= Button(self.master, text='Jawa Tengah', command=self.__prov14, width = 25) self.bprov14.grid(row = 14, column =", "self.bprov27.grid(row = 10, column = 7, columnspan = 4) self.bprov28 = Button(self.master, text='Sulawesi", "12\") text2.insert(INSERT, text1) text2.pack() master2.mainloop() def diy2(self): master2 = Tk() master2.minsize(width = 800,", "= Text(master2, font = \"Arial 12\") text2.insert(INSERT, text1) text2.pack() master2.mainloop() def __prov29(self): master0", "\"Arial 12\") text2.insert(INSERT, text1) text2.pack() master2.mainloop() def __prov7(self): master0 = Tk() master0.minsize(width =", "with open('all.txt', 'r') as f: line = '' for i in range(68): while('<deskripsi>'", "self.bprov18.grid(row = 1, column = 7, columnspan = 4) self.bprov19 = Button(self.master, text='NTT',", "width = 300, height = 300) canvas.pack() img = PhotoImage(master = canvas,file=\"Sate Ulat", "canvas.create_image(5,5, anchor=NW, image=img) text1 = lst[18] text2 = Text(master2, font = \"Arial 12\")", "PhotoImage(master = canvas,file=\"Kepiting Soka.png\") canvas.create_image(5,5, anchor=NW, image=img) text1 = lst[39] text2 = Text(master2,", "master0.bprov1.grid(row = 1, column = 2, columnspan = 4) master0.bprov2 = 
Button(master0, text='Jadah", "text1) text2.pack() master2.mainloop() def __prov20(self): master0 = Tk() master0.minsize(width = 450, height =", "1, column = 6, columnspan = 4) master0.mainloop() def sulut1(self): master2 = Tk()", "= \"Arial 16 bold\") master0.judul.grid(row = 0, column = 2, columnspan = 4)", "12\") text2.insert(INSERT, text1) text2.pack() master2.mainloop() def dki2(self): master2 = Tk() master2.minsize(width = 800,", "= PhotoImage(master = canvas,file=\"Nasi Sumsum.png\") canvas.create_image(5,5, anchor=NW, image=img) text1 = lst[22] text2 =", "= 8, column = 7, columnspan = 4) self.bprov26 = Button(self.master, text='Sulawesi Utara',", "master0.judul.grid(row = 0, column = 2, columnspan = 4) master0.bprov1 = Button(master0, text='Kalumpe',", "'' txt = '' while('<end>' not in cmp): txt += cmp cmp =", "width = 25) self.bprov28.grid(row = 11, column = 7, columnspan = 4) self.bprov29", "= Text(master2, font = \"Arial 12\") text2.insert(INSERT, text1) text2.pack() master2.mainloop() def __prov2(self): master0", "master2.title(\"Makanan tradisional Sulawesi Utara\") canvas = Canvas(master2, width = 300, height = 300)", "= PhotoImage(master = canvas,file=\"Ikan Bakar Manokwari.png\") canvas.create_image(5,5, anchor=NW, image=img) text1 = lst[64] text2", "font = \"Arial 12\") text2.insert(INSERT, text1) text2.pack() master2.mainloop() def kalteng2(self): master2 = Tk()", "= 300) canvas.pack() img = PhotoImage(master = canvas,file=\"Udang Selingkuh.png\") canvas.create_image(5,5, anchor=NW, image=img) text1", "height = 600) master2.title(\"Makanan tradisional NTB\") canvas = Canvas(master2, width = 300, height", "image=img) text1 = lst[49] text2 = Text(master2, font = \"Arial 12\") text2.insert(INSERT, text1)", "self.bprov17 = Button(self.master, text='Bali', command=self.__prov17, width = 25) self.bprov17.grid(row = 17, column =", "canvas.create_image(5,5, anchor=NW, image=img) text1 = lst[22] text2 = Text(master2, font = \"Arial 12\")", "def 
sulsel1(self): master2 = Tk() master2.minsize(width = 800, height = 600) master2.maxsize(width =", "column = 2, columnspan = 4) master0.bprov2 = Button(master0, text='Dorokdok', command=self.jabar2, width =", "Canvas(master2, width = 300, height = 300) canvas.pack() img = PhotoImage(master = canvas,file=\"Jepa.png\")", "\"Arial 12\") text2.insert(INSERT, text1) text2.pack() master2.mainloop() def __prov32(self): master0 = Tk() master0.minsize(width =", "= Text(master2, font = \"Arial 12\") text2.insert(INSERT, text1) text2.pack() master2.mainloop() def bali2(self): master2", "text1) text2.pack() master2.mainloop() def __prov28(self): master0 = Tk() master0.minsize(width = 450, height =", "command=self.ntt1, width = 25, height=3) master0.bprov1.grid(row = 1, column = 2, columnspan =", "columnspan = 4) master0.bprov1 = Button(master0, text='<NAME>', command=self.kaltim1, width = 25, height=3) master0.bprov1.grid(row", "= Button(master0, text='<NAME>', command=self.papua1, width = 25, height=3) master0.bprov1.grid(row = 1, column =", "= 2, columnspan = 4) master0.bprov2 = Button(master0, text='Asidah', command=self.riau2, width = 25,", "300) canvas.pack() img = PhotoImage(master = canvas,file=\"<NAME>.png\") canvas.create_image(5,5, anchor=NW, image=img) text1 = lst[24]", "= Label(master0, text = \"Pilih makanan :)\", font = \"Arial 16 bold\") master0.judul.grid(row", "canvas.pack() img = PhotoImage(master = canvas,file=\"Dekke Na Niura.png\") canvas.create_image(5,5, anchor=NW, image=img) text1 =", "tradisional Kalimantan Utara\") canvas = Canvas(master2, width = 300, height = 300) canvas.pack()", "Button(self.master, text='Kalimantan Selatan', command=self.__prov23, width = 25) self.bprov23.grid(row = 6, column = 7,", "= canvas,file=\"Sop Konro.png\") canvas.create_image(5,5, anchor=NW, image=img) text1 = lst[56] text2 = Text(master2, font", "master2.maxsize(width = 800, height = 600) master2.title(\"Makanan tradisional Kalimantan Barat\") canvas = Canvas(master2,", 
"master2.mainloop() def ntt2(self): master2 = Tk() master2.minsize(width = 800, height = 600) master2.maxsize(width", "300, height = 300) canvas.pack() img = PhotoImage(master = canvas,file=\"Papeda.png\") canvas.create_image(5,5, anchor=NW, image=img)", "= lst[61] text2 = Text(master2, font = \"Arial 12\") text2.insert(INSERT, text1) text2.pack() master2.mainloop()", "canvas.create_image(5,5, anchor=NW, image=img) text1 = lst[64] text2 = Text(master2, font = \"Arial 12\")", "self.bprov14.grid(row = 14, column = 3, columnspan = 4) self.bprov15 = Button(self.master, text='DI", "column = 6, columnspan = 4) master0.mainloop() def jatim1(self): master2 = Tk() master2.minsize(width", "4) master0.bprov2 = Button(master0, text='Kaledo', command=self.sulteng2, width = 25, height=3) master0.bprov2.grid(row = 1,", "\"Arial 12\") text2.insert(INSERT, text1) text2.pack() master2.mainloop() def sulgar2(self): master2 = Tk() master2.minsize(width =", "= 2, columnspan = 4) master0.bprov2 = Button(master0, text='Papeda', command=self.papua2, width = 25,", "lst[12] text2 = Text(master2, font = \"Arial 12\") text2.insert(INSERT, text1) text2.pack() master2.mainloop() def", "Text(master2, font = \"Arial 12\") text2.insert(INSERT, text1) text2.pack() master2.mainloop() def sumut2(self): master2 =", "= Button(master0, text='Bilenthango', command=self.goron2, width = 25, height=3) master0.bprov2.grid(row = 1, column =", "= 4) master0.mainloop() def diy1(self): master2 = Tk() master2.minsize(width = 800, height =", "columnspan = 4) master0.bprov2 = Button(master0, text='Tinutuan', command=self.sulut2, width = 25, height=3) master0.bprov2.grid(row", "= 300, height = 300) canvas.pack() img = PhotoImage(master = canvas,file=\"Ampiang Dadiah.png\") canvas.create_image(5,5,", "self.bprov10.grid(row = 10, column = 3, columnspan = 4) self.bprov11 = Button(self.master, text='Banten',", "= canvas,file=\"Mie Aceh.png\") canvas.create_image(5,5, anchor=NW, image=img) text1 = lst[0] text2 = 
Text(master2, font", "canvas,file=\"Mie Aceh.png\") canvas.create_image(5,5, anchor=NW, image=img) text1 = lst[0] text2 = Text(master2, font =", "master2.title(\"Makanan tradisional Bengkulu\") canvas = Canvas(master2, width = 300, height = 300) canvas.pack()", "text='<NAME>', command=self.kalut2, width = 25, height=3) master0.bprov2.grid(row = 1, column = 6, columnspan", "1, column = 2, columnspan = 4) master0.bprov2 = Button(master0, text='Gulai Taboh', command=self.lamp2,", "600) master2.title(\"Makanan tradisional NTT\") canvas = Canvas(master2, width = 300, height = 300)", "anchor=NW, image=img) text1 = lst[45] text2 = Text(master2, font = \"Arial 12\") text2.insert(INSERT,", "Dadiah.png\") canvas.create_image(5,5, anchor=NW, image=img) text1 = lst[5] text2 = Text(master2, font = \"Arial", "4) master0.bprov1 = Button(master0, text='Apang Bugis', command=self.sulbar1, width = 25, height=3) master0.bprov1.grid(row =", "def riau2(self): master2 = Tk() master2.minsize(width = 800, height = 600) master2.maxsize(width =", "1, column = 6, columnspan = 4) master0.mainloop() def jatim1(self): master2 = Tk()", "= 1, column = 6, columnspan = 4) master0.mainloop() def dki1(self): master2 =", "master0.bprov2.grid(row = 1, column = 6, columnspan = 4) master0.mainloop() def bali1(self): master2", "columnspan = 4) master0.bprov2 = Button(master0, text='Bagar Hiu', command=self.beng2, width = 25, height=3)", "12\") text2.insert(INSERT, text1) text2.pack() master2.mainloop() def __prov17(self): master0 = Tk() master0.minsize(width = 450,", "font = \"Arial 12\") text2.insert(INSERT, text1) text2.pack() master2.mainloop() def lamp2(self): master2 = Tk()", "= 600) master2.maxsize(width = 800, height = 600) master2.title(\"Makanan tradisional Jawa Timur\") canvas", "canvas.pack() img = PhotoImage(master = canvas,file=\"Gangan Asam Banjar.png\") canvas.create_image(5,5, anchor=NW, image=img) text1 =", "800, height = 600) master2.title(\"Makanan tradisional Sulawesi Tenggara\") 
canvas = Canvas(master2, width =", "text2.insert(INSERT, text1) text2.pack() master2.mainloop() def __prov24(self): master0 = Tk() master0.minsize(width = 450, height", "= 300) canvas.pack() img = PhotoImage(master = canvas,file=\"Kasoami.png\") canvas.create_image(5,5, anchor=NW, image=img) text1 =", "4) master0.bprov2 = Button(master0, text='Papeda', command=self.papua2, width = 25, height=3) master0.bprov2.grid(row = 1,", "Konro.png\") canvas.create_image(5,5, anchor=NW, image=img) text1 = lst[56] text2 = Text(master2, font = \"Arial", "height = 600) master2.title(\"Makanan tradisional DKI Jakarta\") canvas = Canvas(master2, width = 300,", "lamp2(self): master2 = Tk() master2.minsize(width = 800, height = 600) master2.maxsize(width = 800,", "= 2, columnspan = 4) master0.bprov1 = Button(master0, text='<NAME>', command=self.kbang1, width = 25,", "= canvas,file=\"Manday.png\") canvas.create_image(5,5, anchor=NW, image=img) text1 = lst[44] text2 = Text(master2, font =", "canvas,file=\"Sate Lilit.png\") canvas.create_image(5,5, anchor=NW, image=img) text1 = lst[32] text2 = Text(master2, font =", "\"Arial 12\") text2.insert(INSERT, text1) text2.pack() master2.mainloop() def __prov8(self): master0 = Tk() master0.minsize(width =", "= \"Arial 12\") text2.insert(INSERT, text1) text2.pack() master2.mainloop() def riau2(self): master2 = Tk() master2.minsize(width", "Text(master2, font = \"Arial 12\") text2.insert(INSERT, text1) text2.pack() master2.mainloop() def __prov21(self): master0 =", "column = 2, columnspan = 4) master0.bprov1 = Button(master0, text='Kalumpe', command=self.kalteng1, width =", "img = PhotoImage(master = canvas,file=\"Ayam Cincane.png\") canvas.create_image(5,5, anchor=NW, image=img) text1 = lst[46] text2", "1, column = 6, columnspan = 4) master0.mainloop() def pabar1(self): master2 = Tk()", "columnspan = 4) master0.bprov1 = Button(master0, text=\"Se'i\", command=self.ntt1, width = 25, height=3) master0.bprov1.grid(row", "font = \"Arial 12\") 
text2.insert(INSERT, text1) text2.pack() master2.mainloop() def kalsel2(self): master2 = Tk()", "master0.bprov1 = Button(master0, text='Seruit Lampung', command=self.lamp1, width = 25, height=3) master0.bprov1.grid(row = 1,", "800, height = 600) master2.title(\"Makanan tradisional Lampung\") canvas = Canvas(master2, width = 300,", "Canvas(master2, width = 300, height = 300) canvas.pack() img = PhotoImage(master = canvas,file=\"Nasi", "command=self.sumsel2, width = 25, height=3) master0.bprov2.grid(row = 1, column = 6, columnspan =", "= 300) canvas.pack() img = PhotoImage(master = canvas,file=\"Pempek Palembang.png\") canvas.create_image(5,5, anchor=NW, image=img) text1", "300) canvas.pack() img = PhotoImage(master = canvas,file=\"Tiwul.png\") canvas.create_image(5,5, anchor=NW, image=img) text1 = lst[26]", "width = 300, height = 300) canvas.pack() img = PhotoImage(master = canvas,file=\"Bilenthango.png\") canvas.create_image(5,5,", "command=self.__prov14, width = 25) self.bprov14.grid(row = 14, column = 3, columnspan = 4)", "= lst[57] text2 = Text(master2, font = \"Arial 12\") text2.insert(INSERT, text1) text2.pack() master2.mainloop()", "master2.title(\"Makanan tradisional Papua\") canvas = Canvas(master2, width = 300, height = 300) canvas.pack()", "= 6, columnspan = 4) master0.mainloop() def kaltim1(self): master2 = Tk() master2.minsize(width =", "Text(master2, font = \"Arial 12\") text2.insert(INSERT, text1) text2.pack() master2.mainloop() def dki2(self): master2 =", "PhotoImage(master = canvas,file=\"<NAME>.png\") canvas.create_image(5,5, anchor=NW, image=img) text1 = lst[24] text2 = Text(master2, font", "800, height = 600) master2.title(\"Makanan tradisional Sulawesi Barat \") canvas = Canvas(master2, width", "img = PhotoImage(master = canvas,file=\"Mie Aceh.png\") canvas.create_image(5,5, anchor=NW, image=img) text1 = lst[0] text2", "lst[67] text2 = Text(master2, font = \"Arial 12\") text2.insert(INSERT, text1) text2.pack() master2.mainloop() if", 
"PhotoImage(master = canvas,file=\"Ayam Betutu.png\") canvas.create_image(5,5, anchor=NW, image=img) text1 = lst[33] text2 = Text(master2,", "height = 300) canvas.pack() img = PhotoImage(master = canvas,file=\"Sate Bandeng.png\") canvas.create_image(5,5, anchor=NW, image=img)", "= 800, height = 600) master2.title(\"Makanan tradisional Sulawesi Selatan\") canvas = Canvas(master2, width", "4) self.master.mainloop() def __prov1(self): master0 = Tk() master0.minsize(width = 450, height = 100)", "text='Sulawesi Barat', command=self.__prov27, width = 25) self.bprov27.grid(row = 10, column = 7, columnspan", "12\") text2.insert(INSERT, text1) text2.pack() master2.mainloop() def __prov21(self): master0 = Tk() master0.minsize(width = 450,", "tradisional Sumatera Barat\") canvas = Canvas(master2, width = 300, height = 300) canvas.pack()", "Dadiah', command=self.sumbar2, width = 25, height=3) master0.bprov2.grid(row = 1, column = 6, columnspan", "= \"Arial 12\") text2.insert(INSERT, text1) text2.pack() master2.mainloop() def sulteng2(self): master2 = Tk() master2.minsize(width", "= 1, column = 2, columnspan = 4) master0.bprov2 = Button(master0, text='Ampiang Dadiah',", "= \"Arial 12\") text2.insert(INSERT, text1) text2.pack() master2.mainloop() def __prov6(self): master0 = Tk() master0.minsize(width", "master2.title(\"Makanan tradisional Sulawesi Barat \") canvas = Canvas(master2, width = 300, height =", "font = \"Arial 12\") text2.insert(INSERT, text1) text2.pack() master2.mainloop() def sumbar2(self): master2 = Tk()", "= PhotoImage(master = canvas,file=\"Rawon.png\") canvas.create_image(5,5, anchor=NW, image=img) text1 = lst[31] text2 = Text(master2,", "= 1, column = 6, columnspan = 4) master0.mainloop() def ntt1(self): master2 =", "master2.maxsize(width = 800, height = 600) master2.title(\"Makanan tradisional Papua\") canvas = Canvas(master2, width", "def beng2(self): master2 = Tk() master2.minsize(width = 800, height = 600) master2.maxsize(width =", "text2 = Text(master2, 
font = \"Arial 12\") text2.insert(INSERT, text1) text2.pack() master2.mainloop() def __prov9(self):", "height = 600) master2.maxsize(width = 800, height = 600) master2.title(\"Makanan tradisional Sulawesi Tenggara\")", "self.bprov34 = Button(self.master, text='Papua', command=self.__prov34, width = 25) self.bprov34.grid(row = 17, column =", "Tengah', command=self.__prov14, width = 25) self.bprov14.grid(row = 14, column = 3, columnspan =", "= 2, columnspan = 4) master0.bprov2 = Button(master0, text='<NAME>', command=self.jambi2, width = 25,", "= 4) master0.bprov2 = Button(master0, text='<NAME>', command=self.jambi2, width = 25, height=3) master0.bprov2.grid(row =", "= 800, height = 600) master2.title(\"Makanan tradisional Bali\") canvas = Canvas(master2, width =", "height = 600) master2.title(\"Makanan tradisional Kalimantan Utara\") canvas = Canvas(master2, width = 300,", "img = PhotoImage(master = canvas,file=\"Gulai Taboh.png\") canvas.create_image(5,5, anchor=NW, image=img) text1 = lst[19] text2", "master0.bprov1 = Button(master0, text='Soto Kudus', command=self.jateng1, width = 25, height=3) master0.bprov1.grid(row = 1,", "kalbar1(self): master2 = Tk() master2.minsize(width = 800, height = 600) master2.maxsize(width = 800,", "height = 600) master2.title(\"Makanan tradisional DI Yogyakarta\") canvas = Canvas(master2, width = 300,", "\"Arial 12\") text2.insert(INSERT, text1) text2.pack() master2.mainloop() def jambi2(self): master2 = Tk() master2.minsize(width =", "master2.mainloop() def ban2(self): master2 = Tk() master2.minsize(width = 800, height = 600) master2.maxsize(width", "12\") text2.insert(INSERT, text1) text2.pack() master2.mainloop() def malu2(self): master2 = Tk() master2.minsize(width = 800,", "image=img) text1 = lst[46] text2 = Text(master2, font = \"Arial 12\") text2.insert(INSERT, text1)", "PhotoImage(master = canvas,file=\"Ayam Cincane.png\") canvas.create_image(5,5, anchor=NW, image=img) text1 = lst[46] text2 = Text(master2,", "= 3, columnspan 
= 4) self.bprov5 = Button(self.master, text='Kepulauan Riau', command=self.__prov5, width =", "canvas,file=\"Bolu Kemojo.png\") canvas.create_image(5,5, anchor=NW, image=img) text1 = lst[6] text2 = Text(master2, font =", "= 4) master0.mainloop() def lamp1(self): master2 = Tk() master2.minsize(width = 800, height =", "4) master0.mainloop() def kalteng1(self): master2 = Tk() master2.minsize(width = 800, height = 600)", "2, columnspan = 4) master0.bprov2 = Button(master0, text='<NAME>', command=self.dki2, width = 25, height=3)", "canvas.create_image(5,5, anchor=NW, image=img) text1 = lst[34] text2 = Text(master2, font = \"Arial 12\")", "master2.mainloop() def __prov12(self): master0 = Tk() master0.minsize(width = 450, height = 100) master0.maxsize(width", "master2.mainloop() def sumut2(self): master2 = Tk() master2.minsize(width = 800, height = 600) master2.maxsize(width", "Button(master0, text='<NAME>', command=self.bali1, width = 25, height=3) master0.bprov1.grid(row = 1, column = 2,", "= 300) canvas.pack() img = PhotoImage(master = canvas,file=\"Rujak Cingur.png\") canvas.create_image(5,5, anchor=NW, image=img) text1", "= Text(master2, font = \"Arial 12\") text2.insert(INSERT, text1) text2.pack() master2.mainloop() def __prov20(self): master0", "= 300, height = 300) canvas.pack() img = PhotoImage(master = canvas,file=\"<NAME>oyak.png\") canvas.create_image(5,5, anchor=NW,", "width = 300, height = 300) canvas.pack() img = PhotoImage(master = canvas,file=\"Jepa.png\") canvas.create_image(5,5,", "columnspan = 4) master0.bprov1 = Button(master0, text='<NAME>', command=self.kriau1, width = 25, height=3) master0.bprov1.grid(row", "text1) text2.pack() master2.mainloop() def __prov14(self): master0 = Tk() master0.minsize(width = 450, height =", "canvas,file=\"Ayam Taliwang.png\") canvas.create_image(5,5, anchor=NW, image=img) text1 = lst[34] text2 = Text(master2, font =", "= 300, height = 300) canvas.pack() img = PhotoImage(master = canvas,file=\"Belaca Belitung.png\") 
canvas.create_image(5,5,", "text2.insert(INSERT, text1) text2.pack() master2.mainloop() def __prov4(self): master0 = Tk() master0.minsize(width = 450, height", "master0.bprov1 = Button(master0, text='Sambal Colo-colo', command=self.malu1, width = 25, height=3) master0.bprov1.grid(row = 1,", "= 300) canvas.pack() img = PhotoImage(master = canvas,file=\"<NAME>oyak.png\") canvas.create_image(5,5, anchor=NW, image=img) text1 =", "= 2, column = 7, columnspan = 4) self.bprov20 = Button(self.master, text='Kalimantan Utara',", "\"Arial 12\") text2.insert(INSERT, text1) text2.pack() master2.mainloop() def sumsel2(self): master2 = Tk() master2.minsize(width =", "600) master2.maxsize(width = 800, height = 600) master2.title(\"Makanan tradisional Jambi\") canvas = Canvas(master2,", "= 3, column = 3, columnspan = 4) self.bprov4 = Button(self.master, text='Riau', command=self.__prov4,", "text2 = Text(master2, font = \"Arial 12\") text2.insert(INSERT, text1) text2.pack() master2.mainloop() def bali2(self):", "= 17, column = 7, columnspan = 4) self.master.mainloop() def __prov1(self): master0 =", "height = 300) canvas.pack() img = PhotoImage(master = canvas,file=\"Tinutuan.png\") canvas.create_image(5,5, anchor=NW, image=img) text1", "= 0, column = 3, columnspan = 8) self.bprov1 = Button(self.master, text='Aceh', command=self.__prov1,", "= \"Pilih makanan :)\", font = \"Arial 16 bold\") master0.judul.grid(row = 0, column", "300) canvas.pack() img = PhotoImage(master = canvas,file=\"Kue Timpan.png\") canvas.create_image(5,5, anchor=NW, image=img) text1 =", "= 6, columnspan = 4) master0.mainloop() def sumbar1(self): master2 = Tk() master2.minsize(width =", "2, columnspan = 4) master0.bprov1 = Button(master0, text='<NAME>', command=self.riau1, width = 25, height=3)", "= lst[3] text2 = Text(master2, font = \"Arial 12\") text2.insert(INSERT, text1) text2.pack() master2.mainloop()", "4) master0.mainloop() def sumbar1(self): master2 = Tk() master2.minsize(width = 800, height = 600)", 
"image=img) text1 = lst[30] text2 = Text(master2, font = \"Arial 12\") text2.insert(INSERT, text1)", "img = PhotoImage(master = canvas,file=\"Gangan Asam Banjar.png\") canvas.create_image(5,5, anchor=NW, image=img) text1 = lst[45]", "= Button(master0, text='Bagar Hiu', command=self.beng2, width = 25, height=3) master0.bprov2.grid(row = 1, column", "300, height = 300) canvas.pack() img = PhotoImage(master = canvas,file=\"Sinonggi.png\") canvas.create_image(5,5, anchor=NW, image=img)", "= Button(self.master, text='Bengukulu', command=self.__prov7, width = 25) self.bprov7.grid(row = 7, column = 3,", "columnspan = 4) master0.bprov2 = Button(master0, text='<NAME>', command=self.dki2, width = 25, height=3) master0.bprov2.grid(row", "anchor=NW, image=img) text1 = lst[25] text2 = Text(master2, font = \"Arial 12\") text2.insert(INSERT,", "= 300) canvas.pack() img = PhotoImage(master = canvas,file=\"Ayam Betutu.png\") canvas.create_image(5,5, anchor=NW, image=img) text1", "Text(master2, font = \"Arial 12\") text2.insert(INSERT, text1) text2.pack() master2.mainloop() def __prov4(self): master0 =", "anchor=NW, image=img) text1 = lst[38] text2 = Text(master2, font = \"Arial 12\") text2.insert(INSERT,", "def sulgar2(self): master2 = Tk() master2.minsize(width = 800, height = 600) master2.maxsize(width =", "2, columnspan = 4) master0.bprov2 = Button(master0, text='Sate Ulat Sagu', command=self.pabar2, width =", "img = PhotoImage(master = canvas,file=\"Sei.png\") canvas.create_image(5,5, anchor=NW, image=img) text1 = lst[36] text2 =", "Palembang.png\") canvas.create_image(5,5, anchor=NW, image=img) text1 = lst[14] text2 = Text(master2, font = \"Arial", "master0.bprov2 = Button(master0, text='Tinutuan', command=self.sulut2, width = 25, height=3) master0.bprov2.grid(row = 1, column", "= 300) canvas.pack() img = PhotoImage(master = canvas,file=\"W<NAME>in.png\") canvas.create_image(5,5, anchor=NW, image=img) text1 =", "text1 = lst[43] text2 = Text(master2, font = \"Arial 12\") 
text2.insert(INSERT, text1) text2.pack()", "= 0, column = 2, columnspan = 4) master0.bprov1 = Button(master0, text='<NAME>', command=self.papua1,", "= 2, columnspan = 4) master0.bprov1 = Button(master0, text='Sop Konro', command=self.sulsel1, width =", "6, columnspan = 4) master0.mainloop() def kaltim1(self): master2 = Tk() master2.minsize(width = 800,", "6, columnspan = 4) master0.mainloop() def malut1(self): master2 = Tk() master2.minsize(width = 800,", "300) canvas.pack() img = PhotoImage(master = canvas,file=\"Sate Lilit.png\") canvas.create_image(5,5, anchor=NW, image=img) text1 =", "master0.mainloop() def beng1(self): master2 = Tk() master2.minsize(width = 800, height = 600) master2.maxsize(width", "Canvas(master2, width = 300, height = 300) canvas.pack() img = PhotoImage(master = canvas,file=\"W<NAME>in.png\")", "800, height = 600) master2.title(\"Makanan tradisional Bali\") canvas = Canvas(master2, width = 300,", "text2.insert(INSERT, text1) text2.pack() master2.mainloop() def __prov7(self): master0 = Tk() master0.minsize(width = 450, height", "__prov26(self): master0 = Tk() master0.minsize(width = 450, height = 100) master0.maxsize(width = 450,", "= 0, column = 2, columnspan = 4) master0.bprov1 = Button(master0, text='Lepek Binti',", "= 300) canvas.pack() img = PhotoImage(master = canvas,file=\"Lawa.png\") canvas.create_image(5,5, anchor=NW, image=img) text1 =", "master0.judul.grid(row = 0, column = 2, columnspan = 4) master0.bprov1 = Button(master0, text='Seruit", "Lampung', command=self.lamp1, width = 25, height=3) master0.bprov1.grid(row = 1, column = 2, columnspan", "Button(master0, text=\"Se'i\", command=self.ntt1, width = 25, height=3) master0.bprov1.grid(row = 1, column = 2,", "\"Arial 12\") text2.insert(INSERT, text1) text2.pack() master2.mainloop() def beng2(self): master2 = Tk() master2.minsize(width =", "height=3) master0.bprov2.grid(row = 1, column = 6, columnspan = 4) master0.mainloop() def jatim1(self):", "master0.bprov2.grid(row = 1, column 
= 6, columnspan = 4) master0.mainloop() def sulteng1(self): master2", "600) master2.maxsize(width = 800, height = 600) master2.title(\"Makanan tradisional Papua Barat\") canvas =", "text2 = Text(master2, font = \"Arial 12\") text2.insert(INSERT, text1) text2.pack() master2.mainloop() def pabar2(self):", "= 800, height = 600) master2.title(\"Makanan tradisional Maluku Utara\") canvas = Canvas(master2, width", "= \"Arial 12\") text2.insert(INSERT, text1) text2.pack() master2.mainloop() def __prov19(self): master0 = Tk() master0.minsize(width", "12\") text2.insert(INSERT, text1) text2.pack() master2.mainloop() def __prov5(self): master0 = Tk() master0.minsize(width = 450,", "= 1, column = 2, columnspan = 4) master0.bprov2 = Button(master0, text='<NAME>', command=self.kalut2,", "4) master0.bprov1 = Button(master0, text='Seruit Lampung', command=self.lamp1, width = 25, height=3) master0.bprov1.grid(row =", "master0.bprov1 = Button(master0, text='<NAME>', command=self.ntb1, width = 25, height=3) master0.bprov1.grid(row = 1, column", "Makassar.png\") canvas.create_image(5,5, anchor=NW, image=img) text1 = lst[57] text2 = Text(master2, font = \"Arial", "anchor=NW, image=img) text1 = lst[24] text2 = Text(master2, font = \"Arial 12\") text2.insert(INSERT,", "= 25) self.bprov25.grid(row = 8, column = 7, columnspan = 4) self.bprov26 =", "f: line = '' for i in range(68): while('<deskripsi>' not in line): line", "1, column = 2, columnspan = 4) master0.bprov2 = Button(master0, text='<NAME>', command=self.jambi2, width", "Canvas(master2, width = 300, height = 300) canvas.pack() img = PhotoImage(master = canvas,file=\"Sei.png\")", "canvas,file=\"Luti Gendang.png\") canvas.create_image(5,5, anchor=NW, image=img) text1 = lst[8] text2 = Text(master2, font =", "command=self.jambi1, width = 25, height=3) master0.bprov1.grid(row = 1, column = 2, columnspan =", "text1) text2.pack() master2.mainloop() def sulbar2(self): master2 = Tk() master2.minsize(width = 800, height =", "Text(master2, 
font = \"Arial 12\") text2.insert(INSERT, text1) text2.pack() master2.mainloop() def __prov17(self): master0 =", "text='Sinonggi', command=self.sulgar1, width = 25, height=3) master0.bprov1.grid(row = 1, column = 2, columnspan", "lst[9] text2 = Text(master2, font = \"Arial 12\") text2.insert(INSERT, text1) text2.pack() master2.mainloop() def", "0, column = 2, columnspan = 4) master0.bprov1 = Button(master0, text='<NAME>', command=self.bali1, width", "Canvas(master2, width = 300, height = 300) canvas.pack() img = PhotoImage(master = canvas,file=\"Rawon.png\")", "= lst[56] text2 = Text(master2, font = \"Arial 12\") text2.insert(INSERT, text1) text2.pack() master2.mainloop()", "4) self.bprov30 = Button(self.master, text='Sulawesi Tenggara', command=self.__prov30, width = 25) self.bprov30.grid(row = 13,", "Colo-colo', command=self.malu1, width = 25, height=3) master0.bprov1.grid(row = 1, column = 2, columnspan", "canvas,file=\"Sate Bandeng.png\") canvas.create_image(5,5, anchor=NW, image=img) text1 = lst[23] text2 = Text(master2, font =", "= 600) master2.title(\"Makanan tradisional BAnten\") canvas = Canvas(master2, width = 300, height =", "= Button(master0, text='Tinutuan', command=self.sulut2, width = 25, height=3) master0.bprov2.grid(row = 1, column =", "cmp): txt += cmp cmp = f.readline() lst.append(txt) line = f.readline() class DirektoriMakanan():", "\"Arial 12\") text2.insert(INSERT, text1) text2.pack() master2.mainloop() def malu2(self): master2 = Tk() master2.minsize(width =", "master0.bprov2.grid(row = 1, column = 6, columnspan = 4) master0.mainloop() def jabar1(self): master2", "height = 600) master2.title(\"Makanan tradisional Maluku Utara\") canvas = Canvas(master2, width = 300,", "= 2, columnspan = 4) master0.bprov1 = Button(master0, text='Apang Bugis', command=self.sulbar1, width =", "canvas.create_image(5,5, anchor=NW, image=img) text1 = lst[14] text2 = Text(master2, font = \"Arial 12\")", "while('<end>' not in cmp): txt += cmp cmp = f.readline() 
lst.append(txt) line =", "sumsel2(self): master2 = Tk() master2.minsize(width = 800, height = 600) master2.maxsize(width = 800,", "canvas.create_image(5,5, anchor=NW, image=img) text1 = lst[25] text2 = Text(master2, font = \"Arial 12\")", "300) canvas.pack() img = PhotoImage(master = canvas,file=\"Nasi Grombyang.png\") canvas.create_image(5,5, anchor=NW, image=img) text1 =", "columnspan = 4) master0.bprov1 = Button(master0, text='Mie Aceh', command=self.aceh1, width = 25, height=3)", "font = \"Arial 12\") text2.insert(INSERT, text1) text2.pack() master2.mainloop() def jatim2(self): master2 = Tk()", "= canvas,file=\"Kepiting Soka.png\") canvas.create_image(5,5, anchor=NW, image=img) text1 = lst[39] text2 = Text(master2, font", "PhotoImage(master = canvas,file=\"Jepa.png\") canvas.create_image(5,5, anchor=NW, image=img) text1 = lst[53] text2 = Text(master2, font", "Text(master2, font = \"Arial 12\") text2.insert(INSERT, text1) text2.pack() master2.mainloop() def __prov22(self): master0 =", "text2 = Text(master2, font = \"Arial 12\") text2.insert(INSERT, text1) text2.pack() master2.mainloop() def papua2(self):", "= 4) master0.bprov1 = Button(master0, text=\"Se'i\", command=self.ntt1, width = 25, height=3) master0.bprov1.grid(row =", "img = PhotoImage(master = canvas,file=\"Jadah Tempe.png\") canvas.create_image(5,5, anchor=NW, image=img) text1 = lst[27] text2", "Button(master0, text='Pempek Palembang', command=self.sumsel1, width = 25, height=3) master0.bprov1.grid(row = 1, column =", "Bangka.png\") canvas.create_image(5,5, anchor=NW, image=img) text1 = lst[16] text2 = Text(master2, font = \"Arial", "13, column = 7, columnspan = 4) self.bprov31 = Button(self.master, text='Maluku Utara', command=self.__prov31,", "0, column = 2, columnspan = 4) master0.bprov1 = Button(master0, text='Lepek Binti', command=self.beng1,", "Canvas(master2, width = 300, height = 300) canvas.pack() img = PhotoImage(master = canvas,file=\"Bolu", "300, height = 300) canvas.pack() img = 
PhotoImage(master = canvas,file=\"Nasi Sumsum.png\") canvas.create_image(5,5, anchor=NW,", "= 4) master0.bprov2 = Button(master0, text='<NAME>', command=self.kalut2, width = 25, height=3) master0.bprov2.grid(row =", "image=img) text1 = lst[47] text2 = Text(master2, font = \"Arial 12\") text2.insert(INSERT, text1)", "master0.bprov1 = Button(master0, text='Mie Aceh', command=self.aceh1, width = 25, height=3) master0.bprov1.grid(row = 1,", "master0.mainloop() def kriau1(self): master2 = Tk() master2.minsize(width = 800, height = 600) master2.maxsize(width", "f.readline() lst.append(txt) line = f.readline() class DirektoriMakanan(): def __init__(self, master=Tk()): self.master = master", "= 10, column = 7, columnspan = 4) self.bprov28 = Button(self.master, text='Sulawesi Tengah',", "4) master0.bprov2 = Button(master0, text='Tekwan Palembang', command=self.sumsel2, width = 25, height=3) master0.bprov2.grid(row =", "\"Arial 12\") text2.insert(INSERT, text1) text2.pack() master2.mainloop() def papua2(self): master2 = Tk() master2.minsize(width =", "column = 2, columnspan = 4) master0.bprov1 = Button(master0, text='Binte Biluhuta', command=self.goron1, width", "command=self.ntb2, width = 25, height=3) master0.bprov2.grid(row = 1, column = 6, columnspan =", "3, columnspan = 8) self.bprov1 = Button(self.master, text='Aceh', command=self.__prov1, width = 25) self.bprov1.grid(row", "Button(self.master, text='DI Yogyakarta', command=self.__prov15, width = 25) self.bprov15.grid(row = 15, column = 3,", "height = 600) master2.title(\"Makanan tradisional Kepulauan Riau\") canvas = Canvas(master2, width = 300,", "= Text(master2, font = \"Arial 12\") text2.insert(INSERT, text1) text2.pack() master2.mainloop() def jabar2(self): master2", "img = PhotoImage(master = canvas,file=\"Bubur Paddas Sambas.png\") canvas.create_image(5,5, anchor=NW, image=img) text1 = lst[40]", "font = \"Arial 12\") text2.insert(INSERT, text1) text2.pack() master2.mainloop() def sumsel2(self): master2 = Tk()", 
"= 6, columnspan = 4) master0.mainloop() def kalteng1(self): master2 = Tk() master2.minsize(width =", "= 4) self.bprov27 = Button(self.master, text='Sulawesi Barat', command=self.__prov27, width = 25) self.bprov27.grid(row =", "2, columnspan = 4) master0.bprov1 = Button(master0, text='Rujak Cingur', command=self.jatim1, width = 25,", "text='<NAME>', command=self.kalbar2, width = 25, height=3) master0.bprov2.grid(row = 1, column = 6, columnspan", "command=self.kalteng2, width = 25, height=3) master0.bprov2.grid(row = 1, column = 6, columnspan =", "Text(master2, font = \"Arial 12\") text2.insert(INSERT, text1) text2.pack() master2.mainloop() def kalteng2(self): master2 =", "text1 = lst[50] text2 = Text(master2, font = \"Arial 12\") text2.insert(INSERT, text1) text2.pack()", "height = 600) master2.maxsize(width = 800, height = 600) master2.title(\"Makanan tradisional Lampung\") canvas", "= 6, columnspan = 4) master0.mainloop() def jabar1(self): master2 = Tk() master2.minsize(width =", "= Text(master2, font = \"Arial 12\") text2.insert(INSERT, text1) text2.pack() master2.mainloop() def __prov25(self): master0", "canvas,file=\"Apang Bugis.png\") canvas.create_image(5,5, anchor=NW, image=img) text1 = lst[52] text2 = Text(master2, font =", "Canvas(master2, width = 300, height = 300) canvas.pack() img = PhotoImage(master = canvas,file=\"Sinonggi.png\")", "4) master0.mainloop() def sulteng1(self): master2 = Tk() master2.minsize(width = 800, height = 600)", "= PhotoImage(master = canvas,file=\"Kaledo.png\") canvas.create_image(5,5, anchor=NW, image=img) text1 = lst[55] text2 = Text(master2,", "columnspan = 4) master0.mainloop() def kalut1(self): master2 = Tk() master2.minsize(width = 800, height", "300, height = 300) canvas.pack() img = PhotoImage(master = canvas,file=\"W<NAME>in.png\") canvas.create_image(5,5, anchor=NW, image=img)", "text2.pack() master2.mainloop() def malut2(self): master2 = Tk() master2.minsize(width = 800, height = 600)", "= canvas,file=\"Asidah.png\") 
canvas.create_image(5,5, anchor=NW, image=img) text1 = lst[7] text2 = Text(master2, font =", "300) canvas.pack() img = PhotoImage(master = canvas,file=\"Luti Gendang.png\") canvas.create_image(5,5, anchor=NW, image=img) text1 =", "= 4) master0.mainloop() def kalut1(self): master2 = Tk() master2.minsize(width = 800, height =", "= 300) canvas.pack() img = PhotoImage(master = canvas,file=\"Gangan Asam Banjar.png\") canvas.create_image(5,5, anchor=NW, image=img)", "master0.bprov1.grid(row = 1, column = 2, columnspan = 4) master0.bprov2 = Button(master0, text='Kaledo',", "height = 600) master2.maxsize(width = 800, height = 600) master2.title(\"Makanan tradisional Sulawesi Tengah\")", "= 600) master2.maxsize(width = 800, height = 600) master2.title(\"Makanan tradisional Sulawesi Selatan\") canvas", "300, height = 300) canvas.pack() img = PhotoImage(master = canvas,file=\"Dekke Na Niura.png\") canvas.create_image(5,5,", "width = 300, height = 300) canvas.pack() img = PhotoImage(master = canvas,file=\"Lawa.png\") canvas.create_image(5,5,", "300) canvas.pack() img = PhotoImage(master = canvas,file=\"Sambal Colo Colo.png\") canvas.create_image(5,5, anchor=NW, image=img) text1", "4) master0.bprov2 = Button(master0, text='Tinutuan', command=self.sulut2, width = 25, height=3) master0.bprov2.grid(row = 1,", "= Text(master2, font = \"Arial 12\") text2.insert(INSERT, text1) text2.pack() master2.mainloop() def jateng2(self): master2", "= 1, column = 3, columnspan = 4) self.bprov2 = Button(self.master, text='Sumatera Utara',", "7, columnspan = 4) self.bprov31 = Button(self.master, text='Maluku Utara', command=self.__prov31, width = 25)", "Label(master0, text = \"Pilih makanan :)\", font = \"Arial 16 bold\") master0.judul.grid(row =", "800, height = 600) master2.maxsize(width = 800, height = 600) master2.title(\"Makanan tradisional Jambi\")", "column = 2, columnspan = 4) master0.bprov2 = Button(master0, text='Nasi Bekekpor', command=self.kaltim2, width", "master0.mainloop() def 
malut1(self): master2 = Tk() master2.minsize(width = 800, height = 600) master2.maxsize(width", "= Button(self.master, text='Sulawesi Selatan', command=self.__prov29, width = 25) self.bprov29.grid(row = 12, column =", "1, column = 6, columnspan = 4) master0.mainloop() def aceh1(self): master2 = Tk()", "2, columnspan = 4) master0.bprov1 = Button(master0, text='Kelapertaar', command=self.sulut1, width = 25, height=3)", "master2.title(\"Makanan tradisional Maluku\") canvas = Canvas(master2, width = 300, height = 300) canvas.pack()", "= Button(self.master, text='Kalimantan Selatan', command=self.__prov23, width = 25) self.bprov23.grid(row = 6, column =", "Bali\") canvas = Canvas(master2, width = 300, height = 300) canvas.pack() img =", "text2 = Text(master2, font = \"Arial 12\") text2.insert(INSERT, text1) text2.pack() master2.mainloop() def ntt2(self):", "3, column = 3, columnspan = 4) self.bprov4 = Button(self.master, text='Riau', command=self.__prov4, width", "12\") text2.insert(INSERT, text1) text2.pack() master2.mainloop() def riau2(self): master2 = Tk() master2.minsize(width = 800,", "master2.maxsize(width = 800, height = 600) master2.title(\"Makanan tradisional Bangka Belitung\") canvas = Canvas(master2,", "4) master0.mainloop() def diy1(self): master2 = Tk() master2.minsize(width = 800, height = 600)", "command=self.aceh1, width = 25, height=3) master0.bprov1.grid(row = 1, column = 2, columnspan =", "def sulbar1(self): master2 = Tk() master2.minsize(width = 800, height = 600) master2.maxsize(width =", "Yogyakarta', command=self.__prov15, width = 25) self.bprov15.grid(row = 15, column = 3, columnspan =", "Makanan Tradisional Nusantara\") master0.judul = Label(master0, text = \"Pilih makanan :)\", font =", "font = \"Arial 12\") text2.insert(INSERT, text1) text2.pack() master2.mainloop() def __prov10(self): master0 = Tk()", "master0.judul.grid(row = 0, column = 2, columnspan = 4) master0.bprov1 = Button(master0, text='Nasi", "height = 600) master2.title(\"Makanan 
tradisional Jawa Barat\") canvas = Canvas(master2, width = 300,", "Button(master0, text='Gong gong', command=self.kriau2, width = 25, height=3) master0.bprov2.grid(row = 1, column =", "= Button(master0, text='Jadah Tempe', command=self.diy2, width = 25, height=3) master0.bprov2.grid(row = 1, column", "Text(master2, font = \"Arial 12\") text2.insert(INSERT, text1) text2.pack() master2.mainloop() def kalut2(self): master2 =", "1, column = 2, columnspan = 4) master0.bprov2 = Button(master0, text='Gangan Asam Banjar',", "command=self.__prov11, width = 25) self.bprov11.grid(row = 11, column = 3, columnspan = 4)", "master2.mainloop() def __prov29(self): master0 = Tk() master0.minsize(width = 450, height = 100) master0.maxsize(width", "= Button(self.master, text='Sulawesi Tenggara', command=self.__prov30, width = 25) self.bprov30.grid(row = 13, column =", "command=self.__prov8, width = 25) self.bprov8.grid(row = 8, column = 3, columnspan = 4)", "command=self.pabar2, width = 25, height=3) master0.bprov2.grid(row = 1, column = 6, columnspan =", "= \"Arial 12\") text2.insert(INSERT, text1) text2.pack() master2.mainloop() def __prov2(self): master0 = Tk() master0.minsize(width", "Paddas Sambas', command=self.kalbar1, width = 25, height=3) master0.bprov1.grid(row = 1, column = 2,", "= 100) master0.title(\"Direktori Makanan Tradisional Nusantara\") master0.judul = Label(master0, text = \"Pilih makanan", "= lst[27] text2 = Text(master2, font = \"Arial 12\") text2.insert(INSERT, text1) text2.pack() master2.mainloop()", "master2.mainloop() def __prov31(self): master0 = Tk() master0.minsize(width = 450, height = 100) master0.maxsize(width", "command=self.bali1, width = 25, height=3) master0.bprov1.grid(row = 1, column = 2, columnspan =", "master0.bprov1.grid(row = 1, column = 2, columnspan = 4) master0.bprov2 = Button(master0, text='Bagar", "column = 2, columnspan = 4) master0.bprov1 = Button(master0, text='<NAME>', command=self.kbang1, width =", "canvas.pack() img = 
PhotoImage(master = canvas,file=\"Rujak Cingur.png\") canvas.create_image(5,5, anchor=NW, image=img) text1 = lst[30]", "800, height = 600) master2.title(\"Makanan tradisional Aceh\") canvas = Canvas(master2, width = 300,", "self.bprov22.grid(row = 5, column = 7, columnspan = 4) self.bprov23 = Button(self.master, text='Kalimantan", "= 6, columnspan = 4) master0.mainloop() def sulbar1(self): master2 = Tk() master2.minsize(width =", "= 600) master2.maxsize(width = 800, height = 600) master2.title(\"Makanan tradisional Kalimantan Tengha\") canvas", "6, columnspan = 4) master0.mainloop() def kalut1(self): master2 = Tk() master2.minsize(width = 800,", "column = 6, columnspan = 4) master0.mainloop() def jabar1(self): master2 = Tk() master2.minsize(width", "300, height = 300) canvas.pack() img = PhotoImage(master = canvas,file=\"Gong Gong.png\") canvas.create_image(5,5, anchor=NW,", "img = PhotoImage(master = canvas,file=\"Pempek Palembang.png\") canvas.create_image(5,5, anchor=NW, image=img) text1 = lst[14] text2", "self.bprov20 = Button(self.master, text='Kalimantan Utara', command=self.__prov20, width = 25) self.bprov20.grid(row = 3, column", "300) canvas.pack() img = PhotoImage(master = canvas,file=\"Gulai Taboh.png\") canvas.create_image(5,5, anchor=NW, image=img) text1 =", "text='Coto Makassar', command=self.sulsel2, width = 25, height=3) master0.bprov2.grid(row = 1, column = 6,", "= \"Arial 12\") text2.insert(INSERT, text1) text2.pack() master2.mainloop() def sumsel2(self): master2 = Tk() master2.minsize(width", "master0.judul.grid(row = 0, column = 2, columnspan = 4) master0.bprov1 = Button(master0, text='Rendang',", "300, height = 300) canvas.pack() img = PhotoImage(master = canvas,file=\"Kohu Kohu.png\") canvas.create_image(5,5, anchor=NW,", "25) self.bprov7.grid(row = 7, column = 3, columnspan = 4) self.bprov8 = Button(self.master,", "text2 = Text(master2, font = \"Arial 12\") text2.insert(INSERT, text1) text2.pack() master2.mainloop() def jateng2(self):", 
"self.bprov7 = Button(self.master, text='Bengukulu', command=self.__prov7, width = 25) self.bprov7.grid(row = 7, column =", "text1 = lst[26] text2 = Text(master2, font = \"Arial 12\") text2.insert(INSERT, text1) text2.pack()", "Tradisional Nusantara\") master0.judul = Label(master0, text = \"Pilih makanan :)\", font = \"Arial", "master0.judul.grid(row = 0, column = 2, columnspan = 4) master0.bprov1 = Button(master0, text='Bubur", "= 3, columnspan = 4) self.bprov11 = Button(self.master, text='Banten', command=self.__prov11, width = 25)", "Barat \") canvas = Canvas(master2, width = 300, height = 300) canvas.pack() img", "height = 600) master2.title(\"Makanan tradisional Kalimantan Timur\") canvas = Canvas(master2, width = 300,", "master0.bprov2.grid(row = 1, column = 6, columnspan = 4) master0.mainloop() def kbang1(self): master2", "= 6, columnspan = 4) master0.mainloop() def kalut1(self): master2 = Tk() master2.minsize(width =", "0, column = 2, columnspan = 4) master0.bprov1 = Button(master0, text='<NAME>', command=self.kbang1, width", "= 4) master0.bprov2 = Button(master0, text='Gong gong', command=self.kriau2, width = 25, height=3) master0.bprov2.grid(row", "= 1, column = 2, columnspan = 4) master0.bprov2 = Button(master0, text='Bagar Hiu',", "master0.bprov2.grid(row = 1, column = 6, columnspan = 4) master0.mainloop() def malu1(self): master2", "= 4) self.bprov26 = Button(self.master, text='Sulawesi Utara', command=self.__prov26, width = 25) self.bprov26.grid(row =", "12\") text2.insert(INSERT, text1) text2.pack() master2.mainloop() def kalsel2(self): master2 = Tk() master2.minsize(width = 800,", "goron1(self): master2 = Tk() master2.minsize(width = 800, height = 600) master2.maxsize(width = 800,", "= Text(master2, font = \"Arial 12\") text2.insert(INSERT, text1) text2.pack() master2.mainloop() def riau2(self): master2", "1, column = 2, columnspan = 4) master0.bprov2 = Button(master0, text='Bagar Hiu', command=self.beng2,", "= Button(master0, text='Tekwan 
Palembang', command=self.sumsel2, width = 25, height=3) master0.bprov2.grid(row = 1, column", "12\") text2.insert(INSERT, text1) text2.pack() master2.mainloop() def __prov26(self): master0 = Tk() master0.minsize(width = 450,", "2, columnspan = 4) master0.bprov2 = Button(master0, text='Kaledo', command=self.sulteng2, width = 25, height=3)", "columnspan = 4) master0.bprov1 = Button(master0, text='Tiwul', command=self.diy1, width = 25, height=3) master0.bprov1.grid(row", "master0.mainloop() def jatim1(self): master2 = Tk() master2.minsize(width = 800, height = 600) master2.maxsize(width", "= Text(master2, font = \"Arial 12\") text2.insert(INSERT, text1) text2.pack() master2.mainloop() def __prov22(self): master0", "= 2, columnspan = 4) master0.bprov1 = Button(master0, text='<NAME>', command=self.kaltim1, width = 25,", "canvas,file=\"Nasi Lapola.png\") canvas.create_image(5,5, anchor=NW, image=img) text1 = lst[61] text2 = Text(master2, font =", "tradisional Maluku\") canvas = Canvas(master2, width = 300, height = 300) canvas.pack() img", "column = 6, columnspan = 4) master0.mainloop() def kalbar1(self): master2 = Tk() master2.minsize(width", "500, height = 600) self.master.title(\"Selamat Datang di McDones (Direktori Macanan Tradisional)\") self.master.judul =", "= 600) master2.maxsize(width = 800, height = 600) master2.title(\"Makanan tradisional Bengkulu\") canvas =", "text1 = lst[63] text2 = Text(master2, font = \"Arial 12\") text2.insert(INSERT, text1) text2.pack()", "master0.bprov2.grid(row = 1, column = 6, columnspan = 4) master0.mainloop() def jambi1(self): master2", "300) canvas.pack() img = PhotoImage(master = canvas,file=\"Nasi Bekepor.png\") canvas.create_image(5,5, anchor=NW, image=img) text1 =", "master2.maxsize(width = 800, height = 600) master2.title(\"Makanan tradisional Bali\") canvas = Canvas(master2, width", "Button(master0, text='Kaledo', command=self.sulteng2, width = 25, height=3) master0.bprov2.grid(row = 1, column = 6,", "Bulayak.png\") 
canvas.create_image(5,5, anchor=NW, image=img) text1 = lst[35] text2 = Text(master2, font = \"Arial", "= canvas,file=\"Bika Ambon.png\") canvas.create_image(5,5, anchor=NW, image=img) text1 = lst[3] text2 = Text(master2, font", "def jabar2(self): master2 = Tk() master2.minsize(width = 800, height = 600) master2.maxsize(width =", "command=self.jatim2, width = 25, height=3) master0.bprov2.grid(row = 1, column = 6, columnspan =", "font = \"Arial 12\") text2.insert(INSERT, text1) text2.pack() master2.mainloop() def __prov9(self): master0 = Tk()", "lst[64] text2 = Text(master2, font = \"Arial 12\") text2.insert(INSERT, text1) text2.pack() master2.mainloop() def", "text2 = Text(master2, font = \"Arial 12\") text2.insert(INSERT, text1) text2.pack() master2.mainloop() def __prov14(self):", "anchor=NW, image=img) text1 = lst[57] text2 = Text(master2, font = \"Arial 12\") text2.insert(INSERT,", "master2.mainloop() def __prov28(self): master0 = Tk() master0.minsize(width = 450, height = 100) master0.maxsize(width", "text='<NAME>', command=self.jambi2, width = 25, height=3) master0.bprov2.grid(row = 1, column = 6, columnspan", "\"Arial 12\") text2.insert(INSERT, text1) text2.pack() master2.mainloop() def __prov9(self): master0 = Tk() master0.minsize(width =", "0, column = 2, columnspan = 4) master0.bprov1 = Button(master0, text='Sop Konro', command=self.sulsel1,", "canvas,file=\"Papeda.png\") canvas.create_image(5,5, anchor=NW, image=img) text1 = lst[67] text2 = Text(master2, font = \"Arial", "0, column = 2, columnspan = 4) master0.bprov1 = Button(master0, text='Seruit Lampung', command=self.lamp1,", "text2.insert(INSERT, text1) text2.pack() master2.mainloop() def goron2(self): master2 = Tk() master2.minsize(width = 800, height", "column = 7, columnspan = 4) self.bprov33 = Button(self.master, text='Papua Barat', command=self.__prov33, width", "master0.bprov2 = Button(master0, text='Bagar Hiu', command=self.beng2, width = 25, height=3) master0.bprov2.grid(row = 1,", "= 300) 
canvas.pack() img = PhotoImage(master = canvas,file=\"<NAME>.png\") canvas.create_image(5,5, anchor=NW, image=img) text1 =", "text2 = Text(master2, font = \"Arial 12\") text2.insert(INSERT, text1) text2.pack() master2.mainloop() def __prov24(self):", "anchor=NW, image=img) text1 = lst[0] text2 = Text(master2, font = \"Arial 12\") text2.insert(INSERT,", "font = \"Arial 12\") text2.insert(INSERT, text1) text2.pack() master2.mainloop() if __name__ == \"__main__\": readf()", "= lst[6] text2 = Text(master2, font = \"Arial 12\") text2.insert(INSERT, text1) text2.pack() master2.mainloop()", "width = 25) self.bprov14.grid(row = 14, column = 3, columnspan = 4) self.bprov15", "columnspan = 4) master0.mainloop() def ntt1(self): master2 = Tk() master2.minsize(width = 800, height", "text2 = Text(master2, font = \"Arial 12\") text2.insert(INSERT, text1) text2.pack() master2.mainloop() def kaltim2(self):", "= 600) master2.title(\"Makanan tradisional Papua Barat\") canvas = Canvas(master2, width = 300, height", "6, columnspan = 4) master0.mainloop() def aceh1(self): master2 = Tk() master2.minsize(width = 800,", "def kaltim2(self): master2 = Tk() master2.minsize(width = 800, height = 600) master2.maxsize(width =", "canvas.pack() img = PhotoImage(master = canvas,file=\"Bika Ambon.png\") canvas.create_image(5,5, anchor=NW, image=img) text1 = lst[3]", "img = PhotoImage(master = canvas,file=\"Martabak Bangka.png\") canvas.create_image(5,5, anchor=NW, image=img) text1 = lst[16] text2", "= 300) canvas.pack() img = PhotoImage(master = canvas,file=\"Gong Gong.png\") canvas.create_image(5,5, anchor=NW, image=img) text1", "300) canvas.pack() img = PhotoImage(master = canvas,file=\"W<NAME>in.png\") canvas.create_image(5,5, anchor=NW, image=img) text1 = lst[43]", "600) master2.maxsize(width = 800, height = 600) master2.title(\"Makanan tradisional Kalimnantan Timur\") canvas =", "text1) text2.pack() master2.mainloop() def papua2(self): master2 = Tk() master2.minsize(width = 800, height =", 
"height = 600) master2.title(\"Makanan tradisional Papua\") canvas = Canvas(master2, width = 300, height", "= 0, column = 2, columnspan = 4) master0.bprov1 = Button(master0, text=\"Se'i\", command=self.ntt1,", "= 600) master2.title(\"Makanan tradisional Kalimnantan Timur\") canvas = Canvas(master2, width = 300, height", "Text(master2, font = \"Arial 12\") text2.insert(INSERT, text1) text2.pack() master2.mainloop() def __prov23(self): master0 =", "= 300) canvas.pack() img = PhotoImage(master = canvas,file=\"Sate Bulayak.png\") canvas.create_image(5,5, anchor=NW, image=img) text1", "(Direktori Macanan Tradisional)\") self.master.judul = Label(self.master, text = \"Pilih provinsi yang ingin anda", "12\") text2.insert(INSERT, text1) text2.pack() master2.mainloop() def kbang2(self): master2 = Tk() master2.minsize(width = 800,", "master2.maxsize(width = 800, height = 600) master2.title(\"Makanan tradisional NTB\") canvas = Canvas(master2, width", "width = 300, height = 300) canvas.pack() img = PhotoImage(master = canvas,file=\"Uta Kelo.png\")", "800, height = 600) master2.maxsize(width = 800, height = 600) master2.title(\"Makanan tradisional Maluku\")", "lst[14] text2 = Text(master2, font = \"Arial 12\") text2.insert(INSERT, text1) text2.pack() master2.mainloop() def", "Button(self.master, text='Sulawesi Tenggara', command=self.__prov30, width = 25) self.bprov30.grid(row = 13, column = 7,", "canvas.create_image(5,5, anchor=NW, image=img) text1 = lst[38] text2 = Text(master2, font = \"Arial 12\")", "text='Kepulauan Bangka Belitung', command=self.__prov9, width = 25) self.bprov9.grid(row = 9, column = 3,", "height = 300) canvas.pack() img = PhotoImage(master = canvas,file=\"Gulai Taboh.png\") canvas.create_image(5,5, anchor=NW, image=img)", "= 6, columnspan = 4) master0.mainloop() def kriau1(self): master2 = Tk() master2.minsize(width =", "master2.mainloop() def __prov10(self): master0 = Tk() master0.minsize(width = 450, height = 100) master0.maxsize(width", "12\") 
text2.insert(INSERT, text1) text2.pack() master2.mainloop() def ntb2(self): master2 = Tk() master2.minsize(width = 800,", "def kalsel1(self): master2 = Tk() master2.minsize(width = 800, height = 600) master2.maxsize(width =", "self.bprov23.grid(row = 6, column = 7, columnspan = 4) self.bprov24 = Button(self.master, text='Kalimantan", "master2.title(\"Makanan tradisional Sumatera Barat\") canvas = Canvas(master2, width = 300, height = 300)", "def sulteng1(self): master2 = Tk() master2.minsize(width = 800, height = 600) master2.maxsize(width =", "= 1, column = 6, columnspan = 4) master0.mainloop() def pabar1(self): master2 =", "12\") text2.insert(INSERT, text1) text2.pack() master2.mainloop() def __prov10(self): master0 = Tk() master0.minsize(width = 450,", "= \"Arial 12\") text2.insert(INSERT, text1) text2.pack() master2.mainloop() def goron2(self): master2 = Tk() master2.minsize(width", "= 300, height = 300) canvas.pack() img = PhotoImage(master = canvas,file=\"Bika Ambon.png\") canvas.create_image(5,5,", "text='Rawon', command=self.jatim2, width = 25, height=3) master0.bprov2.grid(row = 1, column = 6, columnspan", "master0.bprov1.grid(row = 1, column = 2, columnspan = 4) master0.bprov2 = Button(master0, text='Dorokdok',", "canvas.create_image(5,5, anchor=NW, image=img) text1 = lst[50] text2 = Text(master2, font = \"Arial 12\")", "self.bprov2 = Button(self.master, text='Sumatera Utara', command=self.__prov2, width = 25) self.bprov2.grid(row = 2, column", "Barat', command=self.__prov12, width = 25) self.bprov12.grid(row = 12, column = 3, columnspan =", "image=img) text1 = lst[15] text2 = Text(master2, font = \"Arial 12\") text2.insert(INSERT, text1)", "300) canvas.pack() img = PhotoImage(master = canvas,file=\"Kepiting Soka.png\") canvas.create_image(5,5, anchor=NW, image=img) text1 =", "master0.bprov2.grid(row = 1, column = 6, columnspan = 4) master0.mainloop() def malut1(self): master2", "= PhotoImage(master = canvas,file=\"Bagar Hiu.png\") 
canvas.create_image(5,5, anchor=NW, image=img) text1 = lst[13] text2 =", "Button(master0, text='<NAME>', command=self.kalteng2, width = 25, height=3) master0.bprov2.grid(row = 1, column = 6,", "dki2(self): master2 = Tk() master2.minsize(width = 800, height = 600) master2.maxsize(width = 800,", "25) self.bprov21.grid(row = 4, column = 7, columnspan = 4) self.bprov22 = Button(self.master,", "4) master0.mainloop() def sumut1(self): master2 = Tk() master2.minsize(width = 800, height = 600)", "= Text(master2, font = \"Arial 12\") text2.insert(INSERT, text1) text2.pack() master2.mainloop() def __prov13(self): master0", "canvas.create_image(5,5, anchor=NW, image=img) text1 = lst[15] text2 = Text(master2, font = \"Arial 12\")", "Tempe', command=self.diy2, width = 25, height=3) master0.bprov2.grid(row = 1, column = 6, columnspan", "12\") text2.insert(INSERT, text1) text2.pack() master2.mainloop() def kalbar2(self): master2 = Tk() master2.minsize(width = 800,", "Niura.png\") canvas.create_image(5,5, anchor=NW, image=img) text1 = lst[2] text2 = Text(master2, font = \"Arial", "\"Arial 12\") text2.insert(INSERT, text1) text2.pack() master2.mainloop() def __prov30(self): master0 = Tk() master0.minsize(width =", "font = \"Arial 12\") text2.insert(INSERT, text1) text2.pack() master2.mainloop() def beng2(self): master2 = Tk()", "command=self.malu1, width = 25, height=3) master0.bprov1.grid(row = 1, column = 2, columnspan =", "master0.mainloop() def sumbar1(self): master2 = Tk() master2.minsize(width = 800, height = 600) master2.maxsize(width", "800, height = 600) master2.maxsize(width = 800, height = 600) master2.title(\"Makanan tradisional Kalimnantan", "canvas,file=\"Tinutuan.png\") canvas.create_image(5,5, anchor=NW, image=img) text1 = lst[51] text2 = Text(master2, font = \"Arial", "text2.pack() master2.mainloop() def ban2(self): master2 = Tk() master2.minsize(width = 800, height = 600)", "Button(self.master, text='Kalimantan Utara', command=self.__prov20, width = 25) 
self.bprov20.grid(row = 3, column = 7,", "4) self.bprov15 = Button(self.master, text='DI Yogyakarta', command=self.__prov15, width = 25) self.bprov15.grid(row = 15,", "def readf(): with open('all.txt', 'r') as f: line = '' for i in", "2, columnspan = 4) master0.bprov2 = Button(master0, text='Coto Makassar', command=self.sulsel2, width = 25,", "master0.bprov1 = Button(master0, text='Ikan Bakar Manokwari', command=self.pabar1, width = 25, height=3) master0.bprov1.grid(row =", "Text(master2, font = \"Arial 12\") text2.insert(INSERT, text1) text2.pack() master2.mainloop() def __prov12(self): master0 =", "\"Arial 12\") text2.insert(INSERT, text1) text2.pack() master2.mainloop() def __prov23(self): master0 = Tk() master0.minsize(width =", "master0.mainloop() def sulut1(self): master2 = Tk() master2.minsize(width = 800, height = 600) master2.maxsize(width", "1, column = 2, columnspan = 4) master0.bprov2 = Button(master0, text='Asidah', command=self.riau2, width", "25) self.bprov31.grid(row = 14, column = 7, columnspan = 4) self.bprov32 = Button(self.master,", "= 600) self.master.title(\"Selamat Datang di McDones (Direktori Macanan Tradisional)\") self.master.judul = Label(self.master, text", "text2.insert(INSERT, text1) text2.pack() master2.mainloop() def __prov28(self): master0 = Tk() master0.minsize(width = 450, height", "300) canvas.pack() img = PhotoImage(master = canvas,file=\"Soto Betawi.png\") canvas.create_image(5,5, anchor=NW, image=img) text1 =", "= Button(master0, text='Lawa', command=self.kalut1, width = 25, height=3) master0.bprov1.grid(row = 1, column =", "450, height = 100) master0.maxsize(width = 450, height = 100) master0.title(\"Direktori Makanan Tradisional", "= 4) master0.mainloop() def jatim1(self): master2 = Tk() master2.minsize(width = 800, height =", "300) canvas.pack() img = PhotoImage(master = canvas,file=\"Bilenthango.png\") canvas.create_image(5,5, anchor=NW, image=img) text1 = lst[49]", "= 0, column = 2, columnspan = 4) master0.bprov1 = 
Button(master0, text='<NAME>', command=self.bali1,", "300, height = 300) canvas.pack() img = PhotoImage(master = canvas,file=\"Rujak Cingur.png\") canvas.create_image(5,5, anchor=NW,", "Text(master2, font = \"Arial 12\") text2.insert(INSERT, text1) text2.pack() master2.mainloop() if __name__ == \"__main__\":", "text1 = lst[31] text2 = Text(master2, font = \"Arial 12\") text2.insert(INSERT, text1) text2.pack()", "1, column = 2, columnspan = 4) master0.bprov2 = Button(master0, text='Ampiang Dadiah', command=self.sumbar2,", "column = 2, columnspan = 4) master0.bprov1 = Button(master0, text='Tempoyak', command=self.jambi1, width =", "canvas.pack() img = PhotoImage(master = canvas,file=\"Jadah Tempe.png\") canvas.create_image(5,5, anchor=NW, image=img) text1 = lst[27]", "text2.insert(INSERT, text1) text2.pack() master2.mainloop() def ntb2(self): master2 = Tk() master2.minsize(width = 800, height", "command=self.dki1, width = 25, height=3) master0.bprov1.grid(row = 1, column = 2, columnspan =", "600) master2.title(\"Makanan tradisional Kalimantan Tengah\") canvas = Canvas(master2, width = 300, height =", "canvas,file=\"Kaledo.png\") canvas.create_image(5,5, anchor=NW, image=img) text1 = lst[55] text2 = Text(master2, font = \"Arial", "= PhotoImage(master = canvas,file=\"Kasoami.png\") canvas.create_image(5,5, anchor=NW, image=img) text1 = lst[59] text2 = Text(master2,", "canvas.create_image(5,5, anchor=NW, image=img) text1 = lst[27] text2 = Text(master2, font = \"Arial 12\")", "4) master0.mainloop() def bali1(self): master2 = Tk() master2.minsize(width = 800, height = 600)", "height = 300) canvas.pack() img = PhotoImage(master = canvas,file=\"Mie Aceh.png\") canvas.create_image(5,5, anchor=NW, image=img)", "4) master0.mainloop() def sulbar1(self): master2 = Tk() master2.minsize(width = 800, height = 600)", "1, column = 6, columnspan = 4) master0.mainloop() def sumut1(self): master2 = Tk()", "3, columnspan = 4) self.bprov3 = Button(self.master, text='Sumatera Barat', 
command=self.__prov3, width = 25)", "Text(master2, font = \"Arial 12\") text2.insert(INSERT, text1) text2.pack() master2.mainloop() def __prov30(self): master0 =", "4) self.bprov7 = Button(self.master, text='Bengukulu', command=self.__prov7, width = 25) self.bprov7.grid(row = 7, column", "def papua1(self): master2 = Tk() master2.minsize(width = 800, height = 600) master2.maxsize(width =", "canvas,file=\"Klapertaart.png\") canvas.create_image(5,5, anchor=NW, image=img) text1 = lst[50] text2 = Text(master2, font = \"Arial", "= PhotoImage(master = canvas,file=\"<NAME>oyak.png\") canvas.create_image(5,5, anchor=NW, image=img) text1 = lst[41] text2 = Text(master2,", "6, columnspan = 4) master0.mainloop() def goron1(self): master2 = Tk() master2.minsize(width = 800,", "Betawi.png\") canvas.create_image(5,5, anchor=NW, image=img) text1 = lst[20] text2 = Text(master2, font = \"Arial", "columnspan = 4) self.bprov2 = Button(self.master, text='Sumatera Utara', command=self.__prov2, width = 25) self.bprov2.grid(row", "Canvas(master2, width = 300, height = 300) canvas.pack() img = PhotoImage(master = canvas,file=\"Tapa", "Kelo.png\") canvas.create_image(5,5, anchor=NW, image=img) text1 = lst[54] text2 = Text(master2, font = \"Arial", "= Button(self.master, text='NTB', command=self.__prov18, width = 25) self.bprov18.grid(row = 1, column = 7,", "4) self.bprov31 = Button(self.master, text='Maluku Utara', command=self.__prov31, width = 25) self.bprov31.grid(row = 14,", "master2.mainloop() def __prov7(self): master0 = Tk() master0.minsize(width = 450, height = 100) master0.maxsize(width", "PhotoImage(master = canvas,file=\"Luti Gendang.png\") canvas.create_image(5,5, anchor=NW, image=img) text1 = lst[8] text2 = Text(master2,", "= 1, column = 6, columnspan = 4) master0.mainloop() def malut1(self): master2 =", "canvas.create_image(5,5, anchor=NW, image=img) text1 = lst[37] text2 = Text(master2, font = \"Arial 12\")", "height = 300) canvas.pack() img = PhotoImage(master = 
canvas,file=\"Gangan Asam Banjar.png\") canvas.create_image(5,5, anchor=NW,", "6, columnspan = 4) master0.mainloop() def dki1(self): master2 = Tk() master2.minsize(width = 800,", "height = 600) master2.title(\"Makanan tradisional Jawa TEngah\") canvas = Canvas(master2, width = 300,", "= 300) canvas.pack() img = PhotoImage(master = canvas,file=\"Sate Lilit.png\") canvas.create_image(5,5, anchor=NW, image=img) text1", "text1) text2.pack() master2.mainloop() def sulteng2(self): master2 = Tk() master2.minsize(width = 800, height =", "def beng1(self): master2 = Tk() master2.minsize(width = 800, height = 600) master2.maxsize(width =", "text2.pack() master2.mainloop() def pabar2(self): master2 = Tk() master2.minsize(width = 800, height = 600)", "7, columnspan = 4) self.bprov26 = Button(self.master, text='Sulawesi Utara', command=self.__prov26, width = 25)", "= 800, height = 600) master2.title(\"Makanan tradisional Aceh\") canvas = Canvas(master2, width =", "anchor=NW, image=img) text1 = lst[44] text2 = Text(master2, font = \"Arial 12\") text2.insert(INSERT,", "= 0, column = 2, columnspan = 4) master0.bprov1 = Button(master0, text='Nasi Sumsum',", "malut2(self): master2 = Tk() master2.minsize(width = 800, height = 600) master2.maxsize(width = 800,", "\"Pilih provinsi yang ingin anda ketahui\", font = \"Arial 16 bold\") self.master.judul.grid(row =", "= canvas,file=\"Uta Kelo.png\") canvas.create_image(5,5, anchor=NW, image=img) text1 = lst[54] text2 = Text(master2, font", "columnspan = 4) master0.mainloop() def sumbar1(self): master2 = Tk() master2.minsize(width = 800, height", "canvas.pack() img = PhotoImage(master = canvas,file=\"<NAME>.png\") canvas.create_image(5,5, anchor=NW, image=img) text1 = lst[24] text2", "= 4) master0.bprov2 = Button(master0, text='Jadah Tempe', command=self.diy2, width = 25, height=3) master0.bprov2.grid(row", "master2.maxsize(width = 800, height = 600) master2.title(\"Makanan tradisional Kalimantan Selatan\") canvas = Canvas(master2,", "column 
= 3, columnspan = 4) self.bprov9 = Button(self.master, text='Kepulauan Bangka Belitung', command=self.__prov9,", "= 4) master0.mainloop() def sulteng1(self): master2 = Tk() master2.minsize(width = 800, height =", "def diy1(self): master2 = Tk() master2.minsize(width = 800, height = 600) master2.maxsize(width =", "image=img) text1 = lst[32] text2 = Text(master2, font = \"Arial 12\") text2.insert(INSERT, text1)", "Aceh.png\") canvas.create_image(5,5, anchor=NW, image=img) text1 = lst[0] text2 = Text(master2, font = \"Arial", "Cincane.png\") canvas.create_image(5,5, anchor=NW, image=img) text1 = lst[46] text2 = Text(master2, font = \"Arial", "25) self.bprov22.grid(row = 5, column = 7, columnspan = 4) self.bprov23 = Button(self.master,", "= Button(master0, text='<NAME>', command=self.jabar1, width = 25, height=3) master0.bprov1.grid(row = 1, column =", "2, columnspan = 4) master0.bprov1 = Button(master0, text='Uta Kelo', command=self.sulteng1, width = 25,", "= 800, height = 600) master2.title(\"Makanan tradisional Kalimantan Barat\") canvas = Canvas(master2, width", "columnspan = 4) master0.bprov1 = Button(master0, text='<NAME>', command=self.jabar1, width = 25, height=3) master0.bprov1.grid(row", "0, column = 2, columnspan = 4) master0.bprov1 = Button(master0, text='Kalumpe', command=self.kalteng1, width", "height = 300) canvas.pack() img = PhotoImage(master = canvas,file=\"Klapertaart.png\") canvas.create_image(5,5, anchor=NW, image=img) text1", "300) canvas.pack() img = PhotoImage(master = canvas,file=\"Martabak Bangka.png\") canvas.create_image(5,5, anchor=NW, image=img) text1 =", "= 3, columnspan = 4) self.bprov7 = Button(self.master, text='Bengukulu', command=self.__prov7, width = 25)", "anchor=NW, image=img) text1 = lst[22] text2 = Text(master2, font = \"Arial 12\") text2.insert(INSERT,", "800, height = 600) master2.title(\"Makanan tradisional Kalimantan Utara\") canvas = Canvas(master2, width =", "= Text(master2, font = \"Arial 12\") text2.insert(INSERT, text1) 
text2.pack() master2.mainloop() def __prov23(self): master0", "text='Gangan Asam Banjar', command=self.kalsel2, width = 25, height=3) master0.bprov2.grid(row = 1, column =", "lst[54] text2 = Text(master2, font = \"Arial 12\") text2.insert(INSERT, text1) text2.pack() master2.mainloop() def", "1, column = 6, columnspan = 4) master0.mainloop() def malut1(self): master2 = Tk()", "300, height = 300) canvas.pack() img = PhotoImage(master = canvas,file=\"Tekwan Palembang.png\") canvas.create_image(5,5, anchor=NW,", "= 4) master0.bprov2 = Button(master0, text='<NAME>', command=self.ban2, width = 25, height=3) master0.bprov2.grid(row =", "= 4) master0.bprov1 = Button(master0, text='Mie Aceh', command=self.aceh1, width = 25, height=3) master0.bprov1.grid(row", "= Button(master0, text='<NAME>', command=self.kalbar2, width = 25, height=3) master0.bprov2.grid(row = 1, column =", "= 300, height = 300) canvas.pack() img = PhotoImage(master = canvas,file=\"Lawa.png\") canvas.create_image(5,5, anchor=NW,", "image=img) text1 = lst[42] text2 = Text(master2, font = \"Arial 12\") text2.insert(INSERT, text1)", "= Text(master2, font = \"Arial 12\") text2.insert(INSERT, text1) text2.pack() master2.mainloop() def __prov9(self): master0", "= 300) canvas.pack() img = PhotoImage(master = canvas,file=\"Rawon.png\") canvas.create_image(5,5, anchor=NW, image=img) text1 =", "text1) text2.pack() master2.mainloop() def pabar2(self): master2 = Tk() master2.minsize(width = 800, height =", "def __prov28(self): master0 = Tk() master0.minsize(width = 450, height = 100) master0.maxsize(width =", "7, columnspan = 4) self.bprov33 = Button(self.master, text='Papua Barat', command=self.__prov33, width = 25)", "master2.mainloop() def sulsel2(self): master2 = Tk() master2.minsize(width = 800, height = 600) master2.maxsize(width", "text='<NAME>', command=self.papua1, width = 25, height=3) master0.bprov1.grid(row = 1, column = 2, columnspan", "img = PhotoImage(master = canvas,file=\"Sate Bandeng.png\") 
canvas.create_image(5,5, anchor=NW, image=img) text1 = lst[23] text2", "master2.maxsize(width = 800, height = 600) master2.title(\"Makanan tradisional Kalimantan Timur\") canvas = Canvas(master2,", "= Text(master2, font = \"Arial 12\") text2.insert(INSERT, text1) text2.pack() master2.mainloop() def __prov10(self): master0", "lst[56] text2 = Text(master2, font = \"Arial 12\") text2.insert(INSERT, text1) text2.pack() master2.mainloop() def", "text2 = Text(master2, font = \"Arial 12\") text2.insert(INSERT, text1) text2.pack() master2.mainloop() def __prov7(self):", "text='Tapa Kolo', command=self.ntt2, width = 25, height=3) master0.bprov2.grid(row = 1, column = 6,", "= 600) master2.title(\"Makanan tradisional NTT\") canvas = Canvas(master2, width = 300, height =", "= \"Arial 12\") text2.insert(INSERT, text1) text2.pack() master2.mainloop() def __prov22(self): master0 = Tk() master0.minsize(width", "text2 = Text(master2, font = \"Arial 12\") text2.insert(INSERT, text1) text2.pack() master2.mainloop() def __prov13(self):", "300) canvas.pack() img = PhotoImage(master = canvas,file=\"Kasoami.png\") canvas.create_image(5,5, anchor=NW, image=img) text1 = lst[59]", "lst[2] text2 = Text(master2, font = \"Arial 12\") text2.insert(INSERT, text1) text2.pack() master2.mainloop() def", "600) master2.maxsize(width = 800, height = 600) master2.title(\"Makanan tradisional Sulawesi Barat\") canvas =", "height = 300) canvas.pack() img = PhotoImage(master = canvas,file=\"Luti Gendang.png\") canvas.create_image(5,5, anchor=NW, image=img)", "Button(master0, text='<NAME>', command=self.malut1, width = 25, height=3) master0.bprov1.grid(row = 1, column = 2,", "25) self.bprov27.grid(row = 10, column = 7, columnspan = 4) self.bprov28 = Button(self.master,", "anchor=NW, image=img) text1 = lst[55] text2 = Text(master2, font = \"Arial 12\") text2.insert(INSERT,", "master0.bprov2.grid(row = 1, column = 6, columnspan = 4) master0.mainloop() def dki1(self): master2", "column = 6, columnspan = 4) 
master0.mainloop() def riau1(self): master2 = Tk() master2.minsize(width", "canvas,file=\"Soto Betawi.png\") canvas.create_image(5,5, anchor=NW, image=img) text1 = lst[20] text2 = Text(master2, font =", "= 4) master0.mainloop() def dki1(self): master2 = Tk() master2.minsize(width = 800, height =", "anchor=NW, image=img) text1 = lst[17] text2 = Text(master2, font = \"Arial 12\") text2.insert(INSERT,", "width = 300, height = 300) canvas.pack() img = PhotoImage(master = canvas,file=\"Sate Bandeng.png\")", "4) self.bprov33 = Button(self.master, text='Papua Barat', command=self.__prov33, width = 25) self.bprov33.grid(row = 16,", "master2.mainloop() def __prov17(self): master0 = Tk() master0.minsize(width = 450, height = 100) master0.maxsize(width", "master0.mainloop() def kalsel1(self): master2 = Tk() master2.minsize(width = 800, height = 600) master2.maxsize(width", "master0.bprov2 = Button(master0, text='Coto Makassar', command=self.sulsel2, width = 25, height=3) master0.bprov2.grid(row = 1,", "= Button(master0, text='Mie Aceh', command=self.aceh1, width = 25, height=3) master0.bprov1.grid(row = 1, column", "7, columnspan = 4) self.bprov25 = Button(self.master, text='Gorontalo', command=self.__prov25, width = 25) self.bprov25.grid(row", "text1) text2.pack() master2.mainloop() def __prov27(self): master0 = Tk() master0.minsize(width = 450, height =", "= Text(master2, font = \"Arial 12\") text2.insert(INSERT, text1) text2.pack() master2.mainloop() def sulgar2(self): master2", "master0.judul.grid(row = 0, column = 2, columnspan = 4) master0.bprov1 = Button(master0, text=\"Se'i\",", "Colo.png\") canvas.create_image(5,5, anchor=NW, image=img) text1 = lst[62] text2 = Text(master2, font = \"Arial", "master0.judul.grid(row = 0, column = 2, columnspan = 4) master0.bprov1 = Button(master0, text='Tempoyak',", "\"Arial 12\") text2.insert(INSERT, text1) text2.pack() master2.mainloop() def __prov13(self): master0 = Tk() master0.minsize(width =", "PhotoImage(master = 
canvas,file=\"Manday.png\") canvas.create_image(5,5, anchor=NW, image=img) text1 = lst[44] text2 = Text(master2, font", "Button(master0, text='Tinutuan', command=self.sulut2, width = 25, height=3) master0.bprov2.grid(row = 1, column = 6,", "tradisional DI Yogyakarta\") canvas = Canvas(master2, width = 300, height = 300) canvas.pack()", "command=self.sumut1, width = 25, height=3) master0.bprov1.grid(row = 1, column = 2, columnspan =", "= Button(master0, text='Gulai Taboh', command=self.lamp2, width = 25, height=3) master0.bprov2.grid(row = 1, column", "text='Nasi Gerombyang', command=self.jateng2, width = 25, height=3) master0.bprov2.grid(row = 1, column = 6,", "Button(master0, text='Rendang', command=self.sumbar1, width = 25, height=3) master0.bprov1.grid(row = 1, column = 2,", "Canvas(master2, width = 300, height = 300) canvas.pack() img = PhotoImage(master = canvas,file=\"Sop", "self.bprov5 = Button(self.master, text='Kepulauan Riau', command=self.__prov5, width = 25) self.bprov5.grid(row = 5, column", "master0.bprov2 = Button(master0, text='<NAME>', command=self.dki2, width = 25, height=3) master0.bprov2.grid(row = 1, column", "master0.mainloop() def sulteng1(self): master2 = Tk() master2.minsize(width = 800, height = 600) master2.maxsize(width", "= 1, column = 2, columnspan = 4) master0.bprov2 = Button(master0, text='Kue Timpan',", "PhotoImage(master = canvas,file=\"Sinonggi.png\") canvas.create_image(5,5, anchor=NW, image=img) text1 = lst[58] text2 = Text(master2, font", "columnspan = 4) master0.bprov1 = Button(master0, text='Tempoyak', command=self.jambi1, width = 25, height=3) master0.bprov1.grid(row", "canvas.pack() img = PhotoImage(master = canvas,file=\"<NAME>.png\") canvas.create_image(5,5, anchor=NW, image=img) text1 = lst[21] text2", "columnspan = 4) master0.bprov1 = Button(master0, text='Soto Betawi', command=self.dki1, width = 25, height=3)", "300, height = 300) canvas.pack() img = PhotoImage(master = canvas,file=\"Nasi Lapola.png\") 
canvas.create_image(5,5, anchor=NW,", "= 300, height = 300) canvas.pack() img = PhotoImage(master = canvas,file=\"Uta Kelo.png\") canvas.create_image(5,5,", "img = PhotoImage(master = canvas,file=\"Sate Ulat Sagu.png\") canvas.create_image(5,5, anchor=NW, image=img) text1 = lst[65]", "= PhotoImage(master = canvas,file=\"<NAME>.png\") canvas.create_image(5,5, anchor=NW, image=img) text1 = lst[21] text2 = Text(master2,", "text2.insert(INSERT, text1) text2.pack() master2.mainloop() def __prov19(self): master0 = Tk() master0.minsize(width = 450, height", "= Text(master2, font = \"Arial 12\") text2.insert(INSERT, text1) text2.pack() master2.mainloop() def __prov5(self): master0", "text1 = lst[64] text2 = Text(master2, font = \"Arial 12\") text2.insert(INSERT, text1) text2.pack()", "columnspan = 4) master0.bprov2 = Button(master0, text='Kohu-kohu', command=self.malu2, width = 25, height=3) master0.bprov2.grid(row", "master0.judul.grid(row = 0, column = 2, columnspan = 4) master0.bprov1 = Button(master0, text='Lawa',", "800, height = 600) master2.title(\"Makanan tradisional Kalimantan Tengah\") canvas = Canvas(master2, width =", "command=self.__prov10, width = 25) self.bprov10.grid(row = 10, column = 3, columnspan = 4)", "font = \"Arial 12\") text2.insert(INSERT, text1) text2.pack() master2.mainloop() def __prov21(self): master0 = Tk()", "tradisional Jawa TEngah\") canvas = Canvas(master2, width = 300, height = 300) canvas.pack()", "__prov18(self): master0 = Tk() master0.minsize(width = 450, height = 100) master0.maxsize(width = 450,", "Button(master0, text='Nasi Gerombyang', command=self.jateng2, width = 25, height=3) master0.bprov2.grid(row = 1, column =", "= 0, column = 2, columnspan = 4) master0.bprov1 = Button(master0, text='Kalumpe', command=self.kalteng1,", "text1 = lst[61] text2 = Text(master2, font = \"Arial 12\") text2.insert(INSERT, text1) text2.pack()", "= 300, height = 300) canvas.pack() img = PhotoImage(master = canvas,file=\"Martabak Bangka.png\") 
canvas.create_image(5,5,", "text2 = Text(master2, font = \"Arial 12\") text2.insert(INSERT, text1) text2.pack() master2.mainloop() def __prov20(self):", "canvas.create_image(5,5, anchor=NW, image=img) text1 = lst[39] text2 = Text(master2, font = \"Arial 12\")", "= lst[51] text2 = Text(master2, font = \"Arial 12\") text2.insert(INSERT, text1) text2.pack() master2.mainloop()", "Text(master2, font = \"Arial 12\") text2.insert(INSERT, text1) text2.pack() master2.mainloop() def bali2(self): master2 =", "canvas,file=\"Lawa.png\") canvas.create_image(5,5, anchor=NW, image=img) text1 = lst[38] text2 = Text(master2, font = \"Arial", "= Button(self.master, text='Aceh', command=self.__prov1, width = 25) self.bprov1.grid(row = 1, column = 3,", "4) self.bprov9 = Button(self.master, text='Kepulauan Bangka Belitung', command=self.__prov9, width = 25) self.bprov9.grid(row =", "4) master0.bprov2 = Button(master0, text='<NAME>', command=self.kalteng2, width = 25, height=3) master0.bprov2.grid(row = 1,", "text2.insert(INSERT, text1) text2.pack() master2.mainloop() def __prov9(self): master0 = Tk() master0.minsize(width = 450, height", "\"Arial 12\") text2.insert(INSERT, text1) text2.pack() master2.mainloop() def __prov12(self): master0 = Tk() master0.minsize(width =", "text1) text2.pack() master2.mainloop() def __prov33(self): master0 = Tk() master0.minsize(width = 450, height =", "= 300) canvas.pack() img = PhotoImage(master = canvas,file=\"Luti Gendang.png\") canvas.create_image(5,5, anchor=NW, image=img) text1", "= PhotoImage(master = canvas,file=\"Tapa Kolo.png\") canvas.create_image(5,5, anchor=NW, image=img) text1 = lst[37] text2 =", "\"Arial 12\") text2.insert(INSERT, text1) text2.pack() master2.mainloop() def __prov20(self): master0 = Tk() master0.minsize(width =", "12\") text2.insert(INSERT, text1) text2.pack() master2.mainloop() def __prov25(self): master0 = Tk() master0.minsize(width = 450,", "master2.title(\"Makanan tradisional Papua Barat\") canvas = Canvas(master2, 
width = 300, height = 300)", "width = 300, height = 300) canvas.pack() img = PhotoImage(master = canvas,file=\"Tinutuan.png\") canvas.create_image(5,5,", "* lst = [] def readf(): with open('all.txt', 'r') as f: line =", "= 4) master0.bprov1 = Button(master0, text='<NAME>', command=self.kbang1, width = 25, height=3) master0.bprov1.grid(row =", "canvas.create_image(5,5, anchor=NW, image=img) text1 = lst[9] text2 = Text(master2, font = \"Arial 12\")", "4) self.bprov29 = Button(self.master, text='Sulawesi Selatan', command=self.__prov29, width = 25) self.bprov29.grid(row = 12,", "Button(master0, text='Soto Betawi', command=self.dki1, width = 25, height=3) master0.bprov1.grid(row = 1, column =", "master2.maxsize(width = 800, height = 600) master2.title(\"Makanan tradisional Jawa Timur\") canvas = Canvas(master2,", "height = 600) master2.title(\"Makanan tradisional BAnten\") canvas = Canvas(master2, width = 300, height", "Button(master0, text='Sop Konro', command=self.sulsel1, width = 25, height=3) master0.bprov1.grid(row = 1, column =", "8) self.bprov1 = Button(self.master, text='Aceh', command=self.__prov1, width = 25) self.bprov1.grid(row = 1, column", "= 300, height = 300) canvas.pack() img = PhotoImage(master = canvas,file=\"Padamaran.png\") canvas.create_image(5,5, anchor=NW,", "master0.bprov2.grid(row = 1, column = 6, columnspan = 4) master0.mainloop() def sumbar1(self): master2", "= PhotoImage(master = canvas,file=\"Sate Bandeng.png\") canvas.create_image(5,5, anchor=NW, image=img) text1 = lst[23] text2 =", "columnspan = 4) master0.mainloop() def jateng1(self): master2 = Tk() master2.minsize(width = 800, height", "text1 = lst[42] text2 = Text(master2, font = \"Arial 12\") text2.insert(INSERT, text1) text2.pack()", "Ulat Sagu.png\") canvas.create_image(5,5, anchor=NW, image=img) text1 = lst[65] text2 = Text(master2, font =", "columnspan = 4) self.bprov3 = Button(self.master, text='Sumatera Barat', command=self.__prov3, width = 25) self.bprov3.grid(row", "300, 
height = 300) canvas.pack() img = PhotoImage(master = canvas,file=\"Rendang.png\") canvas.create_image(5,5, anchor=NW, image=img)", "= 25) self.bprov31.grid(row = 14, column = 7, columnspan = 4) self.bprov32 =", "canvas,file=\"<NAME>.png\") canvas.create_image(5,5, anchor=NW, image=img) text1 = lst[21] text2 = Text(master2, font = \"Arial", "columnspan = 4) master0.bprov2 = Button(master0, text='Tapa Kolo', command=self.ntt2, width = 25, height=3)", "columnspan = 4) master0.bprov2 = Button(master0, text='Kue Timpan', command=self.aceh2, width = 25, height=3)", "300, height = 300) canvas.pack() img = PhotoImage(master = canvas,file=\"Kalumpe.png\") canvas.create_image(5,5, anchor=NW, image=img)", "malu1(self): master2 = Tk() master2.minsize(width = 800, height = 600) master2.maxsize(width = 800,", "text='Belacan Belitung', command=self.kbang2, width = 25, height=3) master0.bprov2.grid(row = 1, column = 6,", "= \"Arial 12\") text2.insert(INSERT, text1) text2.pack() master2.mainloop() def __prov18(self): master0 = Tk() master0.minsize(width", "25) self.bprov10.grid(row = 10, column = 3, columnspan = 4) self.bprov11 = Button(self.master,", "= Button(master0, text='Soto Betawi', command=self.dki1, width = 25, height=3) master0.bprov1.grid(row = 1, column", "makanan :)\", font = \"Arial 16 bold\") master0.judul.grid(row = 0, column = 2,", "= 800, height = 600) master2.title(\"Makanan tradisional DKI Jakarta\") canvas = Canvas(master2, width", "height = 600) master2.maxsize(width = 800, height = 600) master2.title(\"Makanan tradisional NTB\") canvas", "300, height = 300) canvas.pack() img = PhotoImage(master = canvas,file=\"Tiwul.png\") canvas.create_image(5,5, anchor=NW, image=img)", "__prov28(self): master0 = Tk() master0.minsize(width = 450, height = 100) master0.maxsize(width = 450,", "= 4) self.bprov17 = Button(self.master, text='Bali', command=self.__prov17, width = 25) self.bprov17.grid(row = 17,", "text = \"Pilih provinsi yang ingin anda ketahui\", font = \"Arial 
16 bold\")", "text1) text2.pack() master2.mainloop() def __prov4(self): master0 = Tk() master0.minsize(width = 450, height =", "command=self.__prov27, width = 25) self.bprov27.grid(row = 10, column = 7, columnspan = 4)", "font = \"Arial 12\") text2.insert(INSERT, text1) text2.pack() master2.mainloop() def sumut2(self): master2 = Tk()", "= PhotoImage(master = canvas,file=\"Pempek Palembang.png\") canvas.create_image(5,5, anchor=NW, image=img) text1 = lst[14] text2 =", "text2.pack() master2.mainloop() def lamp2(self): master2 = Tk() master2.minsize(width = 800, height = 600)", "= Button(master0, text='Kelapertaar', command=self.sulut1, width = 25, height=3) master0.bprov1.grid(row = 1, column =", "= \"Arial 12\") text2.insert(INSERT, text1) text2.pack() master2.mainloop() def __prov29(self): master0 = Tk() master0.minsize(width", "font = \"Arial 12\") text2.insert(INSERT, text1) text2.pack() master2.mainloop() def __prov34(self): master0 = Tk()", "12\") text2.insert(INSERT, text1) text2.pack() master2.mainloop() def malut2(self): master2 = Tk() master2.minsize(width = 800,", "= 800, height = 600) master2.title(\"Makanan tradisional Bangka Belitung\") canvas = Canvas(master2, width", "command=self.__prov24, width = 25) self.bprov24.grid(row = 7, column = 7, columnspan = 4)", "canvas,file=\"Asidah.png\") canvas.create_image(5,5, anchor=NW, image=img) text1 = lst[7] text2 = Text(master2, font = \"Arial", "600) master2.maxsize(width = 800, height = 600) master2.title(\"Makanan tradisional Sumatera Utara\") canvas =", "= lst[14] text2 = Text(master2, font = \"Arial 12\") text2.insert(INSERT, text1) text2.pack() master2.mainloop()", "800, height = 600) master2.title(\"Makanan tradisional BAnten\") canvas = Canvas(master2, width = 300,", "= 0, column = 2, columnspan = 4) master0.bprov1 = Button(master0, text='<NAME>', command=self.jabar1,", "text1) text2.pack() master2.mainloop() def __prov8(self): master0 = Tk() master0.minsize(width = 450, height =", "= 600) 
master2.maxsize(width = 800, height = 600) master2.title(\"Makanan tradisional Kalimantan Utara\") canvas", "6, columnspan = 4) master0.mainloop() def pabar1(self): master2 = Tk() master2.minsize(width = 800,", "8, column = 7, columnspan = 4) self.bprov26 = Button(self.master, text='Sulawesi Utara', command=self.__prov26,", "300, height = 300) canvas.pack() img = PhotoImage(master = canvas,file=\"Tempoyak.png\") canvas.create_image(5,5, anchor=NW, image=img)", "text1 = lst[23] text2 = Text(master2, font = \"Arial 12\") text2.insert(INSERT, text1) text2.pack()", "height = 600) master2.title(\"Makanan tradisional Sulawesi Barat \") canvas = Canvas(master2, width =", "0, column = 2, columnspan = 4) master0.bprov1 = Button(master0, text='Ikan Bakar Manokwari',", "= 25, height=3) master0.bprov2.grid(row = 1, column = 6, columnspan = 4) master0.mainloop()", "self.bprov17.grid(row = 17, column = 3, columnspan = 4) self.bprov18 = Button(self.master, text='NTB',", "height = 600) master2.maxsize(width = 800, height = 600) master2.title(\"Makanan tradisional Sumatera Barat\")", "= canvas,file=\"Seruit Lampung.png\") canvas.create_image(5,5, anchor=NW, image=img) text1 = lst[18] text2 = Text(master2, font", "columnspan = 4) master0.bprov1 = Button(master0, text='Pempek Palembang', command=self.sumsel1, width = 25, height=3)", "6, column = 7, columnspan = 4) self.bprov24 = Button(self.master, text='Kalimantan Timur', command=self.__prov24,", "= 17, column = 3, columnspan = 4) self.bprov18 = Button(self.master, text='NTB', command=self.__prov18,", "\"Pilih makanan :)\", font = \"Arial 16 bold\") master0.judul.grid(row = 0, column =", "Button(master0, text='<NAME>', command=self.kbang1, width = 25, height=3) master0.bprov1.grid(row = 1, column = 2,", "column = 2, columnspan = 4) master0.bprov2 = Button(master0, text='<NAME>', command=self.kalut2, width =", "__prov11(self): master0 = Tk() master0.minsize(width = 450, height = 100) master0.maxsize(width = 450,", "text2 = 
Text(master2, font = \"Arial 12\") text2.insert(INSERT, text1) text2.pack() master2.mainloop() def __prov32(self):", "= 11, column = 7, columnspan = 4) self.bprov29 = Button(self.master, text='Sulawesi Selatan',", "= \"Arial 12\") text2.insert(INSERT, text1) text2.pack() master2.mainloop() def sulsel2(self): master2 = Tk() master2.minsize(width", "master master.minsize(width = 500, height = 600) master.maxsize(width = 500, height = 600)", "= Button(self.master, text='Kalimantan Tengah', command=self.__prov22, width = 25) self.bprov22.grid(row = 5, column =", "command=self.__prov19, width = 25) self.bprov19.grid(row = 2, column = 7, columnspan = 4)", "= canvas,file=\"Lepek Binti.png\") canvas.create_image(5,5, anchor=NW, image=img) text1 = lst[12] text2 = Text(master2, font", "__prov20(self): master0 = Tk() master0.minsize(width = 450, height = 100) master0.maxsize(width = 450,", "width = 300, height = 300) canvas.pack() img = PhotoImage(master = canvas,file=\"<NAME>oyak.png\") canvas.create_image(5,5,", "command=self.sulgar2, width = 25, height=3) master0.bprov2.grid(row = 1, column = 6, columnspan =", "= 25) self.bprov2.grid(row = 2, column = 3, columnspan = 4) self.bprov3 =", "= PhotoImage(master = canvas,file=\"Sinonggi.png\") canvas.create_image(5,5, anchor=NW, image=img) text1 = lst[58] text2 = Text(master2,", "text2 = Text(master2, font = \"Arial 12\") text2.insert(INSERT, text1) text2.pack() master2.mainloop() def malut2(self):", "canvas.pack() img = PhotoImage(master = canvas,file=\"Kohu Kohu.png\") canvas.create_image(5,5, anchor=NW, image=img) text1 = lst[63]", "= Button(self.master, text='Sulawesi Tengah', command=self.__prov28, width = 25) self.bprov28.grid(row = 11, column =", "master0.bprov1 = Button(master0, text='Sop Konro', command=self.sulsel1, width = 25, height=3) master0.bprov1.grid(row = 1,", "Kalimantan Utara\") canvas = Canvas(master2, width = 300, height = 300) canvas.pack() img", "master2.mainloop() def sulgar2(self): master2 = Tk() 
master2.minsize(width = 800, height = 600) master2.maxsize(width", "column = 2, columnspan = 4) master0.bprov1 = Button(master0, text='Tiwul', command=self.diy1, width =", "height=3) master0.bprov2.grid(row = 1, column = 6, columnspan = 4) master0.mainloop() def bali1(self):", "kriau1(self): master2 = Tk() master2.minsize(width = 800, height = 600) master2.maxsize(width = 800,", "= 600) master2.maxsize(width = 800, height = 600) master2.title(\"Makanan tradisional Sumatera Barat\") canvas", "= 300, height = 300) canvas.pack() img = PhotoImage(master = canvas,file=\"Ikan Bakar Manokwari.png\")", "width = 300, height = 300) canvas.pack() img = PhotoImage(master = canvas,file=\"Kepiting Soka.png\")", "= 3, columnspan = 4) self.bprov15 = Button(self.master, text='DI Yogyakarta', command=self.__prov15, width =", "Button(master0, text='Bubur Paddas Sambas', command=self.kalbar1, width = 25, height=3) master0.bprov1.grid(row = 1, column", "text2.insert(INSERT, text1) text2.pack() master2.mainloop() def sulteng2(self): master2 = Tk() master2.minsize(width = 800, height", "2, columnspan = 4) master0.bprov2 = Button(master0, text='Gulai Taboh', command=self.lamp2, width = 25,", "= 4) master0.bprov2 = Button(master0, text='<NAME>', command=self.kalteng2, width = 25, height=3) master0.bprov2.grid(row =", "= 600) master2.maxsize(width = 800, height = 600) master2.title(\"Makanan tradisional Sulawesi Tenggara\") canvas", "= 25) self.bprov12.grid(row = 12, column = 3, columnspan = 4) self.bprov13 =", "= PhotoImage(master = canvas,file=\"Rujak Cingur.png\") canvas.create_image(5,5, anchor=NW, image=img) text1 = lst[30] text2 =", "text2.pack() master2.mainloop() def sulut2(self): master2 = Tk() master2.minsize(width = 800, height = 600)", "canvas,file=\"Bilenthango.png\") canvas.create_image(5,5, anchor=NW, image=img) text1 = lst[49] text2 = Text(master2, font = \"Arial", "= 7, columnspan = 4) self.bprov33 = Button(self.master, text='Papua Barat', command=self.__prov33, width =", 
"text2.insert(INSERT, text1) text2.pack() master2.mainloop() def sumut2(self): master2 = Tk() master2.minsize(width = 800, height", "lst[10] text2 = Text(master2, font = \"Arial 12\") text2.insert(INSERT, text1) text2.pack() master2.mainloop() def", "def kriau2(self): master2 = Tk() master2.minsize(width = 800, height = 600) master2.maxsize(width =", "master2.maxsize(width = 800, height = 600) master2.title(\"Makanan tradisional NTT\") canvas = Canvas(master2, width", "300, height = 300) canvas.pack() img = PhotoImage(master = canvas,file=\"Gatang Kenari.png\") canvas.create_image(5,5, anchor=NW,", "= 600) master2.title(\"Makanan tradisional Aceh\") canvas = Canvas(master2, width = 300, height =", "= \"Arial 12\") text2.insert(INSERT, text1) text2.pack() master2.mainloop() def sumut2(self): master2 = Tk() master2.minsize(width", "= lst[55] text2 = Text(master2, font = \"Arial 12\") text2.insert(INSERT, text1) text2.pack() master2.mainloop()", "\"Arial 12\") text2.insert(INSERT, text1) text2.pack() master2.mainloop() def __prov26(self): master0 = Tk() master0.minsize(width =", "master2.mainloop() def __prov34(self): master0 = Tk() master0.minsize(width = 450, height = 100) master0.maxsize(width", "= 300, height = 300) canvas.pack() img = PhotoImage(master = canvas,file=\"Sate Lilit.png\") canvas.create_image(5,5,", "font = \"Arial 12\") text2.insert(INSERT, text1) text2.pack() master2.mainloop() def kalut2(self): master2 = Tk()", "column = 7, columnspan = 4) self.bprov22 = Button(self.master, text='Kalimantan Tengah', command=self.__prov22, width", "column = 6, columnspan = 4) master0.mainloop() def goron1(self): master2 = Tk() master2.minsize(width", "column = 2, columnspan = 4) master0.bprov2 = Button(master0, text='Tekwan Palembang', command=self.sumsel2, width", "canvas.pack() img = PhotoImage(master = canvas,file=\"W<NAME>in.png\") canvas.create_image(5,5, anchor=NW, image=img) text1 = lst[43] text2", "= 300) canvas.pack() img = PhotoImage(master = 
canvas,file=\"Ayam Cincane.png\") canvas.create_image(5,5, anchor=NW, image=img) text1", "text2.pack() master2.mainloop() def __prov26(self): master0 = Tk() master0.minsize(width = 450, height = 100)", "self.bprov23 = Button(self.master, text='Kalimantan Selatan', command=self.__prov23, width = 25) self.bprov23.grid(row = 6, column", "def jambi2(self): master2 = Tk() master2.minsize(width = 800, height = 600) master2.maxsize(width =", "column = 6, columnspan = 4) master0.mainloop() def ban1(self): master2 = Tk() master2.minsize(width", "300, height = 300) canvas.pack() img = PhotoImage(master = canvas,file=\"Ikan Bakar Manokwari.png\") canvas.create_image(5,5,", "master0.bprov1.grid(row = 1, column = 2, columnspan = 4) master0.bprov2 = Button(master0, text='Papeda',", "= 6, columnspan = 4) master0.mainloop() def diy1(self): master2 = Tk() master2.minsize(width =", "columnspan = 4) self.bprov9 = Button(self.master, text='Kepulauan Bangka Belitung', command=self.__prov9, width = 25)", "600) master2.title(\"Makanan tradisional NTB\") canvas = Canvas(master2, width = 300, height = 300)", "= 7, columnspan = 4) self.bprov31 = Button(self.master, text='Maluku Utara', command=self.__prov31, width =", "height = 300) canvas.pack() img = PhotoImage(master = canvas,file=\"Sate Ulat Sagu.png\") canvas.create_image(5,5, anchor=NW,", "= lst[48] text2 = Text(master2, font = \"Arial 12\") text2.insert(INSERT, text1) text2.pack() master2.mainloop()", "Canvas(master2, width = 300, height = 300) canvas.pack() img = PhotoImage(master = canvas,file=\"Mie", "600) master2.maxsize(width = 800, height = 600) master2.title(\"Makanan tradisional NTT\") canvas = Canvas(master2,", "lst[18] text2 = Text(master2, font = \"Arial 12\") text2.insert(INSERT, text1) text2.pack() master2.mainloop() def", "= 25) self.bprov5.grid(row = 5, column = 3, columnspan = 4) self.bprov6 =", "height = 300) canvas.pack() img = PhotoImage(master = canvas,file=\"Nasi Grombyang.png\") canvas.create_image(5,5, 
anchor=NW, image=img)", "img = PhotoImage(master = canvas,file=\"Bolu Kemojo.png\") canvas.create_image(5,5, anchor=NW, image=img) text1 = lst[6] text2", "text2.insert(INSERT, text1) text2.pack() master2.mainloop() def kalbar2(self): master2 = Tk() master2.minsize(width = 800, height", "= Text(master2, font = \"Arial 12\") text2.insert(INSERT, text1) text2.pack() master2.mainloop() def ntb2(self): master2", "master0.bprov2 = Button(master0, text='Kohu-kohu', command=self.malu2, width = 25, height=3) master0.bprov2.grid(row = 1, column", "canvas.pack() img = PhotoImage(master = canvas,file=\"Jepa.png\") canvas.create_image(5,5, anchor=NW, image=img) text1 = lst[53] text2", "master2.mainloop() def kalteng2(self): master2 = Tk() master2.minsize(width = 800, height = 600) master2.maxsize(width", "= Button(master0, text='Pempek Palembang', command=self.sumsel1, width = 25, height=3) master0.bprov1.grid(row = 1, column", "tradisional NTB\") canvas = Canvas(master2, width = 300, height = 300) canvas.pack() img", "300) canvas.pack() img = PhotoImage(master = canvas,file=\"Ayam Cincane.png\") canvas.create_image(5,5, anchor=NW, image=img) text1 =", "4, column = 3, columnspan = 4) self.bprov5 = Button(self.master, text='Kepulauan Riau', command=self.__prov5,", "text1 = lst[35] text2 = Text(master2, font = \"Arial 12\") text2.insert(INSERT, text1) text2.pack()", "= 4) master0.bprov1 = Button(master0, text='Lawa', command=self.kalut1, width = 25, height=3) master0.bprov1.grid(row =", "4) self.bprov32 = Button(self.master, text='Maluku', command=self.__prov32, width = 25) self.bprov32.grid(row = 15, column", "= 300, height = 300) canvas.pack() img = PhotoImage(master = canvas,file=\"Tekwan Palembang.png\") canvas.create_image(5,5,", "= 4) master0.bprov2 = Button(master0, text='Coto Makassar', command=self.sulsel2, width = 25, height=3) master0.bprov2.grid(row", "text2.pack() master2.mainloop() def kbang2(self): master2 = Tk() master2.minsize(width = 800, height = 600)", "= 
PhotoImage(master = canvas,file=\"Nasi Lapola.png\") canvas.create_image(5,5, anchor=NW, image=img) text1 = lst[61] text2 =", "300, height = 300) canvas.pack() img = PhotoImage(master = canvas,file=\"Ampiang Dadiah.png\") canvas.create_image(5,5, anchor=NW,", "= 600) master2.title(\"Makanan tradisional Kalimantan Tengah\") canvas = Canvas(master2, width = 300, height", "Utara\") canvas = Canvas(master2, width = 300, height = 300) canvas.pack() img =", "anchor=NW, image=img) text1 = lst[33] text2 = Text(master2, font = \"Arial 12\") text2.insert(INSERT,", "600) master2.maxsize(width = 800, height = 600) master2.title(\"Makanan tradisional Gorontalo\") canvas = Canvas(master2,", "= 0, column = 2, columnspan = 4) master0.bprov1 = Button(master0, text='Kelapertaar', command=self.sulut1,", "= 7, columnspan = 4) self.bprov27 = Button(self.master, text='Sulawesi Barat', command=self.__prov27, width =", "25) self.bprov29.grid(row = 12, column = 7, columnspan = 4) self.bprov30 = Button(self.master,", "text1 = lst[60] text2 = Text(master2, font = \"Arial 12\") text2.insert(INSERT, text1) text2.pack()", "columnspan = 4) master0.bprov2 = Button(master0, text='Sate Ulat Sagu', command=self.pabar2, width = 25,", "canvas,file=\"Uta Kelo.png\") canvas.create_image(5,5, anchor=NW, image=img) text1 = lst[54] text2 = Text(master2, font =", "columnspan = 4) self.bprov24 = Button(self.master, text='Kalimantan Timur', command=self.__prov24, width = 25) self.bprov24.grid(row", "= \"Arial 12\") text2.insert(INSERT, text1) text2.pack() master2.mainloop() def malu2(self): master2 = Tk() master2.minsize(width", "def kaltim1(self): master2 = Tk() master2.minsize(width = 800, height = 600) master2.maxsize(width =", "= lst[59] text2 = Text(master2, font = \"Arial 12\") text2.insert(INSERT, text1) text2.pack() master2.mainloop()", "height=3) master0.bprov2.grid(row = 1, column = 6, columnspan = 4) master0.mainloop() def sumbar1(self):", "\"Arial 12\") text2.insert(INSERT, text1) text2.pack() 
master2.mainloop() def __prov14(self): master0 = Tk() master0.minsize(width =", "anchor=NW, image=img) text1 = lst[37] text2 = Text(master2, font = \"Arial 12\") text2.insert(INSERT,", "= 600) master2.maxsize(width = 800, height = 600) master2.title(\"Makanan tradisional Sumatera Selatan\") canvas", "Kalimantan Barat\") canvas = Canvas(master2, width = 300, height = 300) canvas.pack() img", "text2.insert(INSERT, text1) text2.pack() master2.mainloop() def __prov11(self): master0 = Tk() master0.minsize(width = 450, height", "6, columnspan = 4) master0.mainloop() def jateng1(self): master2 = Tk() master2.minsize(width = 800,", "anchor=NW, image=img) text1 = lst[65] text2 = Text(master2, font = \"Arial 12\") text2.insert(INSERT,", "aceh2(self): master2 = Tk() master2.minsize(width = 800, height = 600) master2.maxsize(width = 800,", "= 4) self.bprov24 = Button(self.master, text='Kalimantan Timur', command=self.__prov24, width = 25) self.bprov24.grid(row =", "= 300) canvas.pack() img = PhotoImage(master = canvas,file=\"Kohu Kohu.png\") canvas.create_image(5,5, anchor=NW, image=img) text1", "anchor=NW, image=img) text1 = lst[66] text2 = Text(master2, font = \"Arial 12\") text2.insert(INSERT,", "1, column = 6, columnspan = 4) master0.mainloop() def dki1(self): master2 = Tk()", "800, height = 600) master2.title(\"Makanan tradisional DKI Jakarta\") canvas = Canvas(master2, width =", "command=self.__prov6, width = 25) self.bprov6.grid(row = 6, column = 3, columnspan = 4)", "master0.bprov1 = Button(master0, text='Rujak Cingur', command=self.jatim1, width = 25, height=3) master0.bprov1.grid(row = 1,", "height = 300) canvas.pack() img = PhotoImage(master = canvas,file=\"Tekwan Palembang.png\") canvas.create_image(5,5, anchor=NW, image=img)", "column = 6, columnspan = 4) master0.mainloop() def ntt1(self): master2 = Tk() master2.minsize(width", "600) master2.maxsize(width = 800, height = 600) master2.title(\"Makanan tradisional Sumatera Selatan\") canvas =", "height = 300) 
canvas.pack() img = PhotoImage(master = canvas,file=\"Gatang Kenari.png\") canvas.create_image(5,5, anchor=NW, image=img)", "2, columnspan = 4) master0.bprov1 = Button(master0, text='Seruit Lampung', command=self.lamp1, width = 25,", "height = 300) canvas.pack() img = PhotoImage(master = canvas,file=\"Rujak Cingur.png\") canvas.create_image(5,5, anchor=NW, image=img)", "Selatan', command=self.__prov8, width = 25) self.bprov8.grid(row = 8, column = 3, columnspan =", "image=img) text1 = lst[62] text2 = Text(master2, font = \"Arial 12\") text2.insert(INSERT, text1)", "columnspan = 4) master0.bprov2 = Button(master0, text='<NAME>', command=self.kalut2, width = 25, height=3) master0.bprov2.grid(row", "height = 600) master2.title(\"Makanan tradisional Bengkulu\") canvas = Canvas(master2, width = 300, height", "def __prov3(self): master0 = Tk() master0.minsize(width = 450, height = 100) master0.maxsize(width =", "= 600) master2.maxsize(width = 800, height = 600) master2.title(\"Makanan tradisional DKI Jakarta\") canvas", "text2.pack() master2.mainloop() def kalbar2(self): master2 = Tk() master2.minsize(width = 800, height = 600)", "image=img) text1 = lst[6] text2 = Text(master2, font = \"Arial 12\") text2.insert(INSERT, text1)", "command=self.riau1, width = 25, height=3) master0.bprov1.grid(row = 1, column = 2, columnspan =", "text2.insert(INSERT, text1) text2.pack() master2.mainloop() def __prov8(self): master0 = Tk() master0.minsize(width = 450, height", "height=3) master0.bprov2.grid(row = 1, column = 6, columnspan = 4) master0.mainloop() def kbang1(self):", "PhotoImage(master = canvas,file=\"Sate Bandeng.png\") canvas.create_image(5,5, anchor=NW, image=img) text1 = lst[23] text2 = Text(master2,", "column = 6, columnspan = 4) master0.mainloop() def diy1(self): master2 = Tk() master2.minsize(width", "= PhotoImage(master = canvas,file=\"Gulai Taboh.png\") canvas.create_image(5,5, anchor=NW, image=img) text1 = lst[19] text2 =", "= Text(master2, font = \"Arial 12\") 
text2.insert(INSERT, text1) text2.pack() master2.mainloop() def __prov24(self): master0", "height = 600) master2.title(\"Makanan tradisional Riau\") canvas = Canvas(master2, width = 300, height", "3, column = 7, columnspan = 4) self.bprov21 = Button(self.master, text='Kalimantan Barat', command=self.__prov21,", "= 3, columnspan = 4) self.bprov17 = Button(self.master, text='Bali', command=self.__prov17, width = 25)", "command=self.kbang1, width = 25, height=3) master0.bprov1.grid(row = 1, column = 2, columnspan =", "master2.maxsize(width = 800, height = 600) master2.title(\"Makanan tradisional Maluku Utara\") canvas = Canvas(master2,", "command=self.sulsel1, width = 25, height=3) master0.bprov1.grid(row = 1, column = 2, columnspan =", "600) master2.title(\"Makanan tradisional Papua Barat\") canvas = Canvas(master2, width = 300, height =", "PhotoImage(master = canvas,file=\"Bagar Hiu.png\") canvas.create_image(5,5, anchor=NW, image=img) text1 = lst[13] text2 = Text(master2,", "command=self.__prov23, width = 25) self.bprov23.grid(row = 6, column = 7, columnspan = 4)", "canvas.pack() img = PhotoImage(master = canvas,file=\"Bolu Kemojo.png\") canvas.create_image(5,5, anchor=NW, image=img) text1 = lst[6]", "font = \"Arial 12\") text2.insert(INSERT, text1) text2.pack() master2.mainloop() def sulbar2(self): master2 = Tk()", "= 6, column = 3, columnspan = 4) self.bprov7 = Button(self.master, text='Bengukulu', command=self.__prov7,", "columnspan = 4) master0.mainloop() def malut1(self): master2 = Tk() master2.minsize(width = 800, height", "height=3) master0.bprov2.grid(row = 1, column = 6, columnspan = 4) master0.mainloop() def jateng1(self):", "def jatim2(self): master2 = Tk() master2.minsize(width = 800, height = 600) master2.maxsize(width =", "Button(master0, text='Bika Ambon', command=self.sumut2, width = 25, height=3) master0.bprov2.grid(row = 1, column =", "= Text(master2, font = \"Arial 12\") text2.insert(INSERT, text1) text2.pack() master2.mainloop() def 
__prov30(self): master0", "Button(self.master, text='Kepulauan Bangka Belitung', command=self.__prov9, width = 25) self.bprov9.grid(row = 9, column =", "6, columnspan = 4) master0.mainloop() def jambi1(self): master2 = Tk() master2.minsize(width = 800,", "16, column = 7, columnspan = 4) self.bprov34 = Button(self.master, text='Papua', command=self.__prov34, width", "img = PhotoImage(master = canvas,file=\"Ayam Taliwang.png\") canvas.create_image(5,5, anchor=NW, image=img) text1 = lst[34] text2", "canvas.create_image(5,5, anchor=NW, image=img) text1 = lst[36] text2 = Text(master2, font = \"Arial 12\")", "Canvas(master2, width = 300, height = 300) canvas.pack() img = PhotoImage(master = canvas,file=\"Kalumpe.png\")", "lst[45] text2 = Text(master2, font = \"Arial 12\") text2.insert(INSERT, text1) text2.pack() master2.mainloop() def", "= 3, columnspan = 4) self.bprov16 = Button(self.master, text='Jawa Timur', command=self.__prov16, width =", "text2 = Text(master2, font = \"Arial 12\") text2.insert(INSERT, text1) text2.pack() master2.mainloop() def __prov22(self):", "cmp = f.readline() lst.append(txt) line = f.readline() class DirektoriMakanan(): def __init__(self, master=Tk()): self.master", "= 300, height = 300) canvas.pack() img = PhotoImage(master = canvas,file=\"Bubur Paddas Sambas.png\")", "= canvas,file=\"Papeda.png\") canvas.create_image(5,5, anchor=NW, image=img) text1 = lst[67] text2 = Text(master2, font =", "def __prov16(self): master0 = Tk() master0.minsize(width = 450, height = 100) master0.maxsize(width =", "text2.pack() master2.mainloop() def __prov32(self): master0 = Tk() master0.minsize(width = 450, height = 100)", "canvas,file=\"Udang Selingkuh.png\") canvas.create_image(5,5, anchor=NW, image=img) text1 = lst[66] text2 = Text(master2, font =", "12\") text2.insert(INSERT, text1) text2.pack() master2.mainloop() def papua2(self): master2 = Tk() master2.minsize(width = 800,", "column = 3, columnspan = 4) self.bprov3 = Button(self.master, text='Sumatera 
Barat', command=self.__prov3, width", "height = 600) master2.title(\"Makanan tradisional Sulawesi Selatan\") canvas = Canvas(master2, width = 300,", "font = \"Arial 12\") text2.insert(INSERT, text1) text2.pack() master2.mainloop() def __prov7(self): master0 = Tk()", "text2 = Text(master2, font = \"Arial 12\") text2.insert(INSERT, text1) text2.pack() master2.mainloop() def sulsel2(self):", "text='Kepulauan Riau', command=self.__prov5, width = 25) self.bprov5.grid(row = 5, column = 3, columnspan", "Button(self.master, text='Gorontalo', command=self.__prov25, width = 25) self.bprov25.grid(row = 8, column = 7, columnspan", "2, columnspan = 4) master0.bprov2 = Button(master0, text='Tapa Kolo', command=self.ntt2, width = 25,", "= 4) master0.bprov2 = Button(master0, text='Kaledo', command=self.sulteng2, width = 25, height=3) master0.bprov2.grid(row =", "= 1, column = 2, columnspan = 4) master0.bprov2 = Button(master0, text='Belacan Belitung',", "= 25) self.bprov17.grid(row = 17, column = 3, columnspan = 4) self.bprov18 =", "text1 = lst[24] text2 = Text(master2, font = \"Arial 12\") text2.insert(INSERT, text1) text2.pack()", "image=img) text1 = lst[23] text2 = Text(master2, font = \"Arial 12\") text2.insert(INSERT, text1)", "= 300) canvas.pack() img = PhotoImage(master = canvas,file=\"Padamaran.png\") canvas.create_image(5,5, anchor=NW, image=img) text1 =", "column = 2, columnspan = 4) master0.bprov1 = Button(master0, text='Uta Kelo', command=self.sulteng1, width", "font = \"Arial 12\") text2.insert(INSERT, text1) text2.pack() master2.mainloop() def aceh2(self): master2 = Tk()", "text2 = Text(master2, font = \"Arial 12\") text2.insert(INSERT, text1) text2.pack() master2.mainloop() def sumut2(self):", "= 300, height = 300) canvas.pack() img = PhotoImage(master = canvas,file=\"Dekke Na Niura.png\")", "master0.bprov1.grid(row = 1, column = 2, columnspan = 4) master0.bprov2 = Button(master0, text='Rawon',", "height = 600) master2.maxsize(width = 800, height = 600) 
master2.title(\"Makanan tradisional Maluku Utara\")", "12\") text2.insert(INSERT, text1) text2.pack() master2.mainloop() def __prov29(self): master0 = Tk() master0.minsize(width = 450,", "master0.mainloop() def malu1(self): master2 = Tk() master2.minsize(width = 800, height = 600) master2.maxsize(width", "= 3, columnspan = 4) self.bprov4 = Button(self.master, text='Riau', command=self.__prov4, width = 25)", "image=img) text1 = lst[21] text2 = Text(master2, font = \"Arial 12\") text2.insert(INSERT, text1)", "600) master2.title(\"Makanan tradisional DI Yogyakarta\") canvas = Canvas(master2, width = 300, height =", "master0.bprov2 = Button(master0, text='<NAME>', command=self.ban2, width = 25, height=3) master0.bprov2.grid(row = 1, column", "command=self.sulbar2, width = 25, height=3) master0.bprov2.grid(row = 1, column = 6, columnspan =", "text='Kohu-kohu', command=self.malu2, width = 25, height=3) master0.bprov2.grid(row = 1, column = 6, columnspan", "Button(master0, text='Rawon', command=self.jatim2, width = 25, height=3) master0.bprov2.grid(row = 1, column = 6,", "300, height = 300) canvas.pack() img = PhotoImage(master = canvas,file=\"Kasoami.png\") canvas.create_image(5,5, anchor=NW, image=img)", "= canvas,file=\"Sei.png\") canvas.create_image(5,5, anchor=NW, image=img) text1 = lst[36] text2 = Text(master2, font =", "def ntb2(self): master2 = Tk() master2.minsize(width = 800, height = 600) master2.maxsize(width =", "= 4) self.bprov10 = Button(self.master, text='Lampung', command=self.__prov10, width = 25) self.bprov10.grid(row = 10,", "= 600) master2.maxsize(width = 800, height = 600) master2.title(\"Makanan tradisional Jambi\") canvas =", "columnspan = 4) master0.bprov2 = Button(master0, text='<NAME>', command=self.kalteng2, width = 25, height=3) master0.bprov2.grid(row", "text='<NAME>', command=self.sumut1, width = 25, height=3) master0.bprov1.grid(row = 1, column = 2, columnspan", "\"Arial 12\") text2.insert(INSERT, text1) text2.pack() master2.mainloop() def 
__prov2(self): master0 = Tk() master0.minsize(width =", "= 7, columnspan = 4) self.bprov32 = Button(self.master, text='Maluku', command=self.__prov32, width = 25)", "= 4) master0.mainloop() def bali1(self): master2 = Tk() master2.minsize(width = 800, height =", "anchor=NW, image=img) text1 = lst[52] text2 = Text(master2, font = \"Arial 12\") text2.insert(INSERT,", "canvas.pack() img = PhotoImage(master = canvas,file=\"Seruit Lampung.png\") canvas.create_image(5,5, anchor=NW, image=img) text1 = lst[18]", "= 800, height = 600) master2.title(\"Makanan tradisional Jawa Timur\") canvas = Canvas(master2, width", "600) master2.title(\"Makanan tradisional Jawa Timur\") canvas = Canvas(master2, width = 300, height =", "command=self.ntb1, width = 25, height=3) master0.bprov1.grid(row = 1, column = 2, columnspan =", "height=3) master0.bprov2.grid(row = 1, column = 6, columnspan = 4) master0.mainloop() def sulut1(self):", "= 4) master0.bprov2 = Button(master0, text='Kue Timpan', command=self.aceh2, width = 25, height=3) master0.bprov2.grid(row", "100) master0.maxsize(width = 450, height = 100) master0.title(\"Direktori Makanan Tradisional Nusantara\") master0.judul =", "master2.mainloop() def aceh2(self): master2 = Tk() master2.minsize(width = 800, height = 600) master2.maxsize(width", "Button(self.master, text='Riau', command=self.__prov4, width = 25) self.bprov4.grid(row = 4, column = 3, columnspan", "4) master0.bprov2 = Button(master0, text='Gong gong', command=self.kriau2, width = 25, height=3) master0.bprov2.grid(row =", "def papua2(self): master2 = Tk() master2.minsize(width = 800, height = 600) master2.maxsize(width =", "= \"Arial 12\") text2.insert(INSERT, text1) text2.pack() master2.mainloop() def beng2(self): master2 = Tk() master2.minsize(width", "width = 25) self.bprov17.grid(row = 17, column = 3, columnspan = 4) self.bprov18", "img = PhotoImage(master = canvas,file=\"Rendang.png\") canvas.create_image(5,5, anchor=NW, image=img) text1 = lst[4] text2 =", "height = 
600) master2.maxsize(width = 800, height = 600) master2.title(\"Makanan tradisional Sulawesi Selatan\")", "height = 600) master2.title(\"Makanan tradisional Aceh\") canvas = Canvas(master2, width = 300, height", "4) master0.bprov2 = Button(master0, text='Gulai Taboh', command=self.lamp2, width = 25, height=3) master0.bprov2.grid(row =", "master2.maxsize(width = 800, height = 600) master2.title(\"Makanan tradisional Sumatera Utara\") canvas = Canvas(master2,", "master0.mainloop() def ntt1(self): master2 = Tk() master2.minsize(width = 800, height = 600) master2.maxsize(width", "column = 3, columnspan = 4) self.bprov15 = Button(self.master, text='DI Yogyakarta', command=self.__prov15, width", "master2.mainloop() def __prov22(self): master0 = Tk() master0.minsize(width = 450, height = 100) master0.maxsize(width", "image=img) text1 = lst[20] text2 = Text(master2, font = \"Arial 12\") text2.insert(INSERT, text1)", "800, height = 600) master2.title(\"Makanan tradisional Kalimnantan Timur\") canvas = Canvas(master2, width =", "text2 = Text(master2, font = \"Arial 12\") text2.insert(INSERT, text1) text2.pack() master2.mainloop() def __prov26(self):", "command=self.pabar1, width = 25, height=3) master0.bprov1.grid(row = 1, column = 2, columnspan =", "= 25, height=3) master0.bprov1.grid(row = 1, column = 2, columnspan = 4) master0.bprov2", "= 3, column = 7, columnspan = 4) self.bprov21 = Button(self.master, text='Kalimantan Barat',", "column = 2, columnspan = 4) master0.bprov1 = Button(master0, text='Bubur Paddas Sambas', command=self.kalbar1,", "= 300) canvas.pack() img = PhotoImage(master = canvas,file=\"Bilenthango.png\") canvas.create_image(5,5, anchor=NW, image=img) text1 =", "master2.maxsize(width = 800, height = 600) master2.title(\"Makanan tradisional Sulawesi Tenggara\") canvas = Canvas(master2,", "text1) text2.pack() master2.mainloop() def __prov7(self): master0 = Tk() master0.minsize(width = 450, height =", "width = 300, height = 300) canvas.pack() img = 
PhotoImage(master = canvas,file=\"Bolu Kemojo.png\")", "Sumatera Barat\") canvas = Canvas(master2, width = 300, height = 300) canvas.pack() img", "= 0, column = 2, columnspan = 4) master0.bprov1 = Button(master0, text='<NAME>', command=self.kriau1,", "Button(master0, text='Bagar Hiu', command=self.beng2, width = 25, height=3) master0.bprov2.grid(row = 1, column =", "master2.title(\"Makanan tradisional NTT\") canvas = Canvas(master2, width = 300, height = 300) canvas.pack()", "anchor=NW, image=img) text1 = lst[42] text2 = Text(master2, font = \"Arial 12\") text2.insert(INSERT,", "= \"Arial 12\") text2.insert(INSERT, text1) text2.pack() master2.mainloop() def __prov25(self): master0 = Tk() master0.minsize(width", "canvas.create_image(5,5, anchor=NW, image=img) text1 = lst[4] text2 = Text(master2, font = \"Arial 12\")", "300) canvas.pack() img = PhotoImage(master = canvas,file=\"<NAME>.png\") canvas.create_image(5,5, anchor=NW, image=img) text1 = lst[21]", "height = 300) canvas.pack() img = PhotoImage(master = canvas,file=\"W<NAME>in.png\") canvas.create_image(5,5, anchor=NW, image=img) text1", "self.bprov32 = Button(self.master, text='Maluku', command=self.__prov32, width = 25) self.bprov32.grid(row = 15, column =", "Text(master2, font = \"Arial 12\") text2.insert(INSERT, text1) text2.pack() master2.mainloop() def __prov7(self): master0 =", "= 2, columnspan = 4) master0.bprov1 = Button(master0, text='<NAME>', command=self.jabar1, width = 25,", "text1 = lst[57] text2 = Text(master2, font = \"Arial 12\") text2.insert(INSERT, text1) text2.pack()", "columnspan = 4) master0.bprov2 = Button(master0, text='<NAME>', command=self.kalbar2, width = 25, height=3) master0.bprov2.grid(row", "height = 600) self.master.title(\"Selamat Datang di McDones (Direktori Macanan Tradisional)\") self.master.judul = Label(self.master,", "Button(self.master, text='Lampung', command=self.__prov10, width = 25) self.bprov10.grid(row = 10, column = 3, columnspan", "columnspan = 4) master0.bprov2 = 
Button(master0, text='<NAME>u', command=self.bali2, width = 25, height=3) master0.bprov2.grid(row", "Canvas(master2, width = 300, height = 300) canvas.pack() img = PhotoImage(master = canvas,file=\"Kue", "text2 = Text(master2, font = \"Arial 12\") text2.insert(INSERT, text1) text2.pack() master2.mainloop() def __prov4(self):", "Taboh', command=self.lamp2, width = 25, height=3) master0.bprov2.grid(row = 1, column = 6, columnspan", "= 300, height = 300) canvas.pack() img = PhotoImage(master = canvas,file=\"Sinonggi.png\") canvas.create_image(5,5, anchor=NW,", "= 300) canvas.pack() img = PhotoImage(master = canvas,file=\"Tekwan Palembang.png\") canvas.create_image(5,5, anchor=NW, image=img) text1", "= lst[60] text2 = Text(master2, font = \"Arial 12\") text2.insert(INSERT, text1) text2.pack() master2.mainloop()", "master0.bprov1.grid(row = 1, column = 2, columnspan = 4) master0.bprov2 = Button(master0, text='Gong", "text2 = Text(master2, font = \"Arial 12\") text2.insert(INSERT, text1) text2.pack() master2.mainloop() def diy2(self):", "= lst[53] text2 = Text(master2, font = \"Arial 12\") text2.insert(INSERT, text1) text2.pack() master2.mainloop()", "= 0, column = 2, columnspan = 4) master0.bprov1 = Button(master0, text='Bubur Paddas", "= 300, height = 300) canvas.pack() img = PhotoImage(master = canvas,file=\"Jadah Tempe.png\") canvas.create_image(5,5,", "width = 300, height = 300) canvas.pack() img = PhotoImage(master = canvas,file=\"Papeda.png\") canvas.create_image(5,5,", "= Button(master0, text='<NAME>', command=self.ntb1, width = 25, height=3) master0.bprov1.grid(row = 1, column =", "1, column = 2, columnspan = 4) master0.bprov2 = Button(master0, text='<NAME>', command=self.kalut2, width", "Button(master0, text='Sinonggi', command=self.sulgar1, width = 25, height=3) master0.bprov1.grid(row = 1, column = 2,", "= Button(master0, text='<NAME>', command=self.kalut2, width = 25, height=3) master0.bprov2.grid(row = 1, column =", "column = 2, columnspan = 4) 
master0.bprov1 = Button(master0, text='Kelapertaar', command=self.sulut1, width =", "= 13, column = 7, columnspan = 4) self.bprov31 = Button(self.master, text='Maluku Utara',", "text1 = lst[27] text2 = Text(master2, font = \"Arial 12\") text2.insert(INSERT, text1) text2.pack()", "def kalut1(self): master2 = Tk() master2.minsize(width = 800, height = 600) master2.maxsize(width =", "from tkinter import * lst = [] def readf(): with open('all.txt', 'r') as", "16, column = 3, columnspan = 4) self.bprov17 = Button(self.master, text='Bali', command=self.__prov17, width", "text2 = Text(master2, font = \"Arial 12\") text2.insert(INSERT, text1) text2.pack() master2.mainloop() def kalut2(self):", "image=img) text1 = lst[45] text2 = Text(master2, font = \"Arial 12\") text2.insert(INSERT, text1)", "img = PhotoImage(master = canvas,file=\"Ampiang Dadiah.png\") canvas.create_image(5,5, anchor=NW, image=img) text1 = lst[5] text2", "Canvas(master2, width = 300, height = 300) canvas.pack() img = PhotoImage(master = canvas,file=\"<NAME>.png\")", "command=self.kalsel1, width = 25, height=3) master0.bprov1.grid(row = 1, column = 2, columnspan =", "800, height = 600) master2.title(\"Makanan tradisional Sulawesi Utara\") canvas = Canvas(master2, width =", "width = 300, height = 300) canvas.pack() img = PhotoImage(master = canvas,file=\"Apang Bugis.png\")", "master0.bprov2 = Button(master0, text='Belacan Belitung', command=self.kbang2, width = 25, height=3) master0.bprov2.grid(row = 1,", "width = 300, height = 300) canvas.pack() img = PhotoImage(master = canvas,file=\"Ayam Taliwang.png\")", "image=img) text1 = lst[38] text2 = Text(master2, font = \"Arial 12\") text2.insert(INSERT, text1)", "master0.bprov1.grid(row = 1, column = 2, columnspan = 4) master0.bprov2 = Button(master0, text='Sate", "column = 7, columnspan = 4) self.bprov26 = Button(self.master, text='Sulawesi Utara', command=self.__prov26, width", "text1 = lst[1] text2 = Text(master2, font = \"Arial 12\") text2.insert(INSERT, 
text1) text2.pack()", "= 2, columnspan = 4) master0.bprov2 = Button(master0, text='Belacan Belitung', command=self.kbang2, width =", "= 5, column = 7, columnspan = 4) self.bprov23 = Button(self.master, text='Kalimantan Selatan',", "25) self.bprov34.grid(row = 17, column = 7, columnspan = 4) self.master.mainloop() def __prov1(self):", "= 300, height = 300) canvas.pack() img = PhotoImage(master = canvas,file=\"Sate Bulayak.png\") canvas.create_image(5,5,", "columnspan = 4) self.bprov30 = Button(self.master, text='Sulawesi Tenggara', command=self.__prov30, width = 25) self.bprov30.grid(row", "column = 2, columnspan = 4) master0.bprov2 = Button(master0, text='Gong gong', command=self.kriau2, width", "\"Arial 12\") text2.insert(INSERT, text1) text2.pack() master2.mainloop() def __prov6(self): master0 = Tk() master0.minsize(width =", "height = 600) master2.title(\"Makanan tradisional Kalimantan Barat\") canvas = Canvas(master2, width = 300,", "master2.maxsize(width = 800, height = 600) master2.title(\"Makanan tradisional Kalimantan Tengah\") canvas = Canvas(master2,", "= 6, columnspan = 4) master0.mainloop() def papua1(self): master2 = Tk() master2.minsize(width =", "= 0, column = 2, columnspan = 4) master0.bprov1 = Button(master0, text='<NAME>', command=self.kbang1,", "100) master0.title(\"Direktori Makanan Tradisional Nusantara\") master0.judul = Label(master0, text = \"Pilih makanan :)\",", "= Text(master2, font = \"Arial 12\") text2.insert(INSERT, text1) text2.pack() master2.mainloop() def __prov33(self): master0", "height=3) master0.bprov2.grid(row = 1, column = 6, columnspan = 4) master0.mainloop() def kriau1(self):", "canvas.pack() img = PhotoImage(master = canvas,file=\"Gulai Taboh.png\") canvas.create_image(5,5, anchor=NW, image=img) text1 = lst[19]", "text2.pack() master2.mainloop() def ntb2(self): master2 = Tk() master2.minsize(width = 800, height = 600)", "1, column = 6, columnspan = 4) master0.mainloop() def ntt1(self): master2 = Tk()", "0, column = 2, 
columnspan = 4) master0.bprov1 = Button(master0, text='Pempek Palembang', command=self.sumsel1,", "= Button(master0, text='Sop Konro', command=self.sulsel1, width = 25, height=3) master0.bprov1.grid(row = 1, column", "800, height = 600) master2.title(\"Makanan tradisional Banten\") canvas = Canvas(master2, width = 300,", "canvas.create_image(5,5, anchor=NW, image=img) text1 = lst[21] text2 = Text(master2, font = \"Arial 12\")", "lst[65] text2 = Text(master2, font = \"Arial 12\") text2.insert(INSERT, text1) text2.pack() master2.mainloop() def", "Hiu', command=self.beng2, width = 25, height=3) master0.bprov2.grid(row = 1, column = 6, columnspan", "= 25) self.bprov15.grid(row = 15, column = 3, columnspan = 4) self.bprov16 =", "4) master0.bprov1 = Button(master0, text='Pempek Palembang', command=self.sumsel1, width = 25, height=3) master0.bprov1.grid(row =", "height = 300) canvas.pack() img = PhotoImage(master = canvas,file=\"Tiwul.png\") canvas.create_image(5,5, anchor=NW, image=img) text1", "canvas.create_image(5,5, anchor=NW, image=img) text1 = lst[28] text2 = Text(master2, font = \"Arial 12\")", "canvas,file=\"Dorokdok.png\") canvas.create_image(5,5, anchor=NW, image=img) text1 = lst[25] text2 = Text(master2, font = \"Arial", "Papua\") canvas = Canvas(master2, width = 300, height = 300) canvas.pack() img =", "text='Papua', command=self.__prov34, width = 25) self.bprov34.grid(row = 17, column = 7, columnspan =", "25) self.bprov1.grid(row = 1, column = 3, columnspan = 4) self.bprov2 = Button(self.master,", "def kbang1(self): master2 = Tk() master2.minsize(width = 800, height = 600) master2.maxsize(width =", "lst[3] text2 = Text(master2, font = \"Arial 12\") text2.insert(INSERT, text1) text2.pack() master2.mainloop() def", "Canvas(master2, width = 300, height = 300) canvas.pack() img = PhotoImage(master = canvas,file=\"Dekke", "800, height = 600) master2.maxsize(width = 800, height = 600) master2.title(\"Makanan tradisional BAnten\")", "Belitung.png\") 
canvas.create_image(5,5, anchor=NW, image=img) text1 = lst[17] text2 = Text(master2, font = \"Arial", "Tempe.png\") canvas.create_image(5,5, anchor=NW, image=img) text1 = lst[27] text2 = Text(master2, font = \"Arial", "1, column = 2, columnspan = 4) master0.bprov2 = Button(master0, text='Kue Timpan', command=self.aceh2,", "text2.pack() master2.mainloop() def sumbar2(self): master2 = Tk() master2.minsize(width = 800, height = 600)", "def ban2(self): master2 = Tk() master2.minsize(width = 800, height = 600) master2.maxsize(width =", "= 4) master0.bprov1 = Button(master0, text='<NAME>', command=self.riau1, width = 25, height=3) master0.bprov1.grid(row =", "= Button(self.master, text='Sumatera Selatan', command=self.__prov8, width = 25) self.bprov8.grid(row = 8, column =", "= 4) self.bprov32 = Button(self.master, text='Maluku', command=self.__prov32, width = 25) self.bprov32.grid(row = 15,", "Jambi\") canvas = Canvas(master2, width = 300, height = 300) canvas.pack() img =", "columnspan = 4) master0.mainloop() def sulgar1(self): master2 = Tk() master2.minsize(width = 800, height", "Selatan', command=self.__prov23, width = 25) self.bprov23.grid(row = 6, column = 7, columnspan =", "text2 = Text(master2, font = \"Arial 12\") text2.insert(INSERT, text1) text2.pack() master2.mainloop() def kbang2(self):", "Text(master2, font = \"Arial 12\") text2.insert(INSERT, text1) text2.pack() master2.mainloop() def sulgar2(self): master2 =", "\"Arial 12\") text2.insert(INSERT, text1) text2.pack() master2.mainloop() def jabar2(self): master2 = Tk() master2.minsize(width =", "master0.bprov1.grid(row = 1, column = 2, columnspan = 4) master0.bprov2 = Button(master0, text='Jepa',", "= 0, column = 2, columnspan = 4) master0.bprov1 = Button(master0, text='Rendang', command=self.sumbar1,", "800, height = 600) master2.title(\"Makanan tradisional Maluku Utara\") canvas = Canvas(master2, width =", "= 7, columnspan = 4) self.bprov30 = Button(self.master, text='Sulawesi Tenggara', 
command=self.__prov30, width =", "= Button(self.master, text='Jawa Timur', command=self.__prov16, width = 25) self.bprov16.grid(row = 16, column =", "anchor=NW, image=img) text1 = lst[4] text2 = Text(master2, font = \"Arial 12\") text2.insert(INSERT,", "master0.bprov2 = Button(master0, text='Tekwan Palembang', command=self.sumsel2, width = 25, height=3) master0.bprov2.grid(row = 1,", "master0.bprov1 = Button(master0, text='Tiwul', command=self.diy1, width = 25, height=3) master0.bprov1.grid(row = 1, column", "Kalimantan Tengah\") canvas = Canvas(master2, width = 300, height = 300) canvas.pack() img", "beng2(self): master2 = Tk() master2.minsize(width = 800, height = 600) master2.maxsize(width = 800,", "image=img) text1 = lst[18] text2 = Text(master2, font = \"Arial 12\") text2.insert(INSERT, text1)", "= 300, height = 300) canvas.pack() img = PhotoImage(master = canvas,file=\"Nasi Sumsum.png\") canvas.create_image(5,5,", "PhotoImage(master = canvas,file=\"Gangan Asam Banjar.png\") canvas.create_image(5,5, anchor=NW, image=img) text1 = lst[45] text2 =", "= 4) self.bprov20 = Button(self.master, text='Kalimantan Utara', command=self.__prov20, width = 25) self.bprov20.grid(row =", "self.master = master master.minsize(width = 500, height = 600) master.maxsize(width = 500, height", "img = PhotoImage(master = canvas,file=\"Dekke Na Niura.png\") canvas.create_image(5,5, anchor=NW, image=img) text1 = lst[2]", "25) self.bprov12.grid(row = 12, column = 3, columnspan = 4) self.bprov13 = Button(self.master,", "\"Arial 12\") text2.insert(INSERT, text1) text2.pack() master2.mainloop() def sumbar2(self): master2 = Tk() master2.minsize(width =", "= 25) self.bprov34.grid(row = 17, column = 7, columnspan = 4) self.master.mainloop() def", "= \"Arial 12\") text2.insert(INSERT, text1) text2.pack() master2.mainloop() def __prov15(self): master0 = Tk() master0.minsize(width", "2, columnspan = 4) master0.bprov1 = Button(master0, text='Sinonggi', command=self.sulgar1, width = 25, height=3)", 
"7, columnspan = 4) self.bprov21 = Button(self.master, text='Kalimantan Barat', command=self.__prov21, width = 25)", "image=img) text1 = lst[24] text2 = Text(master2, font = \"Arial 12\") text2.insert(INSERT, text1)", "2, columnspan = 4) master0.bprov1 = Button(master0, text='Lepek Binti', command=self.beng1, width = 25,", "= 6, columnspan = 4) master0.mainloop() def jambi1(self): master2 = Tk() master2.minsize(width =", "5, column = 3, columnspan = 4) self.bprov6 = Button(self.master, text='Jambi', command=self.__prov6, width", "text='Gong gong', command=self.kriau2, width = 25, height=3) master0.bprov2.grid(row = 1, column = 6,", "12\") text2.insert(INSERT, text1) text2.pack() master2.mainloop() def lamp2(self): master2 = Tk() master2.minsize(width = 800,", "PhotoImage(master = canvas,file=\"Soto Betawi.png\") canvas.create_image(5,5, anchor=NW, image=img) text1 = lst[20] text2 = Text(master2,", "300, height = 300) canvas.pack() img = PhotoImage(master = canvas,file=\"Binte Biluhuta.png\") canvas.create_image(5,5, anchor=NW,", "img = PhotoImage(master = canvas,file=\"Soto Betawi.png\") canvas.create_image(5,5, anchor=NW, image=img) text1 = lst[20] text2", "= 0, column = 2, columnspan = 4) master0.bprov1 = Button(master0, text='<NAME>', command=self.kaltim1,", "= 1, column = 6, columnspan = 4) master0.mainloop() def malu1(self): master2 =", "300) canvas.pack() img = PhotoImage(master = canvas,file=\"Binte Biluhuta.png\") canvas.create_image(5,5, anchor=NW, image=img) text1 =", "= 300) canvas.pack() img = PhotoImage(master = canvas,file=\"Bagar Hiu.png\") canvas.create_image(5,5, anchor=NW, image=img) text1", "width = 25, height=3) master0.bprov1.grid(row = 1, column = 2, columnspan = 4)", "height = 300) canvas.pack() img = PhotoImage(master = canvas,file=\"Jepa.png\") canvas.create_image(5,5, anchor=NW, image=img) text1", "= 300) canvas.pack() img = PhotoImage(master = canvas,file=\"Sate Ulat Sagu.png\") canvas.create_image(5,5, anchor=NW, image=img)", "font = 
\"Arial 12\") text2.insert(INSERT, text1) text2.pack() master2.mainloop() def dki2(self): master2 = Tk()", "master2.mainloop() def ntb2(self): master2 = Tk() master2.minsize(width = 800, height = 600) master2.maxsize(width", "text2 = Text(master2, font = \"Arial 12\") text2.insert(INSERT, text1) text2.pack() master2.mainloop() def __prov21(self):", "height = 100) master0.title(\"Direktori Makanan Tradisional Nusantara\") master0.judul = Label(master0, text = \"Pilih", "= 800, height = 600) master2.title(\"Makanan tradisional DI Yogyakarta\") canvas = Canvas(master2, width", "= 0, column = 2, columnspan = 4) master0.bprov1 = Button(master0, text='<NAME>', command=self.ntb1,", "= 1, column = 2, columnspan = 4) master0.bprov2 = Button(master0, text='Coto Makassar',", "column = 3, columnspan = 4) self.bprov11 = Button(self.master, text='Banten', command=self.__prov11, width =", "4) self.bprov11 = Button(self.master, text='Banten', command=self.__prov11, width = 25) self.bprov11.grid(row = 11, column", "text2.pack() master2.mainloop() def __prov4(self): master0 = Tk() master0.minsize(width = 450, height = 100)", "def __prov11(self): master0 = Tk() master0.minsize(width = 450, height = 100) master0.maxsize(width =", "0, column = 2, columnspan = 4) master0.bprov1 = Button(master0, text='<NAME>', command=self.riau1, width", "font = \"Arial 12\") text2.insert(INSERT, text1) text2.pack() master2.mainloop() def malut2(self): master2 = Tk()", "= 4) master0.mainloop() def sumsel1(self): master2 = Tk() master2.minsize(width = 800, height =", "diy1(self): master2 = Tk() master2.minsize(width = 800, height = 600) master2.maxsize(width = 800,", "text='Nasi Bekekpor', command=self.kaltim2, width = 25, height=3) master0.bprov2.grid(row = 1, column = 6,", "master0.mainloop() def kaltim1(self): master2 = Tk() master2.minsize(width = 800, height = 600) master2.maxsize(width", "300) canvas.pack() img = PhotoImage(master = canvas,file=\"Soto Kudus.png\") canvas.create_image(5,5, 
anchor=NW, image=img) text1 =", "column = 2, columnspan = 4) master0.bprov2 = Button(master0, text='Kohu-kohu', command=self.malu2, width =", "height = 600) master2.title(\"Makanan tradisional Sulawesi Tengah\") canvas = Canvas(master2, width = 300,", "text1) text2.pack() master2.mainloop() def __prov30(self): master0 = Tk() master0.minsize(width = 450, height =", "text='Lampung', command=self.__prov10, width = 25) self.bprov10.grid(row = 10, column = 3, columnspan =", "column = 6, columnspan = 4) master0.mainloop() def kaltim1(self): master2 = Tk() master2.minsize(width", "Text(master2, font = \"Arial 12\") text2.insert(INSERT, text1) text2.pack() master2.mainloop() def diy2(self): master2 =", "canvas.create_image(5,5, anchor=NW, image=img) text1 = lst[0] text2 = Text(master2, font = \"Arial 12\")", "= 600) master2.maxsize(width = 800, height = 600) master2.title(\"Makanan tradisional BAnten\") canvas =", "= 4) master0.bprov2 = Button(master0, text='<NAME>', command=self.kalbar2, width = 25, height=3) master0.bprov2.grid(row =", "300, height = 300) canvas.pack() img = PhotoImage(master = canvas,file=\"<NAME>.png\") canvas.create_image(5,5, anchor=NW, image=img)", "master0.bprov1.grid(row = 1, column = 2, columnspan = 4) master0.bprov2 = Button(master0, text='Tinutuan',", "= 300) canvas.pack() img = PhotoImage(master = canvas,file=\"Asidah.png\") canvas.create_image(5,5, anchor=NW, image=img) text1 =", "= 4) self.bprov9 = Button(self.master, text='Kepulauan Bangka Belitung', command=self.__prov9, width = 25) self.bprov9.grid(row", "font = \"Arial 12\") text2.insert(INSERT, text1) text2.pack() master2.mainloop() def __prov23(self): master0 = Tk()", "= 300, height = 300) canvas.pack() img = PhotoImage(master = canvas,file=\"Gangan Asam Banjar.png\")", "img = PhotoImage(master = canvas,file=\"Ikan Bakar Manokwari.png\") canvas.create_image(5,5, anchor=NW, image=img) text1 = lst[64]", "= 4) master0.bprov2 = Button(master0, text='Tapa Kolo', command=self.ntt2, width = 
25, height=3) master0.bprov2.grid(row", "__prov2(self): master0 = Tk() master0.minsize(width = 450, height = 100) master0.maxsize(width = 450,", "= Button(self.master, text='Sulawesi Barat', command=self.__prov27, width = 25) self.bprov27.grid(row = 10, column =", "column = 3, columnspan = 4) self.bprov13 = Button(self.master, text='DKI Jakarta', command=self.__prov13, width", "text='Sulawesi Selatan', command=self.__prov29, width = 25) self.bprov29.grid(row = 12, column = 7, columnspan", "height=3) master0.bprov2.grid(row = 1, column = 6, columnspan = 4) master0.mainloop() def malu1(self):", "font = \"Arial 12\") text2.insert(INSERT, text1) text2.pack() master2.mainloop() def kriau2(self): master2 = Tk()", "= 1, column = 2, columnspan = 4) master0.bprov2 = Button(master0, text='Bilenthango', command=self.goron2,", "def sulut2(self): master2 = Tk() master2.minsize(width = 800, height = 600) master2.maxsize(width =", "7, columnspan = 4) self.bprov19 = Button(self.master, text='NTT', command=self.__prov19, width = 25) self.bprov19.grid(row", "PhotoImage(master = canvas,file=\"Papeda.png\") canvas.create_image(5,5, anchor=NW, image=img) text1 = lst[67] text2 = Text(master2, font", "text1) text2.pack() master2.mainloop() def kbang2(self): master2 = Tk() master2.minsize(width = 800, height =", "master0.bprov1.grid(row = 1, column = 2, columnspan = 4) master0.bprov2 = Button(master0, text='Kohu-kohu',", "11, column = 7, columnspan = 4) self.bprov29 = Button(self.master, text='Sulawesi Selatan', command=self.__prov29,", "300, height = 300) canvas.pack() img = PhotoImage(master = canvas,file=\"Jadah Tempe.png\") canvas.create_image(5,5, anchor=NW,", "= 25) self.bprov14.grid(row = 14, column = 3, columnspan = 4) self.bprov15 =", "master0.bprov1 = Button(master0, text='<NAME>', command=self.sumut1, width = 25, height=3) master0.bprov1.grid(row = 1, column", "= 4) self.bprov7 = Button(self.master, text='Bengukulu', command=self.__prov7, width = 25) self.bprov7.grid(row = 7,", 
"= canvas,file=\"Sate Ulat Sagu.png\") canvas.create_image(5,5, anchor=NW, image=img) text1 = lst[65] text2 = Text(master2,", "= Button(master0, text='Apang Bugis', command=self.sulbar1, width = 25, height=3) master0.bprov1.grid(row = 1, column", "height = 300) canvas.pack() img = PhotoImage(master = canvas,file=\"Binte Biluhuta.png\") canvas.create_image(5,5, anchor=NW, image=img)", "text2.insert(INSERT, text1) text2.pack() master2.mainloop() def __prov17(self): master0 = Tk() master0.minsize(width = 450, height", "anchor=NW, image=img) text1 = lst[28] text2 = Text(master2, font = \"Arial 12\") text2.insert(INSERT,", "text1) text2.pack() master2.mainloop() def beng2(self): master2 = Tk() master2.minsize(width = 800, height =", "= 9, column = 7, columnspan = 4) self.bprov27 = Button(self.master, text='Sulawesi Barat',", "Button(master0, text='Belacan Belitung', command=self.kbang2, width = 25, height=3) master0.bprov2.grid(row = 1, column =", "anchor=NW, image=img) text1 = lst[60] text2 = Text(master2, font = \"Arial 12\") text2.insert(INSERT,", "image=img) text1 = lst[17] text2 = Text(master2, font = \"Arial 12\") text2.insert(INSERT, text1)", "lst[28] text2 = Text(master2, font = \"Arial 12\") text2.insert(INSERT, text1) text2.pack() master2.mainloop() def", "line = f.readline() class DirektoriMakanan(): def __init__(self, master=Tk()): self.master = master master.minsize(width =", "= Text(master2, font = \"Arial 12\") text2.insert(INSERT, text1) text2.pack() master2.mainloop() def __prov27(self): master0", "text='Apang Bugis', command=self.sulbar1, width = 25, height=3) master0.bprov1.grid(row = 1, column = 2,", "text2 = Text(master2, font = \"Arial 12\") text2.insert(INSERT, text1) text2.pack() master2.mainloop() def __prov6(self):", "sumbar2(self): master2 = Tk() master2.minsize(width = 800, height = 600) master2.maxsize(width = 800,", "300) canvas.pack() img = PhotoImage(master = canvas,file=\"Apang Bugis.png\") canvas.create_image(5,5, anchor=NW, image=img) 
text1 =", "= 4) master0.mainloop() def aceh1(self): master2 = Tk() master2.minsize(width = 800, height =", "2, columnspan = 4) master0.bprov1 = Button(master0, text='<NAME>', command=self.kbang1, width = 25, height=3)", "master2.mainloop() def __prov8(self): master0 = Tk() master0.minsize(width = 450, height = 100) master0.maxsize(width", "\"Arial 12\") text2.insert(INSERT, text1) text2.pack() master2.mainloop() def jatim2(self): master2 = Tk() master2.minsize(width =", "12\") text2.insert(INSERT, text1) text2.pack() master2.mainloop() def kalteng2(self): master2 = Tk() master2.minsize(width = 800,", "= Button(master0, text='Uta Kelo', command=self.sulteng1, width = 25, height=3) master0.bprov1.grid(row = 1, column", "columnspan = 4) self.bprov34 = Button(self.master, text='Papua', command=self.__prov34, width = 25) self.bprov34.grid(row =", "anchor=NW, image=img) text1 = lst[56] text2 = Text(master2, font = \"Arial 12\") text2.insert(INSERT,", "3, columnspan = 4) self.bprov13 = Button(self.master, text='DKI Jakarta', command=self.__prov13, width = 25)", "= 4) master0.bprov1 = Button(master0, text='Kelapertaar', command=self.sulut1, width = 25, height=3) master0.bprov1.grid(row =", "Binti', command=self.beng1, width = 25, height=3) master0.bprov1.grid(row = 1, column = 2, columnspan", "canvas.pack() img = PhotoImage(master = canvas,file=\"<NAME>oyak.png\") canvas.create_image(5,5, anchor=NW, image=img) text1 = lst[41] text2", "= 4) master0.bprov1 = Button(master0, text='Uta Kelo', command=self.sulteng1, width = 25, height=3) master0.bprov1.grid(row", "master0.bprov2 = Button(master0, text='Nasi Gerombyang', command=self.jateng2, width = 25, height=3) master0.bprov2.grid(row = 1,", "= 1, column = 2, columnspan = 4) master0.bprov2 = Button(master0, text='Tapa Kolo',", "columnspan = 4) master0.bprov1 = Button(master0, text='Apang Bugis', command=self.sulbar1, width = 25, height=3)", "600) master2.title(\"Makanan tradisional Bangka Belitung\") canvas = Canvas(master2, 
width = 300, height =", "akhir mpkta #semoga lancar #aminnn from tkinter import * lst = [] def", "text1 = lst[32] text2 = Text(master2, font = \"Arial 12\") text2.insert(INSERT, text1) text2.pack()", "width = 300, height = 300) canvas.pack() img = PhotoImage(master = canvas,file=\"Tapa Kolo.png\")", "canvas.pack() img = PhotoImage(master = canvas,file=\"Sambal Colo Colo.png\") canvas.create_image(5,5, anchor=NW, image=img) text1 =", "4) master0.bprov1 = Button(master0, text='<NAME>', command=self.kriau1, width = 25, height=3) master0.bprov1.grid(row = 1,", "lst[32] text2 = Text(master2, font = \"Arial 12\") text2.insert(INSERT, text1) text2.pack() master2.mainloop() def", "self.bprov21.grid(row = 4, column = 7, columnspan = 4) self.bprov22 = Button(self.master, text='Kalimantan", "= 1, column = 2, columnspan = 4) master0.bprov2 = Button(master0, text='Jadah Tempe',", "master0.judul = Label(master0, text = \"Pilih makanan :)\", font = \"Arial 16 bold\")", "Gorontalo\") canvas = Canvas(master2, width = 300, height = 300) canvas.pack() img =", "canvas,file=\"Belaca Belitung.png\") canvas.create_image(5,5, anchor=NW, image=img) text1 = lst[17] text2 = Text(master2, font =", "= lst[52] text2 = Text(master2, font = \"Arial 12\") text2.insert(INSERT, text1) text2.pack() master2.mainloop()", "height = 600) master2.maxsize(width = 800, height = 600) master2.title(\"Makanan tradisional Sumatera Selatan\")", "= Button(self.master, text='Papua', command=self.__prov34, width = 25) self.bprov34.grid(row = 17, column = 7,", "height=3) master0.bprov2.grid(row = 1, column = 6, columnspan = 4) master0.mainloop() def ban1(self):", "= 2, columnspan = 4) master0.bprov1 = Button(master0, text='Lawa', command=self.kalut1, width = 25,", "lst[49] text2 = Text(master2, font = \"Arial 12\") text2.insert(INSERT, text1) text2.pack() master2.mainloop() def", "image=img) text1 = lst[60] text2 = Text(master2, font = \"Arial 12\") text2.insert(INSERT, text1)", "columnspan = 4) master0.bprov2 = 
Button(master0, text='<NAME>', command=self.ban2, width = 25, height=3) master0.bprov2.grid(row", "25) self.bprov33.grid(row = 16, column = 7, columnspan = 4) self.bprov34 = Button(self.master,", "= Text(master2, font = \"Arial 12\") text2.insert(INSERT, text1) text2.pack() master2.mainloop() def __prov8(self): master0", "= lst[65] text2 = Text(master2, font = \"Arial 12\") text2.insert(INSERT, text1) text2.pack() master2.mainloop()", "= Text(master2, font = \"Arial 12\") text2.insert(INSERT, text1) text2.pack() master2.mainloop() def papua2(self): master2", "= lst[38] text2 = Text(master2, font = \"Arial 12\") text2.insert(INSERT, text1) text2.pack() master2.mainloop()", "master0.title(\"Direktori Makanan Tradisional Nusantara\") master0.judul = Label(master0, text = \"Pilih makanan :)\", font", "master2.mainloop() def papua2(self): master2 = Tk() master2.minsize(width = 800, height = 600) master2.maxsize(width", "self.bprov19 = Button(self.master, text='NTT', command=self.__prov19, width = 25) self.bprov19.grid(row = 2, column =", "self.master.mainloop() def __prov1(self): master0 = Tk() master0.minsize(width = 450, height = 100) master0.maxsize(width", "= canvas,file=\"<NAME>.png\") canvas.create_image(5,5, anchor=NW, image=img) text1 = lst[24] text2 = Text(master2, font =", "text1) text2.pack() master2.mainloop() def __prov23(self): master0 = Tk() master0.minsize(width = 450, height =", "text1 = lst[53] text2 = Text(master2, font = \"Arial 12\") text2.insert(INSERT, text1) text2.pack()", "= 25) self.bprov13.grid(row = 13, column = 3, columnspan = 4) self.bprov14 =", "PhotoImage(master = canvas,file=\"Dekke Na Niura.png\") canvas.create_image(5,5, anchor=NW, image=img) text1 = lst[2] text2 =", "= 800, height = 600) master2.title(\"Makanan tradisional Bengkulu\") canvas = Canvas(master2, width =", "= 300, height = 300) canvas.pack() img = PhotoImage(master = canvas,file=\"Lepek Binti.png\") canvas.create_image(5,5,", "line): line = f.readline() cmp = '' txt = '' 
while('<end>' not in", "canvas,file=\"Ikan Bakar Manokwari.png\") canvas.create_image(5,5, anchor=NW, image=img) text1 = lst[64] text2 = Text(master2, font", "= Button(self.master, text='Sulawesi Utara', command=self.__prov26, width = 25) self.bprov26.grid(row = 9, column =", "__prov30(self): master0 = Tk() master0.minsize(width = 450, height = 100) master0.maxsize(width = 450,", "= PhotoImage(master = canvas,file=\"Ayam Betutu.png\") canvas.create_image(5,5, anchor=NW, image=img) text1 = lst[33] text2 =", "canvas,file=\"Gatang Kenari.png\") canvas.create_image(5,5, anchor=NW, image=img) text1 = lst[60] text2 = Text(master2, font =", "Text(master2, font = \"Arial 12\") text2.insert(INSERT, text1) text2.pack() master2.mainloop() def __prov24(self): master0 =", "2, columnspan = 4) master0.bprov2 = Button(master0, text='Ampiang Dadiah', command=self.sumbar2, width = 25,", "line = '' for i in range(68): while('<deskripsi>' not in line): line =", "text1 = lst[34] text2 = Text(master2, font = \"Arial 12\") text2.insert(INSERT, text1) text2.pack()", "Button(master0, text='Kalumpe', command=self.kalteng1, width = 25, height=3) master0.bprov1.grid(row = 1, column = 2,", "width = 300, height = 300) canvas.pack() img = PhotoImage(master = canvas,file=\"Bagar Hiu.png\")", "def pabar1(self): master2 = Tk() master2.minsize(width = 800, height = 600) master2.maxsize(width =", "= 4) self.bprov4 = Button(self.master, text='Riau', command=self.__prov4, width = 25) self.bprov4.grid(row = 4,", "4) master0.bprov1 = Button(master0, text='Lawa', command=self.kalut1, width = 25, height=3) master0.bprov1.grid(row = 1,", "= \"Arial 12\") text2.insert(INSERT, text1) text2.pack() master2.mainloop() def __prov24(self): master0 = Tk() master0.minsize(width", "self.bprov10 = Button(self.master, text='Lampung', command=self.__prov10, width = 25) self.bprov10.grid(row = 10, column =", "= canvas,file=\"Ayam Taliwang.png\") canvas.create_image(5,5, anchor=NW, image=img) text1 = lst[34] text2 = 
Text(master2, font", "4) master0.bprov2 = Button(master0, text='Ampiang Dadiah', command=self.sumbar2, width = 25, height=3) master0.bprov2.grid(row =", "sulsel2(self): master2 = Tk() master2.minsize(width = 800, height = 600) master2.maxsize(width = 800,", "master2.maxsize(width = 800, height = 600) master2.title(\"Makanan tradisional DI Yogyakarta\") canvas = Canvas(master2,", "canvas.create_image(5,5, anchor=NW, image=img) text1 = lst[55] text2 = Text(master2, font = \"Arial 12\")", "text2 = Text(master2, font = \"Arial 12\") text2.insert(INSERT, text1) text2.pack() master2.mainloop() def sulut2(self):", "self.bprov29.grid(row = 12, column = 7, columnspan = 4) self.bprov30 = Button(self.master, text='Sulawesi", "4) master0.bprov2 = Button(master0, text='Belacan Belitung', command=self.kbang2, width = 25, height=3) master0.bprov2.grid(row =", "canvas.create_image(5,5, anchor=NW, image=img) text1 = lst[16] text2 = Text(master2, font = \"Arial 12\")", "text='Kaledo', command=self.sulteng2, width = 25, height=3) master0.bprov2.grid(row = 1, column = 6, columnspan", "= 0, column = 2, columnspan = 4) master0.bprov1 = Button(master0, text='Sop Konro',", "12\") text2.insert(INSERT, text1) text2.pack() master2.mainloop() def __prov22(self): master0 = Tk() master0.minsize(width = 450,", "= 4) master0.bprov1 = Button(master0, text='<NAME>', command=self.kaltim1, width = 25, height=3) master0.bprov1.grid(row =", "self.bprov12.grid(row = 12, column = 3, columnspan = 4) self.bprov13 = Button(self.master, text='DKI", "[] def readf(): with open('all.txt', 'r') as f: line = '' for i", "text2 = Text(master2, font = \"Arial 12\") text2.insert(INSERT, text1) text2.pack() master2.mainloop() def __prov27(self):", "tradisional Kalimnantan Timur\") canvas = Canvas(master2, width = 300, height = 300) canvas.pack()", "command=self.__prov16, width = 25) self.bprov16.grid(row = 16, column = 3, columnspan = 4)", "self.bprov3 = Button(self.master, text='Sumatera Barat', command=self.__prov3, 
width = 25) self.bprov3.grid(row = 3, column", "= canvas,file=\"Jadah Tempe.png\") canvas.create_image(5,5, anchor=NW, image=img) text1 = lst[27] text2 = Text(master2, font", "Kudus.png\") canvas.create_image(5,5, anchor=NW, image=img) text1 = lst[28] text2 = Text(master2, font = \"Arial", "bali1(self): master2 = Tk() master2.minsize(width = 800, height = 600) master2.maxsize(width = 800,", "lst[55] text2 = Text(master2, font = \"Arial 12\") text2.insert(INSERT, text1) text2.pack() master2.mainloop() def", "text='Papeda', command=self.papua2, width = 25, height=3) master0.bprov2.grid(row = 1, column = 6, columnspan", "text1 = lst[21] text2 = Text(master2, font = \"Arial 12\") text2.insert(INSERT, text1) text2.pack()", "PhotoImage(master = canvas,file=\"Asidah.png\") canvas.create_image(5,5, anchor=NW, image=img) text1 = lst[7] text2 = Text(master2, font", "__prov9(self): master0 = Tk() master0.minsize(width = 450, height = 100) master0.maxsize(width = 450,", "def goron2(self): master2 = Tk() master2.minsize(width = 800, height = 600) master2.maxsize(width =", "4) self.bprov34 = Button(self.master, text='Papua', command=self.__prov34, width = 25) self.bprov34.grid(row = 17, column", "12, column = 3, columnspan = 4) self.bprov13 = Button(self.master, text='DKI Jakarta', command=self.__prov13,", "6, columnspan = 4) master0.mainloop() def ntt1(self): master2 = Tk() master2.minsize(width = 800,", "= 800, height = 600) master2.title(\"Makanan tradisional Sulawesi Barat\") canvas = Canvas(master2, width", "lst[24] text2 = Text(master2, font = \"Arial 12\") text2.insert(INSERT, text1) text2.pack() master2.mainloop() def", "master0.bprov2 = Button(master0, text='Sate Bulayak', command=self.ntb2, width = 25, height=3) master0.bprov2.grid(row = 1,", "= 300, height = 300) canvas.pack() img = PhotoImage(master = canvas,file=\"Klapertaart.png\") canvas.create_image(5,5, anchor=NW,", "= PhotoImage(master = canvas,file=\"W<NAME>in.png\") canvas.create_image(5,5, anchor=NW, 
image=img) text1 = lst[43] text2 = Text(master2,", "font = \"Arial 12\") text2.insert(INSERT, text1) text2.pack() master2.mainloop() def __prov28(self): master0 = Tk()", "height = 300) canvas.pack() img = PhotoImage(master = canvas,file=\"Bubur Paddas Sambas.png\") canvas.create_image(5,5, anchor=NW,", "def malut1(self): master2 = Tk() master2.minsize(width = 800, height = 600) master2.maxsize(width =", "text2 = Text(master2, font = \"Arial 12\") text2.insert(INSERT, text1) text2.pack() master2.mainloop() def jatim2(self):", "1, column = 6, columnspan = 4) master0.mainloop() def lamp1(self): master2 = Tk()", "command=self.__prov15, width = 25) self.bprov15.grid(row = 15, column = 3, columnspan = 4)", "bold\") master0.judul.grid(row = 0, column = 2, columnspan = 4) master0.bprov1 = Button(master0,", "master2.mainloop() def __prov23(self): master0 = Tk() master0.minsize(width = 450, height = 100) master0.maxsize(width", "= 1, column = 6, columnspan = 4) master0.mainloop() def aceh1(self): master2 =", "DKI Jakarta\") canvas = Canvas(master2, width = 300, height = 300) canvas.pack() img", "lst[41] text2 = Text(master2, font = \"Arial 12\") text2.insert(INSERT, text1) text2.pack() master2.mainloop() def", "= 25) self.bprov4.grid(row = 4, column = 3, columnspan = 4) self.bprov5 =", "\"Arial 12\") text2.insert(INSERT, text1) text2.pack() master2.mainloop() def __prov22(self): master0 = Tk() master0.minsize(width =", "height = 300) canvas.pack() img = PhotoImage(master = canvas,file=\"Kalumpe.png\") canvas.create_image(5,5, anchor=NW, image=img) text1", "PhotoImage(master = canvas,file=\"Kohu Kohu.png\") canvas.create_image(5,5, anchor=NW, image=img) text1 = lst[63] text2 = Text(master2,", "font = \"Arial 12\") text2.insert(INSERT, text1) text2.pack() master2.mainloop() def __prov25(self): master0 = Tk()", "= lst[25] text2 = Text(master2, font = \"Arial 12\") text2.insert(INSERT, text1) text2.pack() master2.mainloop()", "300) canvas.pack() img = PhotoImage(master = 
canvas,file=\"Kalumpe.png\") canvas.create_image(5,5, anchor=NW, image=img) text1 = lst[42]", "__prov31(self): master0 = Tk() master0.minsize(width = 450, height = 100) master0.maxsize(width = 450,", "text1) text2.pack() master2.mainloop() def __prov17(self): master0 = Tk() master0.minsize(width = 450, height =", "width = 300, height = 300) canvas.pack() img = PhotoImage(master = canvas,file=\"Kasoami.png\") canvas.create_image(5,5,", "PhotoImage(master = canvas,file=\"Kasoami.png\") canvas.create_image(5,5, anchor=NW, image=img) text1 = lst[59] text2 = Text(master2, font", "height = 600) master2.maxsize(width = 800, height = 600) master2.title(\"Makanan tradisional Riau\") canvas", "text2.pack() master2.mainloop() def __prov6(self): master0 = Tk() master0.minsize(width = 450, height = 100)", "2, columnspan = 4) master0.bprov2 = Button(master0, text='<NAME>', command=self.jambi2, width = 25, height=3)", "def __prov31(self): master0 = Tk() master0.minsize(width = 450, height = 100) master0.maxsize(width =", "height = 300) canvas.pack() img = PhotoImage(master = canvas,file=\"Padamaran.png\") canvas.create_image(5,5, anchor=NW, image=img) text1", "Button(master0, text='<NAME>', command=self.riau1, width = 25, height=3) master0.bprov1.grid(row = 1, column = 2,", "= 2, columnspan = 4) master0.bprov1 = Button(master0, text='Sambal Colo-colo', command=self.malu1, width =", "= 600) master2.title(\"Makanan tradisional DI Yogyakarta\") canvas = Canvas(master2, width = 300, height", "def __prov4(self): master0 = Tk() master0.minsize(width = 450, height = 100) master0.maxsize(width =", "tradisional Sulawesi Barat \") canvas = Canvas(master2, width = 300, height = 300)", "diy2(self): master2 = Tk() master2.minsize(width = 800, height = 600) master2.maxsize(width = 800,", "= \"Arial 12\") text2.insert(INSERT, text1) text2.pack() master2.mainloop() def __prov30(self): master0 = Tk() master0.minsize(width", "tradisional Kalimantan Selatan\") canvas = Canvas(master2, width = 300, 
height = 300) canvas.pack()", "= 300) canvas.pack() img = PhotoImage(master = canvas,file=\"Bubur Paddas Sambas.png\") canvas.create_image(5,5, anchor=NW, image=img)", "master2.mainloop() def __prov30(self): master0 = Tk() master0.minsize(width = 450, height = 100) master0.maxsize(width", "text='Sate Ulat Sagu', command=self.pabar2, width = 25, height=3) master0.bprov2.grid(row = 1, column =", "def __prov1(self): master0 = Tk() master0.minsize(width = 450, height = 100) master0.maxsize(width =", "Text(master2, font = \"Arial 12\") text2.insert(INSERT, text1) text2.pack() master2.mainloop() def __prov8(self): master0 =", "text1 = lst[47] text2 = Text(master2, font = \"Arial 12\") text2.insert(INSERT, text1) text2.pack()", "Text(master2, font = \"Arial 12\") text2.insert(INSERT, text1) text2.pack() master2.mainloop() def __prov10(self): master0 =", "4) master0.bprov2 = Button(master0, text='Asidah', command=self.riau2, width = 25, height=3) master0.bprov2.grid(row = 1,", "4) master0.bprov2 = Button(master0, text='Kasoami', command=self.sulgar2, width = 25, height=3) master0.bprov2.grid(row = 1,", "25, height=3) master0.bprov1.grid(row = 1, column = 2, columnspan = 4) master0.bprov2 =", "Text(master2, font = \"Arial 12\") text2.insert(INSERT, text1) text2.pack() master2.mainloop() def __prov31(self): master0 =", "text2 = Text(master2, font = \"Arial 12\") text2.insert(INSERT, text1) text2.pack() master2.mainloop() def __prov33(self):", "self.bprov26 = Button(self.master, text='Sulawesi Utara', command=self.__prov26, width = 25) self.bprov26.grid(row = 9, column", "font = \"Arial 12\") text2.insert(INSERT, text1) text2.pack() master2.mainloop() def kbang2(self): master2 = Tk()", "text2.insert(INSERT, text1) text2.pack() master2.mainloop() def bali2(self): master2 = Tk() master2.minsize(width = 800, height", "= PhotoImage(master = canvas,file=\"Kalumpe.png\") canvas.create_image(5,5, anchor=NW, image=img) text1 = lst[42] text2 = Text(master2,", "text2.pack() 
master2.mainloop() def __prov24(self): master0 = Tk() master0.minsize(width = 450, height = 100)", "papua2(self): master2 = Tk() master2.minsize(width = 800, height = 600) master2.maxsize(width = 800,", "300) canvas.pack() img = PhotoImage(master = canvas,file=\"Belaca Belitung.png\") canvas.create_image(5,5, anchor=NW, image=img) text1 =", "img = PhotoImage(master = canvas,file=\"Sambal Colo Colo.png\") canvas.create_image(5,5, anchor=NW, image=img) text1 = lst[62]", "lst[35] text2 = Text(master2, font = \"Arial 12\") text2.insert(INSERT, text1) text2.pack() master2.mainloop() def", "master2.mainloop() def __prov21(self): master0 = Tk() master0.minsize(width = 450, height = 100) master0.maxsize(width", "4) master0.bprov2 = Button(master0, text='<NAME>', command=self.jambi2, width = 25, height=3) master0.bprov2.grid(row = 1,", "= \"Arial 12\") text2.insert(INSERT, text1) text2.pack() master2.mainloop() def kaltim2(self): master2 = Tk() master2.minsize(width", "width = 25) self.bprov32.grid(row = 15, column = 7, columnspan = 4) self.bprov33", "height = 600) master2.maxsize(width = 800, height = 600) master2.title(\"Makanan tradisional Bangka Belitung\")", "Taliwang.png\") canvas.create_image(5,5, anchor=NW, image=img) text1 = lst[34] text2 = Text(master2, font = \"Arial", "master0.bprov2 = Button(master0, text='Asidah', command=self.riau2, width = 25, height=3) master0.bprov2.grid(row = 1, column", "font = \"Arial 12\") text2.insert(INSERT, text1) text2.pack() master2.mainloop() def __prov5(self): master0 = Tk()", "font = \"Arial 12\") text2.insert(INSERT, text1) text2.pack() master2.mainloop() def malu2(self): master2 = Tk()", "image=img) text1 = lst[54] text2 = Text(master2, font = \"Arial 12\") text2.insert(INSERT, text1)", "= 300) canvas.pack() img = PhotoImage(master = canvas,file=\"Sate Bandeng.png\") canvas.create_image(5,5, anchor=NW, image=img) text1", "canvas,file=\"Kue Timpan.png\") canvas.create_image(5,5, anchor=NW, image=img) text1 = lst[1] text2 = 
Text(master2, font =", "canvas.pack() img = PhotoImage(master = canvas,file=\"Rawon.png\") canvas.create_image(5,5, anchor=NW, image=img) text1 = lst[31] text2", "800, height = 600) master2.title(\"Makanan tradisional Riau\") canvas = Canvas(master2, width = 300,", "text2.pack() master2.mainloop() def __prov25(self): master0 = Tk() master0.minsize(width = 450, height = 100)", "\"Arial 12\") text2.insert(INSERT, text1) text2.pack() master2.mainloop() def riau2(self): master2 = Tk() master2.minsize(width =", "Aceh', command=self.aceh1, width = 25, height=3) master0.bprov1.grid(row = 1, column = 2, columnspan", "12\") text2.insert(INSERT, text1) text2.pack() master2.mainloop() def __prov6(self): master0 = Tk() master0.minsize(width = 450,", "= 1, column = 7, columnspan = 4) self.bprov19 = Button(self.master, text='NTT', command=self.__prov19,", "2, columnspan = 4) master0.bprov1 = Button(master0, text='Binte Biluhuta', command=self.goron1, width = 25,", "= PhotoImage(master = canvas,file=\"Sate Ulat Sagu.png\") canvas.create_image(5,5, anchor=NW, image=img) text1 = lst[65] text2", "= PhotoImage(master = canvas,file=\"Uta Kelo.png\") canvas.create_image(5,5, anchor=NW, image=img) text1 = lst[54] text2 =", "= 4) self.bprov8 = Button(self.master, text='Sumatera Selatan', command=self.__prov8, width = 25) self.bprov8.grid(row =", "1, column = 6, columnspan = 4) master0.mainloop() def kriau1(self): master2 = Tk()", "= 16, column = 3, columnspan = 4) self.bprov17 = Button(self.master, text='Bali', command=self.__prov17,", "master2.mainloop() def __prov27(self): master0 = Tk() master0.minsize(width = 450, height = 100) master0.maxsize(width", "Button(self.master, text='Papua Barat', command=self.__prov33, width = 25) self.bprov33.grid(row = 16, column = 7,", "12\") text2.insert(INSERT, text1) text2.pack() master2.mainloop() def __prov24(self): master0 = Tk() master0.minsize(width = 450,", "Text(master2, font = \"Arial 12\") text2.insert(INSERT, text1) text2.pack() 
master2.mainloop() def kaltim2(self): master2 =", "= 800, height = 600) master2.title(\"Makanan tradisional Jambi\") canvas = Canvas(master2, width =", "canvas.pack() img = PhotoImage(master = canvas,file=\"Soto Betawi.png\") canvas.create_image(5,5, anchor=NW, image=img) text1 = lst[20]", "4) master0.bprov1 = Button(master0, text='Nasi Sumsum', command=self.ban1, width = 25, height=3) master0.bprov1.grid(row =", "= 800, height = 600) master2.title(\"Makanan tradisional Sulawesi Tengah\") canvas = Canvas(master2, width", "= 4) master0.bprov2 = Button(master0, text='Nasi Gerombyang', command=self.jateng2, width = 25, height=3) master0.bprov2.grid(row", "text2.insert(INSERT, text1) text2.pack() master2.mainloop() def kaltim2(self): master2 = Tk() master2.minsize(width = 800, height", "300, height = 300) canvas.pack() img = PhotoImage(master = canvas,file=\"Jepa.png\") canvas.create_image(5,5, anchor=NW, image=img)", "__prov5(self): master0 = Tk() master0.minsize(width = 450, height = 100) master0.maxsize(width = 450,", "= 2, columnspan = 4) master0.bprov2 = Button(master0, text='<NAME>', command=self.ban2, width = 25,", "Tengha\") canvas = Canvas(master2, width = 300, height = 300) canvas.pack() img =", "master0.mainloop() def pabar1(self): master2 = Tk() master2.minsize(width = 800, height = 600) master2.maxsize(width", "Canvas(master2, width = 300, height = 300) canvas.pack() img = PhotoImage(master = canvas,file=\"Gulai", "columnspan = 4) master0.mainloop() def jatim1(self): master2 = Tk() master2.minsize(width = 800, height", "2, columnspan = 4) master0.bprov1 = Button(master0, text='<NAME>', command=self.bali1, width = 25, height=3)", "= lst[41] text2 = Text(master2, font = \"Arial 12\") text2.insert(INSERT, text1) text2.pack() master2.mainloop()", "= canvas,file=\"Lawa.png\") canvas.create_image(5,5, anchor=NW, image=img) text1 = lst[38] text2 = Text(master2, font =", "column = 6, columnspan = 4) master0.mainloop() def kriau1(self): master2 = Tk() 
master2.minsize(width", "text='Soto Kudus', command=self.jateng1, width = 25, height=3) master0.bprov1.grid(row = 1, column = 2,", "master0.bprov1 = Button(master0, text='Apang Bugis', command=self.sulbar1, width = 25, height=3) master0.bprov1.grid(row = 1,", "800, height = 600) master2.maxsize(width = 800, height = 600) master2.title(\"Makanan tradisional Kepulauan", "def __prov6(self): master0 = Tk() master0.minsize(width = 450, height = 100) master0.maxsize(width =", "15, column = 3, columnspan = 4) self.bprov16 = Button(self.master, text='Jawa Timur', command=self.__prov16,", "= Text(master2, font = \"Arial 12\") text2.insert(INSERT, text1) text2.pack() master2.mainloop() def __prov31(self): master0", "text='Uta Kelo', command=self.sulteng1, width = 25, height=3) master0.bprov1.grid(row = 1, column = 2,", "width = 25) self.bprov10.grid(row = 10, column = 3, columnspan = 4) self.bprov11", "height=3) master0.bprov2.grid(row = 1, column = 6, columnspan = 4) master0.mainloop() def malut1(self):", "lst[40] text2 = Text(master2, font = \"Arial 12\") text2.insert(INSERT, text1) text2.pack() master2.mainloop() def", "image=img) text1 = lst[40] text2 = Text(master2, font = \"Arial 12\") text2.insert(INSERT, text1)", "canvas.pack() img = PhotoImage(master = canvas,file=\"Bilenthango.png\") canvas.create_image(5,5, anchor=NW, image=img) text1 = lst[49] text2", "= 1, column = 2, columnspan = 4) master0.bprov2 = Button(master0, text='Dorokdok', command=self.jabar2,", "= 25) self.bprov29.grid(row = 12, column = 7, columnspan = 4) self.bprov30 =", "Text(master2, font = \"Arial 12\") text2.insert(INSERT, text1) text2.pack() master2.mainloop() def jabar2(self): master2 =", "tradisional Sumatera Selatan\") canvas = Canvas(master2, width = 300, height = 300) canvas.pack()", "2, columnspan = 4) master0.bprov2 = Button(master0, text='Kue Timpan', command=self.aceh2, width = 25,", "command=self.jateng1, width = 25, height=3) master0.bprov1.grid(row = 1, column = 2, columnspan =", 
"master0.bprov2 = Button(master0, text='Bika Ambon', command=self.sumut2, width = 25, height=3) master0.bprov2.grid(row = 1,", "PhotoImage(master = canvas,file=\"Nasi Bekepor.png\") canvas.create_image(5,5, anchor=NW, image=img) text1 = lst[47] text2 = Text(master2,", "img = PhotoImage(master = canvas,file=\"Tinutuan.png\") canvas.create_image(5,5, anchor=NW, image=img) text1 = lst[51] text2 =", "= lst[13] text2 = Text(master2, font = \"Arial 12\") text2.insert(INSERT, text1) text2.pack() master2.mainloop()", "font = \"Arial 12\") text2.insert(INSERT, text1) text2.pack() master2.mainloop() def sulteng2(self): master2 = Tk()", "img = PhotoImage(master = canvas,file=\"Luti Gendang.png\") canvas.create_image(5,5, anchor=NW, image=img) text1 = lst[8] text2", "\"Arial 12\") text2.insert(INSERT, text1) text2.pack() master2.mainloop() def kalteng2(self): master2 = Tk() master2.minsize(width =", "width = 300, height = 300) canvas.pack() img = PhotoImage(master = canvas,file=\"Kaledo.png\") canvas.create_image(5,5,", "mpkta #semoga lancar #aminnn from tkinter import * lst = [] def readf():", "canvas,file=\"Nasi Bekepor.png\") canvas.create_image(5,5, anchor=NW, image=img) text1 = lst[47] text2 = Text(master2, font =", "font = \"Arial 12\") text2.insert(INSERT, text1) text2.pack() master2.mainloop() def __prov18(self): master0 = Tk()", "= 600) master2.title(\"Makanan tradisional Kalimantan Selatan\") canvas = Canvas(master2, width = 300, height", "= Text(master2, font = \"Arial 12\") text2.insert(INSERT, text1) text2.pack() master2.mainloop() def kalsel2(self): master2", "anchor=NW, image=img) text1 = lst[62] text2 = Text(master2, font = \"Arial 12\") text2.insert(INSERT,", "= 6, columnspan = 4) master0.mainloop() def jatim1(self): master2 = Tk() master2.minsize(width =", "text2 = Text(master2, font = \"Arial 12\") text2.insert(INSERT, text1) text2.pack() master2.mainloop() def __prov31(self):", "height = 600) master2.maxsize(width = 800, height = 600) 
master2.title(\"Makanan tradisional Sulawesi Barat", "height = 300) canvas.pack() img = PhotoImage(master = canvas,file=\"Ikan Bakar Manokwari.png\") canvas.create_image(5,5, anchor=NW,", "master=Tk()): self.master = master master.minsize(width = 500, height = 600) master.maxsize(width = 500,", "text2.insert(INSERT, text1) text2.pack() master2.mainloop() def kalteng2(self): master2 = Tk() master2.minsize(width = 800, height", "= 450, height = 100) master0.maxsize(width = 450, height = 100) master0.title(\"Direktori Makanan", "canvas.pack() img = PhotoImage(master = canvas,file=\"Bubur Paddas Sambas.png\") canvas.create_image(5,5, anchor=NW, image=img) text1 =", "columnspan = 4) master0.bprov1 = Button(master0, text='<NAME>', command=self.riau1, width = 25, height=3) master0.bprov1.grid(row", "in cmp): txt += cmp cmp = f.readline() lst.append(txt) line = f.readline() class", "lst[34] text2 = Text(master2, font = \"Arial 12\") text2.insert(INSERT, text1) text2.pack() master2.mainloop() def", "= 3, columnspan = 4) self.bprov13 = Button(self.master, text='DKI Jakarta', command=self.__prov13, width =", "anchor=NW, image=img) text1 = lst[64] text2 = Text(master2, font = \"Arial 12\") text2.insert(INSERT,", "text1 = lst[8] text2 = Text(master2, font = \"Arial 12\") text2.insert(INSERT, text1) text2.pack()", "= 600) master2.title(\"Makanan tradisional Sulawesi Barat \") canvas = Canvas(master2, width = 300,", "0, column = 2, columnspan = 4) master0.bprov1 = Button(master0, text='Tiwul', command=self.diy1, width", "Canvas(master2, width = 300, height = 300) canvas.pack() img = PhotoImage(master = canvas,file=\"Asidah.png\")", "def __prov23(self): master0 = Tk() master0.minsize(width = 450, height = 100) master0.maxsize(width =", "= 800, height = 600) master2.title(\"Makanan tradisional Kepulauan Riau\") canvas = Canvas(master2, width", "Jakarta\") canvas = Canvas(master2, width = 300, height = 300) canvas.pack() img =", "columnspan = 4) master0.bprov1 = Button(master0, 
text='Sambal Colo-colo', command=self.malu1, width = 25, height=3)", "Barat', command=self.__prov27, width = 25) self.bprov27.grid(row = 10, column = 7, columnspan =", "PhotoImage(master = canvas,file=\"Mie Aceh.png\") canvas.create_image(5,5, anchor=NW, image=img) text1 = lst[0] text2 = Text(master2,", "= 800, height = 600) master2.title(\"Makanan tradisional NTT\") canvas = Canvas(master2, width =", "text1) text2.pack() master2.mainloop() def riau2(self): master2 = Tk() master2.minsize(width = 800, height =", "4) master0.bprov2 = Button(master0, text='Sate Ulat Sagu', command=self.pabar2, width = 25, height=3) master0.bprov2.grid(row", "height = 300) canvas.pack() img = PhotoImage(master = canvas,file=\"Ayam Betutu.png\") canvas.create_image(5,5, anchor=NW, image=img)", "tradisional Sumatera Utara\") canvas = Canvas(master2, width = 300, height = 300) canvas.pack()", "width = 300, height = 300) canvas.pack() img = PhotoImage(master = canvas,file=\"Tiwul.png\") canvas.create_image(5,5,", "= lst[34] text2 = Text(master2, font = \"Arial 12\") text2.insert(INSERT, text1) text2.pack() master2.mainloop()", "= canvas,file=\"Ayam Cincane.png\") canvas.create_image(5,5, anchor=NW, image=img) text1 = lst[46] text2 = Text(master2, font", "text='Bengukulu', command=self.__prov7, width = 25) self.bprov7.grid(row = 7, column = 3, columnspan =", "PhotoImage(master = canvas,file=\"Klapertaart.png\") canvas.create_image(5,5, anchor=NW, image=img) text1 = lst[50] text2 = Text(master2, font", "master0.judul.grid(row = 0, column = 2, columnspan = 4) master0.bprov1 = Button(master0, text='Kelapertaar',", "command=self.__prov29, width = 25) self.bprov29.grid(row = 12, column = 7, columnspan = 4)", "text2.pack() master2.mainloop() def sumut2(self): master2 = Tk() master2.minsize(width = 800, height = 600)", "300) canvas.pack() img = PhotoImage(master = canvas,file=\"Kohu Kohu.png\") canvas.create_image(5,5, anchor=NW, image=img) text1 =", "= Button(self.master, text='Jambi', 
command=self.__prov6, width = 25) self.bprov6.grid(row = 6, column = 3,", "text1 = lst[33] text2 = Text(master2, font = \"Arial 12\") text2.insert(INSERT, text1) text2.pack()", "text='<NAME>', command=self.dki2, width = 25, height=3) master0.bprov2.grid(row = 1, column = 6, columnspan", "0, column = 2, columnspan = 4) master0.bprov1 = Button(master0, text='<NAME>', command=self.papua1, width", "text='Sumatera Barat', command=self.__prov3, width = 25) self.bprov3.grid(row = 3, column = 3, columnspan", "Button(master0, text='Uta Kelo', command=self.sulteng1, width = 25, height=3) master0.bprov1.grid(row = 1, column =", "text1) text2.pack() master2.mainloop() def kalteng2(self): master2 = Tk() master2.minsize(width = 800, height =", "column = 2, columnspan = 4) master0.bprov2 = Button(master0, text='<NAME>', command=self.ban2, width =", "= 4) master0.bprov2 = Button(master0, text='Tekwan Palembang', command=self.sumsel2, width = 25, height=3) master0.bprov2.grid(row", "txt = '' while('<end>' not in cmp): txt += cmp cmp = f.readline()", "text2.insert(INSERT, text1) text2.pack() master2.mainloop() def sumsel2(self): master2 = Tk() master2.minsize(width = 800, height", "300) canvas.pack() img = PhotoImage(master = canvas,file=\"Ayam Taliwang.png\") canvas.create_image(5,5, anchor=NW, image=img) text1 =", "= Text(master2, font = \"Arial 12\") text2.insert(INSERT, text1) text2.pack() master2.mainloop() def __prov34(self): master0", "canvas,file=\"Rendang.png\") canvas.create_image(5,5, anchor=NW, image=img) text1 = lst[4] text2 = Text(master2, font = \"Arial", "column = 6, columnspan = 4) master0.mainloop() def beng1(self): master2 = Tk() master2.minsize(width", "width = 300, height = 300) canvas.pack() img = PhotoImage(master = canvas,file=\"Ampiang Dadiah.png\")", "def kbang2(self): master2 = Tk() master2.minsize(width = 800, height = 600) master2.maxsize(width =", "width = 25) self.bprov25.grid(row = 8, column = 7, columnspan = 4) self.bprov26", "height = 600) 
master2.title(\"Makanan tradisional Sumatera Utara\") canvas = Canvas(master2, width = 300,", "height = 600) master2.maxsize(width = 800, height = 600) master2.title(\"Makanan tradisional Sumatera Utara\")", "Text(master2, font = \"Arial 12\") text2.insert(INSERT, text1) text2.pack() master2.mainloop() def __prov16(self): master0 =", "img = PhotoImage(master = canvas,file=\"Lawa.png\") canvas.create_image(5,5, anchor=NW, image=img) text1 = lst[38] text2 =", "12\") text2.insert(INSERT, text1) text2.pack() master2.mainloop() def __prov23(self): master0 = Tk() master0.minsize(width = 450,", "= 6, columnspan = 4) master0.mainloop() def pabar1(self): master2 = Tk() master2.minsize(width =", "= Canvas(master2, width = 300, height = 300) canvas.pack() img = PhotoImage(master =", "PhotoImage(master = canvas,file=\"Nasi Grombyang.png\") canvas.create_image(5,5, anchor=NW, image=img) text1 = lst[29] text2 = Text(master2,", "4) master0.bprov1 = Button(master0, text='Tempoyak', command=self.jambi1, width = 25, height=3) master0.bprov1.grid(row = 1,", "300) canvas.pack() img = PhotoImage(master = canvas,file=\"Sate Ulat Sagu.png\") canvas.create_image(5,5, anchor=NW, image=img) text1", "Canvas(master2, width = 300, height = 300) canvas.pack() img = PhotoImage(master = canvas,file=\"Ampiang", "height = 300) canvas.pack() img = PhotoImage(master = canvas,file=\"Gong Gong.png\") canvas.create_image(5,5, anchor=NW, image=img)", "text1) text2.pack() master2.mainloop() def bali2(self): master2 = Tk() master2.minsize(width = 800, height =", "= PhotoImage(master = canvas,file=\"Nasi Grombyang.png\") canvas.create_image(5,5, anchor=NW, image=img) text1 = lst[29] text2 =", "def aceh1(self): master2 = Tk() master2.minsize(width = 800, height = 600) master2.maxsize(width =", "width = 300, height = 300) canvas.pack() img = PhotoImage(master = canvas,file=\"Rawon.png\") canvas.create_image(5,5,", "text='Jadah Tempe', command=self.diy2, width = 25, height=3) master0.bprov2.grid(row = 1, 
column = 6,", "Canvas(master2, width = 300, height = 300) canvas.pack() img = PhotoImage(master = canvas,file=\"Bilenthango.png\")", "canvas.pack() img = PhotoImage(master = canvas,file=\"Kaledo.png\") canvas.create_image(5,5, anchor=NW, image=img) text1 = lst[55] text2", "4) master0.mainloop() def sulgar1(self): master2 = Tk() master2.minsize(width = 800, height = 600)", "= canvas,file=\"Rujak Cingur.png\") canvas.create_image(5,5, anchor=NW, image=img) text1 = lst[30] text2 = Text(master2, font", "Text(master2, font = \"Arial 12\") text2.insert(INSERT, text1) text2.pack() master2.mainloop() def __prov32(self): master0 =", "= 4) master0.bprov2 = Button(master0, text='<NAME>u', command=self.bali2, width = 25, height=3) master0.bprov2.grid(row =", "command=self.kaltim1, width = 25, height=3) master0.bprov1.grid(row = 1, column = 2, columnspan =", "Canvas(master2, width = 300, height = 300) canvas.pack() img = PhotoImage(master = canvas,file=\"Coto", "master2.mainloop() def kbang2(self): master2 = Tk() master2.minsize(width = 800, height = 600) master2.maxsize(width", "columnspan = 4) master0.bprov2 = Button(master0, text='Bika Ambon', command=self.sumut2, width = 25, height=3)", "1, column = 6, columnspan = 4) master0.mainloop() def jambi1(self): master2 = Tk()", "= 1, column = 2, columnspan = 4) master0.bprov2 = Button(master0, text='Asidah', command=self.riau2,", "1, column = 6, columnspan = 4) master0.mainloop() def sulgar1(self): master2 = Tk()", "Button(master0, text='Rujak Cingur', command=self.jatim1, width = 25, height=3) master0.bprov1.grid(row = 1, column =", "height = 600) master2.title(\"Makanan tradisional Sulawesi Tenggara\") canvas = Canvas(master2, width = 300,", "= 10, column = 3, columnspan = 4) self.bprov11 = Button(self.master, text='Banten', command=self.__prov11,", "= 2, columnspan = 4) master0.bprov1 = Button(master0, text='Soto Betawi', command=self.dki1, width =", "= PhotoImage(master = canvas,file=\"Martabak Bangka.png\") 
canvas.create_image(5,5, anchor=NW, image=img) text1 = lst[16] text2 =", "canvas.create_image(5,5, anchor=NW, image=img) text1 = lst[32] text2 = Text(master2, font = \"Arial 12\")", "= canvas,file=\"Belaca Belitung.png\") canvas.create_image(5,5, anchor=NW, image=img) text1 = lst[17] text2 = Text(master2, font", "Bakar Manokwari', command=self.pabar1, width = 25, height=3) master0.bprov1.grid(row = 1, column = 2,", "= 4) master0.bprov2 = Button(master0, text='Kohu-kohu', command=self.malu2, width = 25, height=3) master0.bprov2.grid(row =", "= 300) canvas.pack() img = PhotoImage(master = canvas,file=\"Tempoyak.png\") canvas.create_image(5,5, anchor=NW, image=img) text1 =", "columnspan = 4) master0.mainloop() def kalteng1(self): master2 = Tk() master2.minsize(width = 800, height", "= 800, height = 600) master2.title(\"Makanan tradisional Kalimnantan Timur\") canvas = Canvas(master2, width", "open('all.txt', 'r') as f: line = '' for i in range(68): while('<deskripsi>' not", "text2 = Text(master2, font = \"Arial 12\") text2.insert(INSERT, text1) text2.pack() master2.mainloop() def lamp2(self):", "column = 6, columnspan = 4) master0.mainloop() def sulgar1(self): master2 = Tk() master2.minsize(width", "height = 600) master2.maxsize(width = 800, height = 600) master2.title(\"Makanan tradisional Jambi\") canvas", "command=self.__prov2, width = 25) self.bprov2.grid(row = 2, column = 3, columnspan = 4)", "600) master2.title(\"Makanan tradisional Jambi\") canvas = Canvas(master2, width = 300, height = 300)", "4) master0.mainloop() def papua1(self): master2 = Tk() master2.minsize(width = 800, height = 600)", "= Button(master0, text=\"Se'i\", command=self.ntt1, width = 25, height=3) master0.bprov1.grid(row = 1, column =", "= \"Arial 12\") text2.insert(INSERT, text1) text2.pack() master2.mainloop() def __prov23(self): master0 = Tk() master0.minsize(width", "text1 = lst[18] text2 = Text(master2, font = \"Arial 12\") text2.insert(INSERT, text1) text2.pack()", "1, column = 2, 
columnspan = 4) master0.bprov2 = Button(master0, text='Bika Ambon', command=self.sumut2,", "= 25) self.bprov26.grid(row = 9, column = 7, columnspan = 4) self.bprov27 =", "= 300) canvas.pack() img = PhotoImage(master = canvas,file=\"Bika Ambon.png\") canvas.create_image(5,5, anchor=NW, image=img) text1", "= Button(self.master, text='Bali', command=self.__prov17, width = 25) self.bprov17.grid(row = 17, column = 3,", "= 300) canvas.pack() img = PhotoImage(master = canvas,file=\"Kue Timpan.png\") canvas.create_image(5,5, anchor=NW, image=img) text1", "Canvas(master2, width = 300, height = 300) canvas.pack() img = PhotoImage(master = canvas,file=\"Lawa.png\")", "Ambon', command=self.sumut2, width = 25, height=3) master0.bprov2.grid(row = 1, column = 6, columnspan", "tradisional Lampung\") canvas = Canvas(master2, width = 300, height = 300) canvas.pack() img", "columnspan = 4) self.bprov11 = Button(self.master, text='Banten', command=self.__prov11, width = 25) self.bprov11.grid(row =", "height = 300) canvas.pack() img = PhotoImage(master = canvas,file=\"Kohu Kohu.png\") canvas.create_image(5,5, anchor=NW, image=img)", "= 4) master0.mainloop() def sumbar1(self): master2 = Tk() master2.minsize(width = 800, height =", "Kalimantan Tengha\") canvas = Canvas(master2, width = 300, height = 300) canvas.pack() img", "= 1, column = 2, columnspan = 4) master0.bprov2 = Button(master0, text='<NAME>', command=self.kalbar2,", "image=img) text1 = lst[11] text2 = Text(master2, font = \"Arial 12\") text2.insert(INSERT, text1)", "width = 300, height = 300) canvas.pack() img = PhotoImage(master = canvas,file=\"Soto Betawi.png\")", "text='Aceh', command=self.__prov1, width = 25) self.bprov1.grid(row = 1, column = 3, columnspan =", "12\") text2.insert(INSERT, text1) text2.pack() master2.mainloop() def __prov3(self): master0 = Tk() master0.minsize(width = 450,", "text='Rujak Cingur', command=self.jatim1, width = 25, height=3) master0.bprov1.grid(row = 1, column = 2,", "canvas.pack() img = 
PhotoImage(master = canvas,file=\"Lepek Binti.png\") canvas.create_image(5,5, anchor=NW, image=img) text1 = lst[12]", "master2.title(\"Makanan tradisional Kalimantan Timur\") canvas = Canvas(master2, width = 300, height = 300)", "= 25) self.bprov1.grid(row = 1, column = 3, columnspan = 4) self.bprov2 =", "width = 300, height = 300) canvas.pack() img = PhotoImage(master = canvas,file=\"<NAME>.png\") canvas.create_image(5,5,", "800, height = 600) master2.maxsize(width = 800, height = 600) master2.title(\"Makanan tradisional Papua\")", "PhotoImage(master = canvas,file=\"<NAME>oyak.png\") canvas.create_image(5,5, anchor=NW, image=img) text1 = lst[41] text2 = Text(master2, font", "= 300, height = 300) canvas.pack() img = PhotoImage(master = canvas,file=\"Sambal Colo Colo.png\")", "Button(self.master, text='Sulawesi Selatan', command=self.__prov29, width = 25) self.bprov29.grid(row = 12, column = 7,", "text='Ampiang Dadiah', command=self.sumbar2, width = 25, height=3) master0.bprov2.grid(row = 1, column = 6,", "text2.pack() master2.mainloop() def __prov15(self): master0 = Tk() master0.minsize(width = 450, height = 100)", "text2.pack() master2.mainloop() def __prov21(self): master0 = Tk() master0.minsize(width = 450, height = 100)", "= PhotoImage(master = canvas,file=\"Dorokdok.png\") canvas.create_image(5,5, anchor=NW, image=img) text1 = lst[25] text2 = Text(master2,", "canvas.create_image(5,5, anchor=NW, image=img) text1 = lst[66] text2 = Text(master2, font = \"Arial 12\")", "def __prov5(self): master0 = Tk() master0.minsize(width = 450, height = 100) master0.maxsize(width =", "column = 7, columnspan = 4) self.bprov25 = Button(self.master, text='Gorontalo', command=self.__prov25, width =", "Canvas(master2, width = 300, height = 300) canvas.pack() img = PhotoImage(master = canvas,file=\"Kaledo.png\")", "text1) text2.pack() master2.mainloop() def __prov32(self): master0 = Tk() master0.minsize(width = 450, height =", "text='Jambi', command=self.__prov6, width = 25) 
self.bprov6.grid(row = 6, column = 3, columnspan =", "anchor=NW, image=img) text1 = lst[8] text2 = Text(master2, font = \"Arial 12\") text2.insert(INSERT,", "Text(master2, font = \"Arial 12\") text2.insert(INSERT, text1) text2.pack() master2.mainloop() def sulbar2(self): master2 =", "= 2, columnspan = 4) master0.bprov1 = Button(master0, text='Ikan Bakar Manokwari', command=self.pabar1, width", "7, column = 3, columnspan = 4) self.bprov8 = Button(self.master, text='Sumatera Selatan', command=self.__prov8,", "height = 300) canvas.pack() img = PhotoImage(master = canvas,file=\"Ampiang Dadiah.png\") canvas.create_image(5,5, anchor=NW, image=img)", "image=img) text1 = lst[26] text2 = Text(master2, font = \"Arial 12\") text2.insert(INSERT, text1)", "Button(self.master, text='Sumatera Utara', command=self.__prov2, width = 25) self.bprov2.grid(row = 2, column = 3,", "Timur', command=self.__prov24, width = 25) self.bprov24.grid(row = 7, column = 7, columnspan =", "= 2, columnspan = 4) master0.bprov1 = Button(master0, text='Soto Kudus', command=self.jateng1, width =", "2, columnspan = 4) master0.bprov2 = Button(master0, text='Tekwan Palembang', command=self.sumsel2, width = 25,", "= PhotoImage(master = canvas,file=\"Dekke Na Niura.png\") canvas.create_image(5,5, anchor=NW, image=img) text1 = lst[2] text2", "Button(master0, text='Jadah Tempe', command=self.diy2, width = 25, height=3) master0.bprov2.grid(row = 1, column =", "2, columnspan = 4) master0.bprov2 = Button(master0, text='Dorokdok', command=self.jabar2, width = 25, height=3)", "sulteng1(self): master2 = Tk() master2.minsize(width = 800, height = 600) master2.maxsize(width = 800,", "text1) text2.pack() master2.mainloop() def sulsel2(self): master2 = Tk() master2.minsize(width = 800, height =", "= Button(master0, text='Tempoyak', command=self.jambi1, width = 25, height=3) master0.bprov1.grid(row = 1, column =", "= 300) canvas.pack() img = PhotoImage(master = canvas,file=\"Gatang Kenari.png\") canvas.create_image(5,5, 
anchor=NW, image=img) text1", "canvas,file=\"Sate Ulat Sagu.png\") canvas.create_image(5,5, anchor=NW, image=img) text1 = lst[65] text2 = Text(master2, font", "anchor=NW, image=img) text1 = lst[50] text2 = Text(master2, font = \"Arial 12\") text2.insert(INSERT,", "kaltim1(self): master2 = Tk() master2.minsize(width = 800, height = 600) master2.maxsize(width = 800,", "self.bprov25.grid(row = 8, column = 7, columnspan = 4) self.bprov26 = Button(self.master, text='Sulawesi", "canvas,file=\"Tapa Kolo.png\") canvas.create_image(5,5, anchor=NW, image=img) text1 = lst[37] text2 = Text(master2, font =", "text2.pack() master2.mainloop() def jambi2(self): master2 = Tk() master2.minsize(width = 800, height = 600)", "Canvas(master2, width = 300, height = 300) canvas.pack() img = PhotoImage(master = canvas,file=\"Tiwul.png\")", "12\") text2.insert(INSERT, text1) text2.pack() master2.mainloop() def __prov2(self): master0 = Tk() master0.minsize(width = 450,", "= canvas,file=\"Gong Gong.png\") canvas.create_image(5,5, anchor=NW, image=img) text1 = lst[9] text2 = Text(master2, font", "column = 6, columnspan = 4) master0.mainloop() def kalut1(self): master2 = Tk() master2.minsize(width", "column = 2, columnspan = 4) master0.bprov2 = Button(master0, text='Sate Ulat Sagu', command=self.pabar2,", "column = 6, columnspan = 4) master0.mainloop() def dki1(self): master2 = Tk() master2.minsize(width", "text='Riau', command=self.__prov4, width = 25) self.bprov4.grid(row = 4, column = 3, columnspan =", "tradisional Jawa Barat\") canvas = Canvas(master2, width = 300, height = 300) canvas.pack()", "height = 600) master2.maxsize(width = 800, height = 600) master2.title(\"Makanan tradisional Papua Barat\")", "text2.pack() master2.mainloop() def malu2(self): master2 = Tk() master2.minsize(width = 800, height = 600)", "text2 = Text(master2, font = \"Arial 12\") text2.insert(INSERT, text1) text2.pack() master2.mainloop() def __prov3(self):", "= 11, column = 3, columnspan = 4) self.bprov12 = 
Button(self.master, text='Jawa Barat',", "canvas.create_image(5,5, anchor=NW, image=img) text1 = lst[33] text2 = Text(master2, font = \"Arial 12\")", "__prov16(self): master0 = Tk() master0.minsize(width = 450, height = 100) master0.maxsize(width = 450,", "= PhotoImage(master = canvas,file=\"Manday.png\") canvas.create_image(5,5, anchor=NW, image=img) text1 = lst[44] text2 = Text(master2,", "image=img) text1 = lst[48] text2 = Text(master2, font = \"Arial 12\") text2.insert(INSERT, text1)", "4) master0.mainloop() def kbang1(self): master2 = Tk() master2.minsize(width = 800, height = 600)", "= canvas,file=\"Bilenthango.png\") canvas.create_image(5,5, anchor=NW, image=img) text1 = lst[49] text2 = Text(master2, font =", "12\") text2.insert(INSERT, text1) text2.pack() master2.mainloop() def jabar2(self): master2 = Tk() master2.minsize(width = 800,", "master2.mainloop() def sulbar2(self): master2 = Tk() master2.minsize(width = 800, height = 600) master2.maxsize(width", "master2.mainloop() def __prov15(self): master0 = Tk() master0.minsize(width = 450, height = 100) master0.maxsize(width", "width = 300, height = 300) canvas.pack() img = PhotoImage(master = canvas,file=\"Sop Konro.png\")", "12\") text2.insert(INSERT, text1) text2.pack() master2.mainloop() def __prov12(self): master0 = Tk() master0.minsize(width = 450,", "width = 300, height = 300) canvas.pack() img = PhotoImage(master = canvas,file=\"Asidah.png\") canvas.create_image(5,5,", "Text(master2, font = \"Arial 12\") text2.insert(INSERT, text1) text2.pack() master2.mainloop() def __prov2(self): master0 =", "= 4) master0.bprov1 = Button(master0, text='Soto Kudus', command=self.jateng1, width = 25, height=3) master0.bprov1.grid(row", "= 2, columnspan = 4) master0.bprov1 = Button(master0, text='<NAME>', command=self.sumut1, width = 25,", "text1 = lst[0] text2 = Text(master2, font = \"Arial 12\") text2.insert(INSERT, text1) text2.pack()", "height=3) master0.bprov2.grid(row = 1, column = 6, columnspan = 4) 
master0.mainloop() def diy1(self):", "= \"Arial 12\") text2.insert(INSERT, text1) text2.pack() master2.mainloop() def __prov21(self): master0 = Tk() master0.minsize(width", "command=self.sulgar1, width = 25, height=3) master0.bprov1.grid(row = 1, column = 2, columnspan =", "sumut1(self): master2 = Tk() master2.minsize(width = 800, height = 600) master2.maxsize(width = 800,", "height = 300) canvas.pack() img = PhotoImage(master = canvas,file=\"Tapa Kolo.png\") canvas.create_image(5,5, anchor=NW, image=img)", "= \"Arial 12\") text2.insert(INSERT, text1) text2.pack() master2.mainloop() def __prov14(self): master0 = Tk() master0.minsize(width", "PhotoImage(master = canvas,file=\"Tinutuan.png\") canvas.create_image(5,5, anchor=NW, image=img) text1 = lst[51] text2 = Text(master2, font", "img = PhotoImage(master = canvas,file=\"Nasi Grombyang.png\") canvas.create_image(5,5, anchor=NW, image=img) text1 = lst[29] text2", "master2.title(\"Makanan tradisional Jawa Timur\") canvas = Canvas(master2, width = 300, height = 300)", "= 2, columnspan = 4) master0.bprov2 = Button(master0, text='Kohu-kohu', command=self.malu2, width = 25,", "master2.maxsize(width = 800, height = 600) master2.title(\"Makanan tradisional Banten\") canvas = Canvas(master2, width", "image=img) text1 = lst[31] text2 = Text(master2, font = \"Arial 12\") text2.insert(INSERT, text1)", "text='Bubur Paddas Sambas', command=self.kalbar1, width = 25, height=3) master0.bprov1.grid(row = 1, column =", "master0.mainloop() def jateng1(self): master2 = Tk() master2.minsize(width = 800, height = 600) master2.maxsize(width", "canvas.create_image(5,5, anchor=NW, image=img) text1 = lst[7] text2 = Text(master2, font = \"Arial 12\")", "600) master2.maxsize(width = 800, height = 600) master2.title(\"Makanan tradisional Jawa Timur\") canvas =", "4) master0.bprov2 = Button(master0, text='Nasi Gerombyang', command=self.jateng2, width = 25, height=3) master0.bprov2.grid(row =", "= 4) master0.mainloop() def kriau1(self): master2 = 
Tk() master2.minsize(width = 800, height =", "self.bprov1.grid(row = 1, column = 3, columnspan = 4) self.bprov2 = Button(self.master, text='Sumatera", "aceh1(self): master2 = Tk() master2.minsize(width = 800, height = 600) master2.maxsize(width = 800,", "self.bprov6.grid(row = 6, column = 3, columnspan = 4) self.bprov7 = Button(self.master, text='Bengukulu',", "NTT\") canvas = Canvas(master2, width = 300, height = 300) canvas.pack() img =", "def kalteng2(self): master2 = Tk() master2.minsize(width = 800, height = 600) master2.maxsize(width =", "= 300, height = 300) canvas.pack() img = PhotoImage(master = canvas,file=\"Kaledo.png\") canvas.create_image(5,5, anchor=NW,", "master2.title(\"Makanan tradisional Sulawesi Selatan\") canvas = Canvas(master2, width = 300, height = 300)", "2, columnspan = 4) master0.bprov2 = Button(master0, text='Kasoami', command=self.sulgar2, width = 25, height=3)", "= Text(master2, font = \"Arial 12\") text2.insert(INSERT, text1) text2.pack() master2.mainloop() def __prov14(self): master0", "4) master0.bprov1 = Button(master0, text='<NAME>', command=self.kaltim1, width = 25, height=3) master0.bprov1.grid(row = 1,", "master0.mainloop() def kalteng1(self): master2 = Tk() master2.minsize(width = 800, height = 600) master2.maxsize(width", "= \"Arial 12\") text2.insert(INSERT, text1) text2.pack() master2.mainloop() def __prov4(self): master0 = Tk() master0.minsize(width", "text2.insert(INSERT, text1) text2.pack() master2.mainloop() def ntt2(self): master2 = Tk() master2.minsize(width = 800, height", "height = 600) master2.maxsize(width = 800, height = 600) master2.title(\"Makanan tradisional DI Yogyakarta\")", "12\") text2.insert(INSERT, text1) text2.pack() master2.mainloop() def __prov33(self): master0 = Tk() master0.minsize(width = 450,", "= Button(master0, text='Sate Ulat Sagu', command=self.pabar2, width = 25, height=3) master0.bprov2.grid(row = 1,", "= Button(self.master, text='Maluku', command=self.__prov32, width = 25) 
self.bprov32.grid(row = 15, column = 7,", "Canvas(master2, width = 300, height = 300) canvas.pack() img = PhotoImage(master = canvas,file=\"Belaca", "Taboh.png\") canvas.create_image(5,5, anchor=NW, image=img) text1 = lst[19] text2 = Text(master2, font = \"Arial", "height = 300) canvas.pack() img = PhotoImage(master = canvas,file=\"Dekke Na Niura.png\") canvas.create_image(5,5, anchor=NW,", "= 1, column = 2, columnspan = 4) master0.bprov2 = Button(master0, text='Nasi Gerombyang',", "Cingur', command=self.jatim1, width = 25, height=3) master0.bprov1.grid(row = 1, column = 2, columnspan", "text2.pack() master2.mainloop() def ntt2(self): master2 = Tk() master2.minsize(width = 800, height = 600)", "= Text(master2, font = \"Arial 12\") text2.insert(INSERT, text1) text2.pack() master2.mainloop() def __prov16(self): master0", "PhotoImage(master = canvas,file=\"Gatang Kenari.png\") canvas.create_image(5,5, anchor=NW, image=img) text1 = lst[60] text2 = Text(master2,", "canvas.create_image(5,5, anchor=NW, image=img) text1 = lst[60] text2 = Text(master2, font = \"Arial 12\")", "master0.bprov2 = Button(master0, text='<NAME>', command=self.kalbar2, width = 25, height=3) master0.bprov2.grid(row = 1, column", "2, columnspan = 4) master0.bprov2 = Button(master0, text='Kohu-kohu', command=self.malu2, width = 25, height=3)", "2, columnspan = 4) master0.bprov2 = Button(master0, text='Asidah', command=self.riau2, width = 25, height=3)", "master0.bprov2.grid(row = 1, column = 6, columnspan = 4) master0.mainloop() def beng1(self): master2", "= 4) self.master.mainloop() def __prov1(self): master0 = Tk() master0.minsize(width = 450, height =", "text='<NAME>', command=self.malut1, width = 25, height=3) master0.bprov1.grid(row = 1, column = 2, columnspan", "master2.title(\"Makanan tradisional Jawa TEngah\") canvas = Canvas(master2, width = 300, height = 300)", "Text(master2, font = \"Arial 12\") text2.insert(INSERT, text1) text2.pack() master2.mainloop() def jatim2(self): master2 =", 
"height = 300) canvas.pack() img = PhotoImage(master = canvas,file=\"Pempek Palembang.png\") canvas.create_image(5,5, anchor=NW, image=img)", "text2.insert(INSERT, text1) text2.pack() master2.mainloop() def __prov14(self): master0 = Tk() master0.minsize(width = 450, height", "master0.bprov2.grid(row = 1, column = 6, columnspan = 4) master0.mainloop() def ban1(self): master2", "6, columnspan = 4) master0.mainloop() def sulteng1(self): master2 = Tk() master2.minsize(width = 800,", "column = 7, columnspan = 4) self.bprov28 = Button(self.master, text='Sulawesi Tengah', command=self.__prov28, width", "master2.mainloop() def __prov25(self): master0 = Tk() master0.minsize(width = 450, height = 100) master0.maxsize(width", "25) self.bprov2.grid(row = 2, column = 3, columnspan = 4) self.bprov3 = Button(self.master,", "columnspan = 4) master0.bprov1 = Button(master0, text='Kalumpe', command=self.kalteng1, width = 25, height=3) master0.bprov1.grid(row", "master0.mainloop() def dki1(self): master2 = Tk() master2.minsize(width = 800, height = 600) master2.maxsize(width", "4) master0.bprov2 = Button(master0, text='Kue Timpan', command=self.aceh2, width = 25, height=3) master0.bprov2.grid(row =", "300) canvas.pack() img = PhotoImage(master = canvas,file=\"Bagar Hiu.png\") canvas.create_image(5,5, anchor=NW, image=img) text1 =", "800, height = 600) master2.title(\"Makanan tradisional Kalimantan Timur\") canvas = Canvas(master2, width =", "4) master0.bprov1 = Button(master0, text='<NAME>', command=self.riau1, width = 25, height=3) master0.bprov1.grid(row = 1,", "text2.insert(INSERT, text1) text2.pack() master2.mainloop() def __prov22(self): master0 = Tk() master0.minsize(width = 450, height", "= 4) master0.mainloop() def kaltim1(self): master2 = Tk() master2.minsize(width = 800, height =", "4) master0.mainloop() def malut1(self): master2 = Tk() master2.minsize(width = 800, height = 600)", "Button(master0, text='Kohu-kohu', command=self.malu2, width = 25, height=3) 
master0.bprov2.grid(row = 1, column = 6,", "bali2(self): master2 = Tk() master2.minsize(width = 800, height = 600) master2.maxsize(width = 800,", "2, columnspan = 4) master0.bprov1 = Button(master0, text='Mie Aceh', command=self.aceh1, width = 25,", "height = 300) canvas.pack() img = PhotoImage(master = canvas,file=\"Soto Kudus.png\") canvas.create_image(5,5, anchor=NW, image=img)", "= 4) self.bprov2 = Button(self.master, text='Sumatera Utara', command=self.__prov2, width = 25) self.bprov2.grid(row =", "= 1, column = 6, columnspan = 4) master0.mainloop() def sumut1(self): master2 =", "master0.bprov2.grid(row = 1, column = 6, columnspan = 4) master0.mainloop() def ntb1(self): master2", "text2.insert(INSERT, text1) text2.pack() master2.mainloop() def __prov15(self): master0 = Tk() master0.minsize(width = 450, height", "300, height = 300) canvas.pack() img = PhotoImage(master = canvas,file=\"Manday.png\") canvas.create_image(5,5, anchor=NW, image=img)", "= 4) self.bprov6 = Button(self.master, text='Jambi', command=self.__prov6, width = 25) self.bprov6.grid(row = 6,", "= 2, columnspan = 4) master0.bprov1 = Button(master0, text='Rujak Cingur', command=self.jatim1, width =", "canvas.pack() img = PhotoImage(master = canvas,file=\"Kue Timpan.png\") canvas.create_image(5,5, anchor=NW, image=img) text1 = lst[1]", "anchor=NW, image=img) text1 = lst[30] text2 = Text(master2, font = \"Arial 12\") text2.insert(INSERT,", "= 1, column = 6, columnspan = 4) master0.mainloop() def kalut1(self): master2 =", "text2.insert(INSERT, text1) text2.pack() master2.mainloop() def lamp2(self): master2 = Tk() master2.minsize(width = 800, height", "Soka.png\") canvas.create_image(5,5, anchor=NW, image=img) text1 = lst[39] text2 = Text(master2, font = \"Arial", "300) canvas.pack() img = PhotoImage(master = canvas,file=\"Gangan Asam Banjar.png\") canvas.create_image(5,5, anchor=NW, image=img) text1", "Button(master0, text='Binte Biluhuta', command=self.goron1, width = 25, height=3) 
master0.bprov1.grid(row = 1, column =", "image=img) text1 = lst[61] text2 = Text(master2, font = \"Arial 12\") text2.insert(INSERT, text1)", "= lst[67] text2 = Text(master2, font = \"Arial 12\") text2.insert(INSERT, text1) text2.pack() master2.mainloop()", "command=self.malut1, width = 25, height=3) master0.bprov1.grid(row = 1, column = 2, columnspan =", "12\") text2.insert(INSERT, text1) text2.pack() master2.mainloop() def jambi2(self): master2 = Tk() master2.minsize(width = 800,", "column = 6, columnspan = 4) master0.mainloop() def kalsel1(self): master2 = Tk() master2.minsize(width", "1, column = 2, columnspan = 4) master0.bprov2 = Button(master0, text='Belacan Belitung', command=self.kbang2,", "kalsel2(self): master2 = Tk() master2.minsize(width = 800, height = 600) master2.maxsize(width = 800,", "300) canvas.pack() img = PhotoImage(master = canvas,file=\"Rawon.png\") canvas.create_image(5,5, anchor=NW, image=img) text1 = lst[31]", "master0.bprov2 = Button(master0, text='Kue Timpan', command=self.aceh2, width = 25, height=3) master0.bprov2.grid(row = 1,", "= 0, column = 2, columnspan = 4) master0.bprov1 = Button(master0, text='Pempek Palembang',", "Ambon.png\") canvas.create_image(5,5, anchor=NW, image=img) text1 = lst[3] text2 = Text(master2, font = \"Arial", "= lst[28] text2 = Text(master2, font = \"Arial 12\") text2.insert(INSERT, text1) text2.pack() master2.mainloop()", "= 600) master2.title(\"Makanan tradisional Sulawesi Tenggara\") canvas = Canvas(master2, width = 300, height", "300) canvas.pack() img = PhotoImage(master = canvas,file=\"Rendang.png\") canvas.create_image(5,5, anchor=NW, image=img) text1 = lst[4]", "image=img) text1 = lst[52] text2 = Text(master2, font = \"Arial 12\") text2.insert(INSERT, text1)", "img = PhotoImage(master = canvas,file=\"Sinonggi.png\") canvas.create_image(5,5, anchor=NW, image=img) text1 = lst[58] text2 =", "0, column = 2, columnspan = 4) master0.bprov1 = Button(master0, text='<NAME>', command=self.kaltim1, width", 
"text1) text2.pack() master2.mainloop() def dki2(self): master2 = Tk() master2.minsize(width = 800, height =", "= Button(master0, text='<NAME>', command=self.bali1, width = 25, height=3) master0.bprov1.grid(row = 1, column =", "Canvas(master2, width = 300, height = 300) canvas.pack() img = PhotoImage(master = canvas,file=\"Tempoyak.png\")", "height=3) master0.bprov2.grid(row = 1, column = 6, columnspan = 4) master0.mainloop() def dki1(self):", "__prov29(self): master0 = Tk() master0.minsize(width = 450, height = 100) master0.maxsize(width = 450,", "12\") text2.insert(INSERT, text1) text2.pack() master2.mainloop() def sulut2(self): master2 = Tk() master2.minsize(width = 800,", "= 600) master2.maxsize(width = 800, height = 600) master2.title(\"Makanan tradisional Sumatera Utara\") canvas", "column = 3, columnspan = 4) self.bprov18 = Button(self.master, text='NTB', command=self.__prov18, width =", "text1 = lst[38] text2 = Text(master2, font = \"Arial 12\") text2.insert(INSERT, text1) text2.pack()", "= 4) master0.mainloop() def jambi1(self): master2 = Tk() master2.minsize(width = 800, height =", "column = 7, columnspan = 4) self.bprov20 = Button(self.master, text='Kalimantan Utara', command=self.__prov20, width", "= \"Arial 12\") text2.insert(INSERT, text1) text2.pack() master2.mainloop() def __prov32(self): master0 = Tk() master0.minsize(width", "text='<NAME>', command=self.kbang1, width = 25, height=3) master0.bprov1.grid(row = 1, column = 2, columnspan", "columnspan = 4) master0.bprov1 = Button(master0, text='Sop Konro', command=self.sulsel1, width = 25, height=3)", "= Text(master2, font = \"Arial 12\") text2.insert(INSERT, text1) text2.pack() master2.mainloop() def __prov19(self): master0", "canvas,file=\"Bika Ambon.png\") canvas.create_image(5,5, anchor=NW, image=img) text1 = lst[3] text2 = Text(master2, font =", "= 800, height = 600) master2.title(\"Makanan tradisional Kalimantan Tengah\") canvas = Canvas(master2, width", "= 6, columnspan = 4) master0.mainloop() 
def malu1(self): master2 = Tk() master2.minsize(width =", "command=self.__prov5, width = 25) self.bprov5.grid(row = 5, column = 3, columnspan = 4)", "command=self.__prov7, width = 25) self.bprov7.grid(row = 7, column = 3, columnspan = 4)", "canvas.pack() img = PhotoImage(master = canvas,file=\"Tekwan Palembang.png\") canvas.create_image(5,5, anchor=NW, image=img) text1 = lst[15]", "600) master2.maxsize(width = 800, height = 600) master2.title(\"Makanan tradisional Maluku Utara\") canvas =", "= canvas,file=\"Tekwan Palembang.png\") canvas.create_image(5,5, anchor=NW, image=img) text1 = lst[15] text2 = Text(master2, font", "canvas.create_image(5,5, anchor=NW, image=img) text1 = lst[23] text2 = Text(master2, font = \"Arial 12\")", "600) master2.maxsize(width = 800, height = 600) master2.title(\"Makanan tradisional Banten\") canvas = Canvas(master2,", "Canvas(master2, width = 300, height = 300) canvas.pack() img = PhotoImage(master = canvas,file=\"Apang", "canvas.create_image(5,5, anchor=NW, image=img) text1 = lst[67] text2 = Text(master2, font = \"Arial 12\")", "lst[30] text2 = Text(master2, font = \"Arial 12\") text2.insert(INSERT, text1) text2.pack() master2.mainloop() def", "= 2, column = 3, columnspan = 4) self.bprov3 = Button(self.master, text='Sumatera Barat',", "self.bprov9.grid(row = 9, column = 3, columnspan = 4) self.bprov10 = Button(self.master, text='Lampung',", "height = 600) master2.maxsize(width = 800, height = 600) master2.title(\"Makanan tradisional Bengkulu\") canvas", "300) canvas.pack() img = PhotoImage(master = canvas,file=\"Gatang Kenari.png\") canvas.create_image(5,5, anchor=NW, image=img) text1 =", "canvas.pack() img = PhotoImage(master = canvas,file=\"Nasi Lapola.png\") canvas.create_image(5,5, anchor=NW, image=img) text1 = lst[61]", "canvas.pack() img = PhotoImage(master = canvas,file=\"Tempoyak.png\") canvas.create_image(5,5, anchor=NW, image=img) text1 = lst[10] text2", "column = 2, columnspan = 4) master0.bprov2 = Button(master0, 
text='<NAME>', command=self.jambi2, width =", "columnspan = 4) master0.bprov1 = Button(master0, text='Lepek Binti', command=self.beng1, width = 25, height=3)", "600) master2.title(\"Makanan tradisional Sulawesi Barat\") canvas = Canvas(master2, width = 300, height =", "canvas.pack() img = PhotoImage(master = canvas,file=\"Uta Kelo.png\") canvas.create_image(5,5, anchor=NW, image=img) text1 = lst[54]", "= 1, column = 6, columnspan = 4) master0.mainloop() def sulsel1(self): master2 =", "def __prov29(self): master0 = Tk() master0.minsize(width = 450, height = 100) master0.maxsize(width =", "Button(master0, text='Sate Bulayak', command=self.ntb2, width = 25, height=3) master0.bprov2.grid(row = 1, column =", "master2.mainloop() def __prov20(self): master0 = Tk() master0.minsize(width = 450, height = 100) master0.maxsize(width", "text='Kalumpe', command=self.kalteng1, width = 25, height=3) master0.bprov1.grid(row = 1, column = 2, columnspan", "font = \"Arial 12\") text2.insert(INSERT, text1) text2.pack() master2.mainloop() def goron2(self): master2 = Tk()", "= 25) self.bprov21.grid(row = 4, column = 7, columnspan = 4) self.bprov22 =", "column = 7, columnspan = 4) self.bprov24 = Button(self.master, text='Kalimantan Timur', command=self.__prov24, width", "columnspan = 4) self.bprov25 = Button(self.master, text='Gorontalo', command=self.__prov25, width = 25) self.bprov25.grid(row =", "text1) text2.pack() master2.mainloop() def __prov12(self): master0 = Tk() master0.minsize(width = 450, height =", "text2 = Text(master2, font = \"Arial 12\") text2.insert(INSERT, text1) text2.pack() master2.mainloop() def ntb2(self):", "6, columnspan = 4) master0.mainloop() def ntb1(self): master2 = Tk() master2.minsize(width = 800,", "= 600) master2.title(\"Makanan tradisional Sulawesi Tengah\") canvas = Canvas(master2, width = 300, height", "canvas.pack() img = PhotoImage(master = canvas,file=\"Dorokdok.png\") canvas.create_image(5,5, anchor=NW, image=img) text1 = lst[25] text2", "= \"Arial 
16 bold\") self.master.judul.grid(row = 0, column = 3, columnspan = 8)", "text2 = Text(master2, font = \"Arial 12\") text2.insert(INSERT, text1) text2.pack() master2.mainloop() def kalteng2(self):", "= 2, columnspan = 4) master0.bprov2 = Button(master0, text='Jadah Tempe', command=self.diy2, width =", "= 4) master0.bprov1 = Button(master0, text='Seruit Lampung', command=self.lamp1, width = 25, height=3) master0.bprov1.grid(row", "columnspan = 4) self.bprov28 = Button(self.master, text='Sulawesi Tengah', command=self.__prov28, width = 25) self.bprov28.grid(row", "height = 300) canvas.pack() img = PhotoImage(master = canvas,file=\"<NAME>.png\") canvas.create_image(5,5, anchor=NW, image=img) text1", "4) master0.mainloop() def jatim1(self): master2 = Tk() master2.minsize(width = 800, height = 600)", "text1) text2.pack() master2.mainloop() def __prov21(self): master0 = Tk() master0.minsize(width = 450, height =", "master0.bprov2 = Button(master0, text='<NAME>', command=self.kalteng2, width = 25, height=3) master0.bprov2.grid(row = 1, column", "PhotoImage(master = canvas,file=\"Ayam Taliwang.png\") canvas.create_image(5,5, anchor=NW, image=img) text1 = lst[34] text2 = Text(master2,", "PhotoImage(master = canvas,file=\"Bilenthango.png\") canvas.create_image(5,5, anchor=NW, image=img) text1 = lst[49] text2 = Text(master2, font", "= Button(master0, text='Kalumpe', command=self.kalteng1, width = 25, height=3) master0.bprov1.grid(row = 1, column =", "text1) text2.pack() master2.mainloop() def __prov11(self): master0 = Tk() master0.minsize(width = 450, height =", "ban1(self): master2 = Tk() master2.minsize(width = 800, height = 600) master2.maxsize(width = 800,", "canvas,file=\"Sei.png\") canvas.create_image(5,5, anchor=NW, image=img) text1 = lst[36] text2 = Text(master2, font = \"Arial", "= Text(master2, font = \"Arial 12\") text2.insert(INSERT, text1) text2.pack() master2.mainloop() def ntt2(self): master2", "column = 6, columnspan = 4) master0.mainloop() def pabar1(self): 
master2 = Tk() master2.minsize(width", "0, column = 3, columnspan = 8) self.bprov1 = Button(self.master, text='Aceh', command=self.__prov1, width", "4) master0.bprov1 = Button(master0, text='Soto Kudus', command=self.jateng1, width = 25, height=3) master0.bprov1.grid(row =", "Text(master2, font = \"Arial 12\") text2.insert(INSERT, text1) text2.pack() master2.mainloop() def beng2(self): master2 =", "= lst[33] text2 = Text(master2, font = \"Arial 12\") text2.insert(INSERT, text1) text2.pack() master2.mainloop()", "Button(master0, text='<NAME>', command=self.sumut1, width = 25, height=3) master0.bprov1.grid(row = 1, column = 2,", "width = 300, height = 300) canvas.pack() img = PhotoImage(master = canvas,file=\"Nasi Lapola.png\")", "columnspan = 4) master0.mainloop() def sumsel1(self): master2 = Tk() master2.minsize(width = 800, height", "= \"Arial 12\") text2.insert(INSERT, text1) text2.pack() master2.mainloop() def __prov26(self): master0 = Tk() master0.minsize(width", "master2.title(\"Makanan tradisional Aceh\") canvas = Canvas(master2, width = 300, height = 300) canvas.pack()", "2, column = 7, columnspan = 4) self.bprov20 = Button(self.master, text='Kalimantan Utara', command=self.__prov20,", "1, column = 6, columnspan = 4) master0.mainloop() def sulteng1(self): master2 = Tk()", "column = 2, columnspan = 4) master0.bprov1 = Button(master0, text='<NAME>', command=self.riau1, width =", "text1) text2.pack() master2.mainloop() def jatim2(self): master2 = Tk() master2.minsize(width = 800, height =", "12\") text2.insert(INSERT, text1) text2.pack() master2.mainloop() def ntt2(self): master2 = Tk() master2.minsize(width = 800,", "= 2, columnspan = 4) master0.bprov2 = Button(master0, text='Coto Makassar', command=self.sulsel2, width =", "4) self.bprov17 = Button(self.master, text='Bali', command=self.__prov17, width = 25) self.bprov17.grid(row = 17, column", "__prov32(self): master0 = Tk() master0.minsize(width = 450, height = 100) master0.maxsize(width = 450,", 
"Button(master0, text='<NAME>', command=self.ban2, width = 25, height=3) master0.bprov2.grid(row = 1, column = 6,", "height = 600) master2.maxsize(width = 800, height = 600) master2.title(\"Makanan tradisional Papua\") canvas", "width = 300, height = 300) canvas.pack() img = PhotoImage(master = canvas,file=\"Manday.png\") canvas.create_image(5,5,", "width = 25) self.bprov6.grid(row = 6, column = 3, columnspan = 4) self.bprov7", "height = 300) canvas.pack() img = PhotoImage(master = canvas,file=\"Coto Makassar.png\") canvas.create_image(5,5, anchor=NW, image=img)", "= lst[2] text2 = Text(master2, font = \"Arial 12\") text2.insert(INSERT, text1) text2.pack() master2.mainloop()", "4) self.bprov12 = Button(self.master, text='Jawa Barat', command=self.__prov12, width = 25) self.bprov12.grid(row = 12,", "text1 = lst[45] text2 = Text(master2, font = \"Arial 12\") text2.insert(INSERT, text1) text2.pack()", "canvas,file=\"Seruit Lampung.png\") canvas.create_image(5,5, anchor=NW, image=img) text1 = lst[18] text2 = Text(master2, font =", "img = PhotoImage(master = canvas,file=\"Tekwan Palembang.png\") canvas.create_image(5,5, anchor=NW, image=img) text1 = lst[15] text2", "text='Kalimantan Selatan', command=self.__prov23, width = 25) self.bprov23.grid(row = 6, column = 7, columnspan", "text='Maluku Utara', command=self.__prov31, width = 25) self.bprov31.grid(row = 14, column = 7, columnspan", "width = 300, height = 300) canvas.pack() img = PhotoImage(master = canvas,file=\"Ayam Betutu.png\")", "= 800, height = 600) master2.title(\"Makanan tradisional Kalimantan Timur\") canvas = Canvas(master2, width", "= 4) master0.bprov2 = Button(master0, text='Jepa', command=self.sulbar2, width = 25, height=3) master0.bprov2.grid(row =", "canvas.pack() img = PhotoImage(master = canvas,file=\"Bagar Hiu.png\") canvas.create_image(5,5, anchor=NW, image=img) text1 = lst[13]", "text1 = lst[20] text2 = Text(master2, font = \"Arial 12\") text2.insert(INSERT, text1) text2.pack()", "= 
PhotoImage(master = canvas,file=\"Tiwul.png\") canvas.create_image(5,5, anchor=NW, image=img) text1 = lst[26] text2 = Text(master2,", "4) master0.bprov2 = Button(master0, text='Tapa Kolo', command=self.ntt2, width = 25, height=3) master0.bprov2.grid(row =", "text2.insert(INSERT, text1) text2.pack() master2.mainloop() def ban2(self): master2 = Tk() master2.minsize(width = 800, height", "lst[53] text2 = Text(master2, font = \"Arial 12\") text2.insert(INSERT, text1) text2.pack() master2.mainloop() def", "= lst[23] text2 = Text(master2, font = \"Arial 12\") text2.insert(INSERT, text1) text2.pack() master2.mainloop()", "text2 = Text(master2, font = \"Arial 12\") text2.insert(INSERT, text1) text2.pack() master2.mainloop() def __prov12(self):", "Sambas.png\") canvas.create_image(5,5, anchor=NW, image=img) text1 = lst[40] text2 = Text(master2, font = \"Arial", "4) master0.mainloop() def jambi1(self): master2 = Tk() master2.minsize(width = 800, height = 600)", "= Text(master2, font = \"Arial 12\") text2.insert(INSERT, text1) text2.pack() master2.mainloop() def __prov12(self): master0", "Button(master0, text='<NAME>u', command=self.bali2, width = 25, height=3) master0.bprov2.grid(row = 1, column = 6,", "= Button(self.master, text='Kepulauan Riau', command=self.__prov5, width = 25) self.bprov5.grid(row = 5, column =", "height=3) master0.bprov2.grid(row = 1, column = 6, columnspan = 4) master0.mainloop() def ntb1(self):", "4) master0.bprov1 = Button(master0, text='<NAME>', command=self.papua1, width = 25, height=3) master0.bprov1.grid(row = 1,", "= 600) master2.title(\"Makanan tradisional Sulawesi Selatan\") canvas = Canvas(master2, width = 300, height", "anchor=NW, image=img) text1 = lst[43] text2 = Text(master2, font = \"Arial 12\") text2.insert(INSERT,", "9, column = 7, columnspan = 4) self.bprov27 = Button(self.master, text='Sulawesi Barat', command=self.__prov27,", "text2.pack() master2.mainloop() def __prov2(self): master0 = Tk() master0.minsize(width = 450, height = 
100)", "height = 300) canvas.pack() img = PhotoImage(master = canvas,file=\"Belaca Belitung.png\") canvas.create_image(5,5, anchor=NW, image=img)", "= 3, columnspan = 4) self.bprov12 = Button(self.master, text='Jawa Barat', command=self.__prov12, width =", "4) self.bprov6 = Button(self.master, text='Jambi', command=self.__prov6, width = 25) self.bprov6.grid(row = 6, column", "__prov1(self): master0 = Tk() master0.minsize(width = 450, height = 100) master0.maxsize(width = 450,", "7, columnspan = 4) self.bprov22 = Button(self.master, text='Kalimantan Tengah', command=self.__prov22, width = 25)", "command=self.sulteng2, width = 25, height=3) master0.bprov2.grid(row = 1, column = 6, columnspan =", "1, column = 2, columnspan = 4) master0.bprov2 = Button(master0, text='Papeda', command=self.papua2, width", "ntt1(self): master2 = Tk() master2.minsize(width = 800, height = 600) master2.maxsize(width = 800,", "master2.mainloop() def jambi2(self): master2 = Tk() master2.minsize(width = 800, height = 600) master2.maxsize(width", "canvas.create_image(5,5, anchor=NW, image=img) text1 = lst[17] text2 = Text(master2, font = \"Arial 12\")", "600) master2.maxsize(width = 800, height = 600) master2.title(\"Makanan tradisional Kalimantan Tengha\") canvas =", "Sulawesi Barat\") canvas = Canvas(master2, width = 300, height = 300) canvas.pack() img", "text1) text2.pack() master2.mainloop() def kalsel2(self): master2 = Tk() master2.minsize(width = 800, height =", "text2.insert(INSERT, text1) text2.pack() master2.mainloop() def malu2(self): master2 = Tk() master2.minsize(width = 800, height", "= 300) canvas.pack() img = PhotoImage(master = canvas,file=\"Kalumpe.png\") canvas.create_image(5,5, anchor=NW, image=img) text1 =", "= 4) master0.mainloop() def jabar1(self): master2 = Tk() master2.minsize(width = 800, height =", "12\") text2.insert(INSERT, text1) text2.pack() master2.mainloop() def aceh2(self): master2 = Tk() master2.minsize(width = 800,", "text='Sate Bulayak', command=self.ntb2, 
width = 25, height=3) master0.bprov2.grid(row = 1, column = 6,", "self.bprov24 = Button(self.master, text='Kalimantan Timur', command=self.__prov24, width = 25) self.bprov24.grid(row = 7, column", "600) master2.title(\"Makanan tradisional Maluku\") canvas = Canvas(master2, width = 300, height = 300)", "= \"Arial 12\") text2.insert(INSERT, text1) text2.pack() master2.mainloop() def __prov9(self): master0 = Tk() master0.minsize(width", "= 14, column = 3, columnspan = 4) self.bprov15 = Button(self.master, text='DI Yogyakarta',", "25) self.bprov17.grid(row = 17, column = 3, columnspan = 4) self.bprov18 = Button(self.master,", "command=self.aceh2, width = 25, height=3) master0.bprov2.grid(row = 1, column = 6, columnspan =", "master0.bprov1 = Button(master0, text='<NAME>', command=self.riau1, width = 25, height=3) master0.bprov1.grid(row = 1, column", "master2.title(\"Makanan tradisional Jawa Barat\") canvas = Canvas(master2, width = 300, height = 300)", "= 1, column = 6, columnspan = 4) master0.mainloop() def bali1(self): master2 =", "canvas.create_image(5,5, anchor=NW, image=img) text1 = lst[11] text2 = Text(master2, font = \"Arial 12\")", "= 1, column = 6, columnspan = 4) master0.mainloop() def sulteng1(self): master2 =", "height = 600) master2.title(\"Makanan tradisional Banten\") canvas = Canvas(master2, width = 300, height", "columnspan = 4) master0.mainloop() def kaltim1(self): master2 = Tk() master2.minsize(width = 800, height", "image=img) text1 = lst[14] text2 = Text(master2, font = \"Arial 12\") text2.insert(INSERT, text1)", "\"Arial 12\") text2.insert(INSERT, text1) text2.pack() master2.mainloop() def __prov34(self): master0 = Tk() master0.minsize(width =", "= 600) master2.title(\"Makanan tradisional Jawa Timur\") canvas = Canvas(master2, width = 300, height", "= lst[12] text2 = Text(master2, font = \"Arial 12\") text2.insert(INSERT, text1) text2.pack() master2.mainloop()", "text1 = lst[65] text2 = Text(master2, font = \"Arial 12\") text2.insert(INSERT, 
text1) text2.pack()", "12\") text2.insert(INSERT, text1) text2.pack() master2.mainloop() def kriau2(self): master2 = Tk() master2.minsize(width = 800,", "1, column = 6, columnspan = 4) master0.mainloop() def riau1(self): master2 = Tk()", "canvas,file=\"Gulai Taboh.png\") canvas.create_image(5,5, anchor=NW, image=img) text1 = lst[19] text2 = Text(master2, font =", "600) master2.maxsize(width = 800, height = 600) master2.title(\"Makanan tradisional Lampung\") canvas = Canvas(master2,", "columnspan = 4) master0.mainloop() def diy1(self): master2 = Tk() master2.minsize(width = 800, height", "width = 25) self.bprov13.grid(row = 13, column = 3, columnspan = 4) self.bprov14", "16 bold\") self.master.judul.grid(row = 0, column = 3, columnspan = 8) self.bprov1 =", "PhotoImage(master = canvas,file=\"Sate Lilit.png\") canvas.create_image(5,5, anchor=NW, image=img) text1 = lst[32] text2 = Text(master2,", "Lilit.png\") canvas.create_image(5,5, anchor=NW, image=img) text1 = lst[32] text2 = Text(master2, font = \"Arial", "= canvas,file=\"Tapa Kolo.png\") canvas.create_image(5,5, anchor=NW, image=img) text1 = lst[37] text2 = Text(master2, font", "1, column = 6, columnspan = 4) master0.mainloop() def kalut1(self): master2 = Tk()", "kalut1(self): master2 = Tk() master2.minsize(width = 800, height = 600) master2.maxsize(width = 800,", "column = 6, columnspan = 4) master0.mainloop() def kalteng1(self): master2 = Tk() master2.minsize(width", "= lst[19] text2 = Text(master2, font = \"Arial 12\") text2.insert(INSERT, text1) text2.pack() master2.mainloop()", "= 2, columnspan = 4) master0.bprov2 = Button(master0, text='Nasi Bekekpor', command=self.kaltim2, width =", "height=3) master0.bprov2.grid(row = 1, column = 6, columnspan = 4) master0.mainloop() def pabar1(self):", "column = 7, columnspan = 4) self.bprov29 = Button(self.master, text='Sulawesi Selatan', command=self.__prov29, width", "= canvas,file=\"Padamaran.png\") canvas.create_image(5,5, anchor=NW, image=img) text1 = lst[11] text2 
= Text(master2, font =", "text2.pack() master2.mainloop() def __prov30(self): master0 = Tk() master0.minsize(width = 450, height = 100)", "= 4) master0.bprov2 = Button(master0, text='Kasoami', command=self.sulgar2, width = 25, height=3) master0.bprov2.grid(row =", "= 300, height = 300) canvas.pack() img = PhotoImage(master = canvas,file=\"Gatang Kenari.png\") canvas.create_image(5,5,", "width = 25) self.bprov11.grid(row = 11, column = 3, columnspan = 4) self.bprov12", "= 300) canvas.pack() img = PhotoImage(master = canvas,file=\"Nasi Lapola.png\") canvas.create_image(5,5, anchor=NW, image=img) text1", "= PhotoImage(master = canvas,file=\"Ayam Taliwang.png\") canvas.create_image(5,5, anchor=NW, image=img) text1 = lst[34] text2 =", "= 25) self.bprov9.grid(row = 9, column = 3, columnspan = 4) self.bprov10 =", "= 800, height = 600) master2.title(\"Makanan tradisional Sulawesi Utara\") canvas = Canvas(master2, width", "Canvas(master2, width = 300, height = 300) canvas.pack() img = PhotoImage(master = canvas,file=\"Kepiting", "Kenari.png\") canvas.create_image(5,5, anchor=NW, image=img) text1 = lst[60] text2 = Text(master2, font = \"Arial", "master2.minsize(width = 800, height = 600) master2.maxsize(width = 800, height = 600) master2.title(\"Makanan", "= 0, column = 2, columnspan = 4) master0.bprov1 = Button(master0, text='Soto Kudus',", "12\") text2.insert(INSERT, text1) text2.pack() master2.mainloop() def __prov7(self): master0 = Tk() master0.minsize(width = 450,", "columnspan = 4) master0.mainloop() def kbang1(self): master2 = Tk() master2.minsize(width = 800, height", "2, columnspan = 4) master0.bprov1 = Button(master0, text='Sop Konro', command=self.sulsel1, width = 25,", "4) master0.bprov2 = Button(master0, text='Kohu-kohu', command=self.malu2, width = 25, height=3) master0.bprov2.grid(row = 1,", "= 4) master0.mainloop() def beng1(self): master2 = Tk() master2.minsize(width = 800, height =", "image=img) text1 = lst[37] text2 = Text(master2, font = \"Arial 12\") 
text2.insert(INSERT, text1)", "master0.bprov2.grid(row = 1, column = 6, columnspan = 4) master0.mainloop() def kaltim1(self): master2", "master0.bprov2 = Button(master0, text='<NAME>', command=self.kalut2, width = 25, height=3) master0.bprov2.grid(row = 1, column", "= PhotoImage(master = canvas,file=\"Kohu Kohu.png\") canvas.create_image(5,5, anchor=NW, image=img) text1 = lst[63] text2 =", "height = 600) master2.title(\"Makanan tradisional Papua Barat\") canvas = Canvas(master2, width = 300,", "600) master2.maxsize(width = 800, height = 600) master2.title(\"Makanan tradisional Kepulauan Riau\") canvas =", "def sulut1(self): master2 = Tk() master2.minsize(width = 800, height = 600) master2.maxsize(width =", "Button(master0, text='Mie Aceh', command=self.aceh1, width = 25, height=3) master0.bprov1.grid(row = 1, column =", "master0.bprov2 = Button(master0, text='Gulai Taboh', command=self.lamp2, width = 25, height=3) master0.bprov2.grid(row = 1,", "4) master0.bprov2 = Button(master0, text='Gangan Asam Banjar', command=self.kalsel2, width = 25, height=3) master0.bprov2.grid(row", "def sumut1(self): master2 = Tk() master2.minsize(width = 800, height = 600) master2.maxsize(width =", "= 800, height = 600) master2.title(\"Makanan tradisional Kalimantan Utara\") canvas = Canvas(master2, width", "columnspan = 4) self.bprov7 = Button(self.master, text='Bengukulu', command=self.__prov7, width = 25) self.bprov7.grid(row =", "Barat\") canvas = Canvas(master2, width = 300, height = 300) canvas.pack() img =", "width = 25) self.bprov30.grid(row = 13, column = 7, columnspan = 4) self.bprov31", "= 300) canvas.pack() img = PhotoImage(master = canvas,file=\"Martabak Bangka.png\") canvas.create_image(5,5, anchor=NW, image=img) text1", "height = 300) canvas.pack() img = PhotoImage(master = canvas,file=\"Sate Lilit.png\") canvas.create_image(5,5, anchor=NW, image=img)", "lst[36] text2 = Text(master2, font = \"Arial 12\") text2.insert(INSERT, text1) text2.pack() master2.mainloop() def", 
"columnspan = 4) master0.bprov1 = Button(master0, text='Lawa', command=self.kalut1, width = 25, height=3) master0.bprov1.grid(row", "4) self.bprov10 = Button(self.master, text='Lampung', command=self.__prov10, width = 25) self.bprov10.grid(row = 10, column", "column = 2, columnspan = 4) master0.bprov1 = Button(master0, text='Rendang', command=self.sumbar1, width =", "PhotoImage(master = canvas,file=\"Tiwul.png\") canvas.create_image(5,5, anchor=NW, image=img) text1 = lst[26] text2 = Text(master2, font", "canvas,file=\"Bagar Hiu.png\") canvas.create_image(5,5, anchor=NW, image=img) text1 = lst[13] text2 = Text(master2, font =", "= \"Arial 12\") text2.insert(INSERT, text1) text2.pack() master2.mainloop() def __prov20(self): master0 = Tk() master0.minsize(width", "= \"Arial 12\") text2.insert(INSERT, text1) text2.pack() master2.mainloop() def kalut2(self): master2 = Tk() master2.minsize(width", "width = 300, height = 300) canvas.pack() img = PhotoImage(master = canvas,file=\"Padamaran.png\") canvas.create_image(5,5,", "goron2(self): master2 = Tk() master2.minsize(width = 800, height = 600) master2.maxsize(width = 800,", "300) canvas.pack() img = PhotoImage(master = canvas,file=\"Tinutuan.png\") canvas.create_image(5,5, anchor=NW, image=img) text1 = lst[51]", "img = PhotoImage(master = canvas,file=\"Nasi Lapola.png\") canvas.create_image(5,5, anchor=NW, image=img) text1 = lst[61] text2", "= 0, column = 2, columnspan = 4) master0.bprov1 = Button(master0, text='<NAME>', command=self.malut1,", "master0.bprov2.grid(row = 1, column = 6, columnspan = 4) master0.mainloop() def papua1(self): master2", "300, height = 300) canvas.pack() img = PhotoImage(master = canvas,file=\"Bagar Hiu.png\") canvas.create_image(5,5, anchor=NW,", "def jatim1(self): master2 = Tk() master2.minsize(width = 800, height = 600) master2.maxsize(width =", "= 300) canvas.pack() img = PhotoImage(master = canvas,file=\"Ayam Taliwang.png\") canvas.create_image(5,5, anchor=NW, image=img) text1", 
"Button(master0, text='<NAME>', command=self.kalbar2, width = 25, height=3) master0.bprov2.grid(row = 1, column = 6,", "1, column = 6, columnspan = 4) master0.mainloop() def ntb1(self): master2 = Tk()", "def kalbar1(self): master2 = Tk() master2.minsize(width = 800, height = 600) master2.maxsize(width =", "def __prov27(self): master0 = Tk() master0.minsize(width = 450, height = 100) master0.maxsize(width =", "= 4) master0.bprov1 = Button(master0, text='<NAME>', command=self.jabar1, width = 25, height=3) master0.bprov1.grid(row =", "def diy2(self): master2 = Tk() master2.minsize(width = 800, height = 600) master2.maxsize(width =", "height = 600) master2.maxsize(width = 800, height = 600) master2.title(\"Makanan tradisional Kalimantan Timur\")", "300) canvas.pack() img = PhotoImage(master = canvas,file=\"Kaledo.png\") canvas.create_image(5,5, anchor=NW, image=img) text1 = lst[55]", "= \"Arial 12\") text2.insert(INSERT, text1) text2.pack() master2.mainloop() def kbang2(self): master2 = Tk() master2.minsize(width", "tradisional BAnten\") canvas = Canvas(master2, width = 300, height = 300) canvas.pack() img", "height=3) master0.bprov2.grid(row = 1, column = 6, columnspan = 4) master0.mainloop() def sumut1(self):", "= canvas,file=\"Soto Betawi.png\") canvas.create_image(5,5, anchor=NW, image=img) text1 = lst[20] text2 = Text(master2, font", "height = 600) master2.title(\"Makanan tradisional NTT\") canvas = Canvas(master2, width = 300, height", "4) master0.bprov1 = Button(master0, text='Ikan Bakar Manokwari', command=self.pabar1, width = 25, height=3) master0.bprov1.grid(row", "anchor=NW, image=img) text1 = lst[63] text2 = Text(master2, font = \"Arial 12\") text2.insert(INSERT,", "canvas.pack() img = PhotoImage(master = canvas,file=\"Ayam Taliwang.png\") canvas.create_image(5,5, anchor=NW, image=img) text1 = lst[34]", "canvas = Canvas(master2, width = 300, height = 300) canvas.pack() img = PhotoImage(master", "12\") text2.insert(INSERT, text1) text2.pack() 
master2.mainloop() def pabar2(self): master2 = Tk() master2.minsize(width = 800,", "= Text(master2, font = \"Arial 12\") text2.insert(INSERT, text1) text2.pack() master2.mainloop() def dki2(self): master2", "800, height = 600) master2.title(\"Makanan tradisional NTB\") canvas = Canvas(master2, width = 300,", "canvas.create_image(5,5, anchor=NW, image=img) text1 = lst[12] text2 = Text(master2, font = \"Arial 12\")", "text2 = Text(master2, font = \"Arial 12\") text2.insert(INSERT, text1) text2.pack() master2.mainloop() def __prov28(self):", "Text(master2, font = \"Arial 12\") text2.insert(INSERT, text1) text2.pack() master2.mainloop() def __prov14(self): master0 =", "0, column = 2, columnspan = 4) master0.bprov1 = Button(master0, text='<NAME>', command=self.jabar1, width", "jatim2(self): master2 = Tk() master2.minsize(width = 800, height = 600) master2.maxsize(width = 800,", "2, columnspan = 4) master0.bprov2 = Button(master0, text='<NAME>', command=self.kalut2, width = 25, height=3)", "= PhotoImage(master = canvas,file=\"Lepek Binti.png\") canvas.create_image(5,5, anchor=NW, image=img) text1 = lst[12] text2 =", "canvas,file=\"W<NAME>in.png\") canvas.create_image(5,5, anchor=NW, image=img) text1 = lst[43] text2 = Text(master2, font = \"Arial", "12\") text2.insert(INSERT, text1) text2.pack() master2.mainloop() def jatim2(self): master2 = Tk() master2.minsize(width = 800,", "= 4) master0.bprov1 = Button(master0, text='<NAME>', command=self.papua1, width = 25, height=3) master0.bprov1.grid(row =", "300, height = 300) canvas.pack() img = PhotoImage(master = canvas,file=\"Seruit Lampung.png\") canvas.create_image(5,5, anchor=NW,", "width = 300, height = 300) canvas.pack() img = PhotoImage(master = canvas,file=\"Nasi Sumsum.png\")", "= 4) master0.bprov2 = Button(master0, text='Papeda', command=self.papua2, width = 25, height=3) master0.bprov2.grid(row =", "12\") text2.insert(INSERT, text1) text2.pack() master2.mainloop() def __prov16(self): master0 = Tk() 
master0.minsize(width = 450,", "image=img) text1 = lst[44] text2 = Text(master2, font = \"Arial 12\") text2.insert(INSERT, text1)", "= canvas,file=\"Kohu Kohu.png\") canvas.create_image(5,5, anchor=NW, image=img) text1 = lst[63] text2 = Text(master2, font", "txt += cmp cmp = f.readline() lst.append(txt) line = f.readline() class DirektoriMakanan(): def", "height = 600) master2.maxsize(width = 800, height = 600) master2.title(\"Makanan tradisional Jawa TEngah\")", "Jawa TEngah\") canvas = Canvas(master2, width = 300, height = 300) canvas.pack() img", "height = 300) canvas.pack() img = PhotoImage(master = canvas,file=\"Nasi Bekepor.png\") canvas.create_image(5,5, anchor=NW, image=img)", "\"Arial 12\") text2.insert(INSERT, text1) text2.pack() master2.mainloop() def malut2(self): master2 = Tk() master2.minsize(width =", "jabar1(self): master2 = Tk() master2.minsize(width = 800, height = 600) master2.maxsize(width = 800,", "canvas,file=\"Sop Konro.png\") canvas.create_image(5,5, anchor=NW, image=img) text1 = lst[56] text2 = Text(master2, font =", "4) master0.mainloop() def kalsel1(self): master2 = Tk() master2.minsize(width = 800, height = 600)", "command=self.kalut2, width = 25, height=3) master0.bprov2.grid(row = 1, column = 6, columnspan =", "= 4) master0.bprov2 = Button(master0, text='Gulai Taboh', command=self.lamp2, width = 25, height=3) master0.bprov2.grid(row", "image=img) text1 = lst[29] text2 = Text(master2, font = \"Arial 12\") text2.insert(INSERT, text1)", "= PhotoImage(master = canvas,file=\"Sei.png\") canvas.create_image(5,5, anchor=NW, image=img) text1 = lst[36] text2 = Text(master2,", "font = \"Arial 12\") text2.insert(INSERT, text1) text2.pack() master2.mainloop() def __prov19(self): master0 = Tk()", "= 1, column = 2, columnspan = 4) master0.bprov2 = Button(master0, text='Gangan Asam", "6, column = 3, columnspan = 4) self.bprov7 = Button(self.master, text='Bengukulu', command=self.__prov7, width", "__prov13(self): master0 = Tk() master0.minsize(width = 
450, height = 100) master0.maxsize(width = 450,", "600) master2.maxsize(width = 800, height = 600) master2.title(\"Makanan tradisional Kalimantan Selatan\") canvas =", "= 300) canvas.pack() img = PhotoImage(master = canvas,file=\"Apang Bugis.png\") canvas.create_image(5,5, anchor=NW, image=img) text1", "canvas,file=\"Tiwul.png\") canvas.create_image(5,5, anchor=NW, image=img) text1 = lst[26] text2 = Text(master2, font = \"Arial", "Button(master0, text='Sambal Colo-colo', command=self.malu1, width = 25, height=3) master0.bprov1.grid(row = 1, column =", "300) canvas.pack() img = PhotoImage(master = canvas,file=\"Jadah Tempe.png\") canvas.create_image(5,5, anchor=NW, image=img) text1 =", "800, height = 600) master2.maxsize(width = 800, height = 600) master2.title(\"Makanan tradisional NTT\")", "PhotoImage(master = canvas,file=\"Tapa Kolo.png\") canvas.create_image(5,5, anchor=NW, image=img) text1 = lst[37] text2 = Text(master2,", "columnspan = 4) master0.bprov2 = Button(master0, text='<NAME>', command=self.jambi2, width = 25, height=3) master0.bprov2.grid(row", "height = 300) canvas.pack() img = PhotoImage(master = canvas,file=\"Apang Bugis.png\") canvas.create_image(5,5, anchor=NW, image=img)", "text2.insert(INSERT, text1) text2.pack() master2.mainloop() def pabar2(self): master2 = Tk() master2.minsize(width = 800, height", "= 4) master0.bprov2 = Button(master0, text='Gangan Asam Banjar', command=self.kalsel2, width = 25, height=3)", "= Button(master0, text='Coto Makassar', command=self.sulsel2, width = 25, height=3) master0.bprov2.grid(row = 1, column", "text2 = Text(master2, font = \"Arial 12\") text2.insert(INSERT, text1) text2.pack() master2.mainloop() def kalbar2(self):", "16 bold\") master0.judul.grid(row = 0, column = 2, columnspan = 4) master0.bprov1 =", "= Button(self.master, text='Sumatera Barat', command=self.__prov3, width = 25) self.bprov3.grid(row = 3, column =", "= 300) canvas.pack() img = PhotoImage(master = canvas,file=\"Dekke Na Niura.png\") 
canvas.create_image(5,5, anchor=NW, image=img)", "height = 600) master2.maxsize(width = 800, height = 600) master2.title(\"Makanan tradisional DKI Jakarta\")", "300) canvas.pack() img = PhotoImage(master = canvas,file=\"Sei.png\") canvas.create_image(5,5, anchor=NW, image=img) text1 = lst[36]", "columnspan = 4) master0.mainloop() def kriau1(self): master2 = Tk() master2.minsize(width = 800, height", "14, column = 7, columnspan = 4) self.bprov32 = Button(self.master, text='Maluku', command=self.__prov32, width", "height=3) master0.bprov2.grid(row = 1, column = 6, columnspan = 4) master0.mainloop() def kalbar1(self):", "= Button(master0, text='Rujak Cingur', command=self.jatim1, width = 25, height=3) master0.bprov1.grid(row = 1, column", "600) master2.maxsize(width = 800, height = 600) master2.title(\"Makanan tradisional Kalimantan Tengah\") canvas =", "= 800, height = 600) master2.title(\"Makanan tradisional Sulawesi Barat \") canvas = Canvas(master2,", "text2.pack() master2.mainloop() def __prov12(self): master0 = Tk() master0.minsize(width = 450, height = 100)", "height = 600) master2.maxsize(width = 800, height = 600) master2.title(\"Makanan tradisional Aceh\") canvas", "Selatan\") canvas = Canvas(master2, width = 300, height = 300) canvas.pack() img =", "800, height = 600) master2.title(\"Makanan tradisional Jawa TEngah\") canvas = Canvas(master2, width =", "= 8) self.bprov1 = Button(self.master, text='Aceh', command=self.__prov1, width = 25) self.bprov1.grid(row = 1,", "= \"Arial 12\") text2.insert(INSERT, text1) text2.pack() master2.mainloop() def pabar2(self): master2 = Tk() master2.minsize(width", "0, column = 2, columnspan = 4) master0.bprov1 = Button(master0, text='Mie Aceh', command=self.aceh1,", "column = 2, columnspan = 4) master0.bprov2 = Button(master0, text='Tapa Kolo', command=self.ntt2, width", "= canvas,file=\"Pempek Palembang.png\") canvas.create_image(5,5, anchor=NW, image=img) text1 = lst[14] text2 = Text(master2, font", "text2.pack() 
master2.mainloop() def jateng2(self): master2 = Tk() master2.minsize(width = 800, height = 600)", "Button(master0, text='Manday', command=self.kalsel1, width = 25, height=3) master0.bprov1.grid(row = 1, column = 2,", "text2 = Text(master2, font = \"Arial 12\") text2.insert(INSERT, text1) text2.pack() master2.mainloop() def malu2(self):", "0, column = 2, columnspan = 4) master0.bprov1 = Button(master0, text='Tempoyak', command=self.jambi1, width", "height = 600) master2.maxsize(width = 800, height = 600) master2.title(\"Makanan tradisional Gorontalo\") canvas", "master2.maxsize(width = 800, height = 600) master2.title(\"Makanan tradisional Sulawesi Barat \") canvas =", "300, height = 300) canvas.pack() img = PhotoImage(master = canvas,file=\"Tinutuan.png\") canvas.create_image(5,5, anchor=NW, image=img)", "columnspan = 4) self.bprov5 = Button(self.master, text='Kepulauan Riau', command=self.__prov5, width = 25) self.bprov5.grid(row", "command=self.riau2, width = 25, height=3) master0.bprov2.grid(row = 1, column = 6, columnspan =", "img = PhotoImage(master = canvas,file=\"<NAME>.png\") canvas.create_image(5,5, anchor=NW, image=img) text1 = lst[24] text2 =", "PhotoImage(master = canvas,file=\"Gong Gong.png\") canvas.create_image(5,5, anchor=NW, image=img) text1 = lst[9] text2 = Text(master2,", "column = 2, columnspan = 4) master0.bprov1 = Button(master0, text='Sop Konro', command=self.sulsel1, width", "sulbar2(self): master2 = Tk() master2.minsize(width = 800, height = 600) master2.maxsize(width = 800,", "canvas,file=\"Gong Gong.png\") canvas.create_image(5,5, anchor=NW, image=img) text1 = lst[9] text2 = Text(master2, font =", "= Text(master2, font = \"Arial 12\") text2.insert(INSERT, text1) text2.pack() master2.mainloop() def __prov11(self): master0", "column = 2, columnspan = 4) master0.bprov2 = Button(master0, text='Coto Makassar', command=self.sulsel2, width", "text='Sulawesi Tengah', command=self.__prov28, width = 25) self.bprov28.grid(row = 11, column = 7, 
columnspan", "= 4) master0.bprov1 = Button(master0, text='<NAME>', command=self.malut1, width = 25, height=3) master0.bprov1.grid(row =", "= 15, column = 3, columnspan = 4) self.bprov16 = Button(self.master, text='Jawa Timur',", "PhotoImage(master = canvas,file=\"Udang Selingkuh.png\") canvas.create_image(5,5, anchor=NW, image=img) text1 = lst[66] text2 = Text(master2,", "= 600) master2.maxsize(width = 800, height = 600) master2.title(\"Makanan tradisional Aceh\") canvas =", "6, columnspan = 4) master0.mainloop() def kbang1(self): master2 = Tk() master2.minsize(width = 800,", "columnspan = 4) master0.mainloop() def jambi1(self): master2 = Tk() master2.minsize(width = 800, height", "300, height = 300) canvas.pack() img = PhotoImage(master = canvas,file=\"Belaca Belitung.png\") canvas.create_image(5,5, anchor=NW,", "4) master0.bprov1 = Button(master0, text=\"Se'i\", command=self.ntt1, width = 25, height=3) master0.bprov1.grid(row = 1,", "= Text(master2, font = \"Arial 12\") text2.insert(INSERT, text1) text2.pack() master2.mainloop() def sulut2(self): master2", "def __prov33(self): master0 = Tk() master0.minsize(width = 450, height = 100) master0.maxsize(width =", "__prov23(self): master0 = Tk() master0.minsize(width = 450, height = 100) master0.maxsize(width = 450,", "image=img) text1 = lst[34] text2 = Text(master2, font = \"Arial 12\") text2.insert(INSERT, text1)", "= 600) master2.title(\"Makanan tradisional Bali\") canvas = Canvas(master2, width = 300, height =", "= Button(master0, text='Lepek Binti', command=self.beng1, width = 25, height=3) master0.bprov1.grid(row = 1, column", "4) master0.mainloop() def jabar1(self): master2 = Tk() master2.minsize(width = 800, height = 600)", "canvas.pack() img = PhotoImage(master = canvas,file=\"Sate Lilit.png\") canvas.create_image(5,5, anchor=NW, image=img) text1 = lst[32]", "Biluhuta', command=self.goron1, width = 25, height=3) master0.bprov1.grid(row = 1, column = 2, columnspan", "3, columnspan = 4) self.bprov11 = 
Button(self.master, text='Banten', command=self.__prov11, width = 25) self.bprov11.grid(row", "= canvas,file=\"Tempoyak.png\") canvas.create_image(5,5, anchor=NW, image=img) text1 = lst[10] text2 = Text(master2, font =", "600) master2.maxsize(width = 800, height = 600) master2.title(\"Makanan tradisional Kalimantan Barat\") canvas =", "master2.title(\"Makanan tradisional Kalimantan Tengha\") canvas = Canvas(master2, width = 300, height = 300)", "self.bprov18 = Button(self.master, text='NTB', command=self.__prov18, width = 25) self.bprov18.grid(row = 1, column =", "column = 2, columnspan = 4) master0.bprov1 = Button(master0, text='<NAME>', command=self.sumut1, width =", "Binti.png\") canvas.create_image(5,5, anchor=NW, image=img) text1 = lst[12] text2 = Text(master2, font = \"Arial", "text='<NAME>', command=self.kaltim1, width = 25, height=3) master0.bprov1.grid(row = 1, column = 2, columnspan", "height = 300) canvas.pack() img = PhotoImage(master = canvas,file=\"Nasi Lapola.png\") canvas.create_image(5,5, anchor=NW, image=img)", "= 300, height = 300) canvas.pack() img = PhotoImage(master = canvas,file=\"Binte Biluhuta.png\") canvas.create_image(5,5,", "= Text(master2, font = \"Arial 12\") text2.insert(INSERT, text1) text2.pack() master2.mainloop() def sumsel2(self): master2", "= Button(self.master, text='DKI Jakarta', command=self.__prov13, width = 25) self.bprov13.grid(row = 13, column =", "= 600) master2.maxsize(width = 800, height = 600) master2.title(\"Makanan tradisional Papua Barat\") canvas", "canvas.create_image(5,5, anchor=NW, image=img) text1 = lst[10] text2 = Text(master2, font = \"Arial 12\")", "image=img) text1 = lst[1] text2 = Text(master2, font = \"Arial 12\") text2.insert(INSERT, text1)", "= 1, column = 2, columnspan = 4) master0.bprov2 = Button(master0, text='Sate Bulayak',", "4) master0.bprov1 = Button(master0, text='Rendang', command=self.sumbar1, width = 25, height=3) master0.bprov1.grid(row = 1,", "4) self.bprov14 = Button(self.master, 
text='Jawa Tengah', command=self.__prov14, width = 25) self.bprov14.grid(row = 14,", "image=img) text1 = lst[22] text2 = Text(master2, font = \"Arial 12\") text2.insert(INSERT, text1)", "12\") text2.insert(INSERT, text1) text2.pack() master2.mainloop() def goron2(self): master2 = Tk() master2.minsize(width = 800,", "= \"Arial 12\") text2.insert(INSERT, text1) text2.pack() master2.mainloop() def jatim2(self): master2 = Tk() master2.minsize(width", "= canvas,file=\"Sate Bulayak.png\") canvas.create_image(5,5, anchor=NW, image=img) text1 = lst[35] text2 = Text(master2, font", "text1 = lst[12] text2 = Text(master2, font = \"Arial 12\") text2.insert(INSERT, text1) text2.pack()", "height=3) master0.bprov2.grid(row = 1, column = 6, columnspan = 4) master0.mainloop() def beng1(self):", "= \"Arial 12\") text2.insert(INSERT, text1) text2.pack() master2.mainloop() def __prov12(self): master0 = Tk() master0.minsize(width", "= lst[31] text2 = Text(master2, font = \"Arial 12\") text2.insert(INSERT, text1) text2.pack() master2.mainloop()", "\"Arial 12\") text2.insert(INSERT, text1) text2.pack() master2.mainloop() def __prov17(self): master0 = Tk() master0.minsize(width =", "height=3) master0.bprov2.grid(row = 1, column = 6, columnspan = 4) master0.mainloop() def kalteng1(self):", "300, height = 300) canvas.pack() img = PhotoImage(master = canvas,file=\"<NAME>oyak.png\") canvas.create_image(5,5, anchor=NW, image=img)", "= 800, height = 600) master2.title(\"Makanan tradisional Sumatera Utara\") canvas = Canvas(master2, width", "= 1, column = 2, columnspan = 4) master0.bprov2 = Button(master0, text='Nasi Bekekpor',", "4) master0.bprov2 = Button(master0, text='Jepa', command=self.sulbar2, width = 25, height=3) master0.bprov2.grid(row = 1,", "text1 = lst[9] text2 = Text(master2, font = \"Arial 12\") text2.insert(INSERT, text1) text2.pack()", "master0.bprov2 = Button(master0, text='Tapa Kolo', command=self.ntt2, width = 25, height=3) master0.bprov2.grid(row = 1,", "canvas.pack() img = 
PhotoImage(master = canvas,file=\"Soto Kudus.png\") canvas.create_image(5,5, anchor=NW, image=img) text1 = lst[28]", "columnspan = 4) master0.bprov1 = Button(master0, text='<NAME>', command=self.ntb1, width = 25, height=3) master0.bprov1.grid(row", "Text(master2, font = \"Arial 12\") text2.insert(INSERT, text1) text2.pack() master2.mainloop() def __prov26(self): master0 =", "600) master2.maxsize(width = 800, height = 600) master2.title(\"Makanan tradisional Sulawesi Tenggara\") canvas =", "= 1, column = 2, columnspan = 4) master0.bprov2 = Button(master0, text='<NAME>', command=self.malut2,", "width = 300, height = 300) canvas.pack() img = PhotoImage(master = canvas,file=\"Coto Makassar.png\")", "canvas.pack() img = PhotoImage(master = canvas,file=\"Sate Bandeng.png\") canvas.create_image(5,5, anchor=NW, image=img) text1 = lst[23]", "text2.pack() master2.mainloop() def sulgar2(self): master2 = Tk() master2.minsize(width = 800, height = 600)", "img = PhotoImage(master = canvas,file=\"Rawon.png\") canvas.create_image(5,5, anchor=NW, image=img) text1 = lst[31] text2 =", "600) master2.maxsize(width = 800, height = 600) master2.title(\"Makanan tradisional Jawa TEngah\") canvas =", "= 600) master2.title(\"Makanan tradisional Lampung\") canvas = Canvas(master2, width = 300, height =", "master0.bprov2 = Button(master0, text='Dorokdok', command=self.jabar2, width = 25, height=3) master0.bprov2.grid(row = 1, column", "height = 600) master2.title(\"Makanan tradisional Sumatera Barat\") canvas = Canvas(master2, width = 300,", "text='Rendang', command=self.sumbar1, width = 25, height=3) master0.bprov1.grid(row = 1, column = 2, columnspan", "command=self.__prov25, width = 25) self.bprov25.grid(row = 8, column = 7, columnspan = 4)", "height = 600) master2.maxsize(width = 800, height = 600) master2.title(\"Makanan tradisional Banten\") canvas", "= 2, columnspan = 4) master0.bprov2 = Button(master0, text='Nasi Gerombyang', command=self.jateng2, width =", "command=self.jatim1, width 
= 25, height=3) master0.bprov1.grid(row = 1, column = 2, columnspan =", "anchor=NW, image=img) text1 = lst[31] text2 = Text(master2, font = \"Arial 12\") text2.insert(INSERT,", "2, columnspan = 4) master0.bprov2 = Button(master0, text='Sate Bulayak', command=self.ntb2, width = 25,", "= 4) self.bprov16 = Button(self.master, text='Jawa Timur', command=self.__prov16, width = 25) self.bprov16.grid(row =", "Jawa Barat\") canvas = Canvas(master2, width = 300, height = 300) canvas.pack() img", "PhotoImage(master = canvas,file=\"Kue Timpan.png\") canvas.create_image(5,5, anchor=NW, image=img) text1 = lst[1] text2 = Text(master2,", "= lst[10] text2 = Text(master2, font = \"Arial 12\") text2.insert(INSERT, text1) text2.pack() master2.mainloop()", "width = 300, height = 300) canvas.pack() img = PhotoImage(master = canvas,file=\"Rendang.png\") canvas.create_image(5,5,", "300) canvas.pack() img = PhotoImage(master = canvas,file=\"Tempoyak.png\") canvas.create_image(5,5, anchor=NW, image=img) text1 = lst[10]", "self.bprov28 = Button(self.master, text='Sulawesi Tengah', command=self.__prov28, width = 25) self.bprov28.grid(row = 11, column", "canvas.pack() img = PhotoImage(master = canvas,file=\"Klapertaart.png\") canvas.create_image(5,5, anchor=NW, image=img) text1 = lst[50] text2", "sulteng2(self): master2 = Tk() master2.minsize(width = 800, height = 600) master2.maxsize(width = 800,", "text2 = Text(master2, font = \"Arial 12\") text2.insert(INSERT, text1) text2.pack() master2.mainloop() def kalsel2(self):", "Text(master2, font = \"Arial 12\") text2.insert(INSERT, text1) text2.pack() master2.mainloop() def goron2(self): master2 =", "anchor=NW, image=img) text1 = lst[16] text2 = Text(master2, font = \"Arial 12\") text2.insert(INSERT,", "self.bprov8.grid(row = 8, column = 3, columnspan = 4) self.bprov9 = Button(self.master, text='Kepulauan", "text='Jawa Timur', command=self.__prov16, width = 25) self.bprov16.grid(row = 16, column = 3, columnspan", "command=self.__prov17, width = 
25) self.bprov17.grid(row = 17, column = 3, columnspan = 4)", "Canvas(master2, width = 300, height = 300) canvas.pack() img = PhotoImage(master = canvas,file=\"Sambal", "text1) text2.pack() master2.mainloop() def ntb2(self): master2 = Tk() master2.minsize(width = 800, height =", "= 300, height = 300) canvas.pack() img = PhotoImage(master = canvas,file=\"Gulai Taboh.png\") canvas.create_image(5,5,", "= lst[35] text2 = Text(master2, font = \"Arial 12\") text2.insert(INSERT, text1) text2.pack() master2.mainloop()", "= 2, columnspan = 4) master0.bprov2 = Button(master0, text='Sate Bulayak', command=self.ntb2, width =", "6, columnspan = 4) master0.mainloop() def kalsel1(self): master2 = Tk() master2.minsize(width = 800,", "column = 6, columnspan = 4) master0.mainloop() def jambi1(self): master2 = Tk() master2.minsize(width", "height = 300) canvas.pack() img = PhotoImage(master = canvas,file=\"Sei.png\") canvas.create_image(5,5, anchor=NW, image=img) text1", "= 4) master0.bprov2 = Button(master0, text='Bilenthango', command=self.goron2, width = 25, height=3) master0.bprov2.grid(row =", "= 4) self.bprov11 = Button(self.master, text='Banten', command=self.__prov11, width = 25) self.bprov11.grid(row = 11,", "300) canvas.pack() img = PhotoImage(master = canvas,file=\"Bubur Paddas Sambas.png\") canvas.create_image(5,5, anchor=NW, image=img) text1", "Bugis.png\") canvas.create_image(5,5, anchor=NW, image=img) text1 = lst[52] text2 = Text(master2, font = \"Arial", "300, height = 300) canvas.pack() img = PhotoImage(master = canvas,file=\"Uta Kelo.png\") canvas.create_image(5,5, anchor=NW,", "Text(master2, font = \"Arial 12\") text2.insert(INSERT, text1) text2.pack() master2.mainloop() def __prov5(self): master0 =", "command=self.ban2, width = 25, height=3) master0.bprov2.grid(row = 1, column = 6, columnspan =", "lst[8] text2 = Text(master2, font = \"Arial 12\") text2.insert(INSERT, text1) text2.pack() master2.mainloop() def", "canvas.create_image(5,5, anchor=NW, image=img) text1 
= lst[46] text2 = Text(master2, font = \"Arial 12\")", "height = 300) canvas.pack() img = PhotoImage(master = canvas,file=\"Ayam Cincane.png\") canvas.create_image(5,5, anchor=NW, image=img)", "= 600) master2.maxsize(width = 800, height = 600) master2.title(\"Makanan tradisional Kalimantan Timur\") canvas", "= 300) canvas.pack() img = PhotoImage(master = canvas,file=\"Sop Konro.png\") canvas.create_image(5,5, anchor=NW, image=img) text1", "height=3) master0.bprov1.grid(row = 1, column = 2, columnspan = 4) master0.bprov2 = Button(master0,", "1, column = 6, columnspan = 4) master0.mainloop() def kalbar1(self): master2 = Tk()", "600) master2.title(\"Makanan tradisional Sulawesi Tengah\") canvas = Canvas(master2, width = 300, height =", "canvas.create_image(5,5, anchor=NW, image=img) text1 = lst[1] text2 = Text(master2, font = \"Arial 12\")", "master0.bprov2 = Button(master0, text='Gong gong', command=self.kriau2, width = 25, height=3) master0.bprov2.grid(row = 1,", "= 300, height = 300) canvas.pack() img = PhotoImage(master = canvas,file=\"Gong Gong.png\") canvas.create_image(5,5,", "Papua Barat\") canvas = Canvas(master2, width = 300, height = 300) canvas.pack() img", "text1) text2.pack() master2.mainloop() def __prov9(self): master0 = Tk() master0.minsize(width = 450, height =", "height = 300) canvas.pack() img = PhotoImage(master = canvas,file=\"Jadah Tempe.png\") canvas.create_image(5,5, anchor=NW, image=img)", "height = 600) master2.title(\"Makanan tradisional Sulawesi Barat\") canvas = Canvas(master2, width = 300,", "800, height = 600) master2.title(\"Makanan tradisional Maluku\") canvas = Canvas(master2, width = 300,", "Canvas(master2, width = 300, height = 300) canvas.pack() img = PhotoImage(master = canvas,file=\"Gong", "= 0, column = 2, columnspan = 4) master0.bprov1 = Button(master0, text='Lawa', command=self.kalut1,", "2, columnspan = 4) master0.bprov1 = Button(master0, text=\"Se'i\", command=self.ntt1, width = 25, height=3)", "600) master2.title(\"Makanan 
tradisional Riau\") canvas = Canvas(master2, width = 300, height = 300)", "600) master2.title(\"Makanan tradisional Kalimantan Tengha\") canvas = Canvas(master2, width = 300, height =", "text1) text2.pack() master2.mainloop() def kaltim2(self): master2 = Tk() master2.minsize(width = 800, height =", "canvas.create_image(5,5, anchor=NW, image=img) text1 = lst[51] text2 = Text(master2, font = \"Arial 12\")", "self.bprov34.grid(row = 17, column = 7, columnspan = 4) self.master.mainloop() def __prov1(self): master0", "= f.readline() cmp = '' txt = '' while('<end>' not in cmp): txt", "text1) text2.pack() master2.mainloop() def sumut2(self): master2 = Tk() master2.minsize(width = 800, height =", "Kemojo.png\") canvas.create_image(5,5, anchor=NW, image=img) text1 = lst[6] text2 = Text(master2, font = \"Arial", "Text(master2, font = \"Arial 12\") text2.insert(INSERT, text1) text2.pack() master2.mainloop() def riau2(self): master2 =", "master2.title(\"Makanan tradisional Kalimantan Barat\") canvas = Canvas(master2, width = 300, height = 300)", "lst[63] text2 = Text(master2, font = \"Arial 12\") text2.insert(INSERT, text1) text2.pack() master2.mainloop() def", "text1 = lst[30] text2 = Text(master2, font = \"Arial 12\") text2.insert(INSERT, text1) text2.pack()", "Bulayak', command=self.ntb2, width = 25, height=3) master0.bprov2.grid(row = 1, column = 6, columnspan", "= canvas,file=\"Luti Gendang.png\") canvas.create_image(5,5, anchor=NW, image=img) text1 = lst[8] text2 = Text(master2, font", "Canvas(master2, width = 300, height = 300) canvas.pack() img = PhotoImage(master = canvas,file=\"Bagar", "4) master0.bprov1 = Button(master0, text='Kalumpe', command=self.kalteng1, width = 25, height=3) master0.bprov1.grid(row = 1,", "= 1, column = 6, columnspan = 4) master0.mainloop() def beng1(self): master2 =", "2, columnspan = 4) master0.bprov1 = Button(master0, text='Manday', command=self.kalsel1, width = 25, height=3)", "command=self.jabar2, width = 25, height=3) 
master0.bprov2.grid(row = 1, column = 6, columnspan =", "text = \"Pilih makanan :)\", font = \"Arial 16 bold\") master0.judul.grid(row = 0,", "command=self.beng2, width = 25, height=3) master0.bprov2.grid(row = 1, column = 6, columnspan =", "master2.mainloop() def jateng2(self): master2 = Tk() master2.minsize(width = 800, height = 600) master2.maxsize(width", "800, height = 600) master2.title(\"Makanan tradisional Sulawesi Selatan\") canvas = Canvas(master2, width =", "column = 2, columnspan = 4) master0.bprov2 = Button(master0, text='Ampiang Dadiah', command=self.sumbar2, width", "master0.judul.grid(row = 0, column = 2, columnspan = 4) master0.bprov1 = Button(master0, text='Soto", "master2.maxsize(width = 800, height = 600) master2.title(\"Makanan tradisional Bengkulu\") canvas = Canvas(master2, width", "= 1, column = 2, columnspan = 4) master0.bprov2 = Button(master0, text='Rawon', command=self.jatim2,", "columnspan = 4) self.bprov13 = Button(self.master, text='DKI Jakarta', command=self.__prov13, width = 25) self.bprov13.grid(row", "300, height = 300) canvas.pack() img = PhotoImage(master = canvas,file=\"Lepek Binti.png\") canvas.create_image(5,5, anchor=NW,", "= 7, column = 7, columnspan = 4) self.bprov25 = Button(self.master, text='Gorontalo', command=self.__prov25,", "width = 25, height=3) master0.bprov2.grid(row = 1, column = 6, columnspan = 4)", "text1 = lst[16] text2 = Text(master2, font = \"Arial 12\") text2.insert(INSERT, text1) text2.pack()", "\"Arial 12\") text2.insert(INSERT, text1) text2.pack() master2.mainloop() def kbang2(self): master2 = Tk() master2.minsize(width =", "text2 = Text(master2, font = \"Arial 12\") text2.insert(INSERT, text1) text2.pack() master2.mainloop() def __prov19(self):", "= 2, columnspan = 4) master0.bprov2 = Button(master0, text='<NAME>', command=self.kalteng2, width = 25,", "\"Arial 12\") text2.insert(INSERT, text1) text2.pack() master2.mainloop() def __prov33(self): master0 = Tk() master0.minsize(width =", "Banten\") canvas 
= Canvas(master2, width = 300, height = 300) canvas.pack() img =", "800, height = 600) master2.title(\"Makanan tradisional Sulawesi Barat\") canvas = Canvas(master2, width =", "def sulsel2(self): master2 = Tk() master2.minsize(width = 800, height = 600) master2.maxsize(width =", "master2.maxsize(width = 800, height = 600) master2.title(\"Makanan tradisional Papua Barat\") canvas = Canvas(master2,", "text2 = Text(master2, font = \"Arial 12\") text2.insert(INSERT, text1) text2.pack() master2.mainloop() def __prov5(self):", "= Text(master2, font = \"Arial 12\") text2.insert(INSERT, text1) text2.pack() master2.mainloop() def __prov6(self): master0", "text='Jawa Tengah', command=self.__prov14, width = 25) self.bprov14.grid(row = 14, column = 3, columnspan", "7, columnspan = 4) self.bprov23 = Button(self.master, text='Kalimantan Selatan', command=self.__prov23, width = 25)", "text2 = Text(master2, font = \"Arial 12\") text2.insert(INSERT, text1) text2.pack() master2.mainloop() def aceh2(self):", "12\") text2.insert(INSERT, text1) text2.pack() master2.mainloop() def __prov8(self): master0 = Tk() master0.minsize(width = 450,", "= Text(master2, font = \"Arial 12\") text2.insert(INSERT, text1) text2.pack() master2.mainloop() def __prov17(self): master0", "= 25) self.bprov6.grid(row = 6, column = 3, columnspan = 4) self.bprov7 =", "= 1, column = 6, columnspan = 4) master0.mainloop() def ntb1(self): master2 =", "text='Sumatera Selatan', command=self.__prov8, width = 25) self.bprov8.grid(row = 8, column = 3, columnspan", "width = 25) self.bprov5.grid(row = 5, column = 3, columnspan = 4) self.bprov6" ]
[ "1000) def test_simple_array_sum(): assert 31 == q2_simple_array_sum.simpleArraySum([1, 2, 3, 4, 10, 11]) def", "assert [2, 1] == q3_compare_the_triplets.compareTriplets((17, 28, 30), (99, 16, 8)) def test_a_very_big_sum(): assert", "[2, 1] == q3_compare_the_triplets.compareTriplets((17, 28, 30), (99, 16, 8)) def test_a_very_big_sum(): assert 5000000015", "\" #\\n ##\\n ###\\n ####\\n #####\\n######\\n\" assert captured.out == output def test_mini_max_sum(capsys): q8_mini_max_sum.miniMaxSum([1,", "def test_mini_max_sum(capsys): q8_mini_max_sum.miniMaxSum([1, 2, 3, 4, 5]) captured = capsys.readouterr() output = \"10", "-9, 0, 4, 1]) captured = capsys.readouterr() output = \"0.500000\\n0.333333\\n0.166667\\n\" assert captured.out ==", "= \"0.500000\\n0.333333\\n0.166667\\n\" assert captured.out == output def test_staircase(capsys): q7_staircase.staircase(6) captured = capsys.readouterr() output", "q1_solve_me_first.solveMeFirst(100, 1000) def test_simple_array_sum(): assert 31 == q2_simple_array_sum.simpleArraySum([1, 2, 3, 4, 10, 11])", "output = \" #\\n ##\\n ###\\n ####\\n #####\\n######\\n\" assert captured.out == output def", "6], [10, 8, -12]]) def test_plus_minus(capsys): q6_plus_minus.plusMinus([-4, 3, -9, 0, 4, 1]) captured", "= capsys.readouterr() output = \" #\\n ##\\n ###\\n ####\\n #####\\n######\\n\" assert captured.out ==", "from problem_solving.algorithms.warmup import * def test_solve_me_first(): assert 5 == q1_solve_me_first.solveMeFirst(2, 3) assert 1100", "test_a_very_big_sum(): assert 5000000015 == q4_a_very_big_sum.aVeryBigSum([1000000001, 1000000002, 1000000003, 1000000004, 1000000005]) def test_diagonal_difference(): assert 15", "#####\\n######\\n\" assert captured.out == output def test_mini_max_sum(capsys): q8_mini_max_sum.miniMaxSum([1, 2, 3, 4, 5]) captured", "69, 2, 221, 8974]) captured = capsys.readouterr() output = \"299 9271\\n\" assert captured.out", "output = \"10 14\\n\" assert captured.out == output q8_mini_max_sum.miniMaxSum([7, 
69, 2, 221, 8974])", "1000000004, 1000000005]) def test_diagonal_difference(): assert 15 == q5_diagonal_difference.diagonalDifference([[11, 2, 4], [4, 5, 6],", "\"0.500000\\n0.333333\\n0.166667\\n\" assert captured.out == output def test_staircase(capsys): q7_staircase.staircase(6) captured = capsys.readouterr() output =", "assert 5000000015 == q4_a_very_big_sum.aVeryBigSum([1000000001, 1000000002, 1000000003, 1000000004, 1000000005]) def test_diagonal_difference(): assert 15 ==", "capsys.readouterr() output = \"299 9271\\n\" assert captured.out == output def test_birthday_cake_candles(): assert 2", "test_compare_the_triplets(): assert [1, 1] == q3_compare_the_triplets.compareTriplets((5, 6, 7), (3, 6, 10)) assert [2,", "\"299 9271\\n\" assert captured.out == output def test_birthday_cake_candles(): assert 2 == q9_birthday_cake_candles.birthdayCakeCandles([3, 2,", "test_staircase(capsys): q7_staircase.staircase(6) captured = capsys.readouterr() output = \" #\\n ##\\n ###\\n ####\\n #####\\n######\\n\"", "= \"10 14\\n\" assert captured.out == output q8_mini_max_sum.miniMaxSum([7, 69, 2, 221, 8974]) captured", "assert captured.out == output def test_staircase(capsys): q7_staircase.staircase(6) captured = capsys.readouterr() output = \"", "= \" #\\n ##\\n ###\\n ####\\n #####\\n######\\n\" assert captured.out == output def test_mini_max_sum(capsys):", "capsys.readouterr() output = \"10 14\\n\" assert captured.out == output q8_mini_max_sum.miniMaxSum([7, 69, 2, 221,", "def test_a_very_big_sum(): assert 5000000015 == q4_a_very_big_sum.aVeryBigSum([1000000001, 1000000002, 1000000003, 1000000004, 1000000005]) def test_diagonal_difference(): assert", "16, 8)) def test_a_very_big_sum(): assert 5000000015 == q4_a_very_big_sum.aVeryBigSum([1000000001, 1000000002, 1000000003, 1000000004, 1000000005]) def", "11]) def test_compare_the_triplets(): assert [1, 1] == q3_compare_the_triplets.compareTriplets((5, 6, 7), (3, 6, 10))", "assert captured.out == output def 
test_mini_max_sum(capsys): q8_mini_max_sum.miniMaxSum([1, 2, 3, 4, 5]) captured =", "== q2_simple_array_sum.simpleArraySum([1, 2, 3, 4, 10, 11]) def test_compare_the_triplets(): assert [1, 1] ==", "assert 5 == q1_solve_me_first.solveMeFirst(2, 3) assert 1100 == q1_solve_me_first.solveMeFirst(100, 1000) def test_simple_array_sum(): assert", "2, 3, 4, 10, 11]) def test_compare_the_triplets(): assert [1, 1] == q3_compare_the_triplets.compareTriplets((5, 6,", "= capsys.readouterr() output = \"10 14\\n\" assert captured.out == output q8_mini_max_sum.miniMaxSum([7, 69, 2,", "1] == q3_compare_the_triplets.compareTriplets((5, 6, 7), (3, 6, 10)) assert [2, 1] == q3_compare_the_triplets.compareTriplets((17,", "q2_simple_array_sum.simpleArraySum([1, 2, 3, 4, 10, 11]) def test_compare_the_triplets(): assert [1, 1] == q3_compare_the_triplets.compareTriplets((5,", "== output def test_birthday_cake_candles(): assert 2 == q9_birthday_cake_candles.birthdayCakeCandles([3, 2, 1, 3]) def test_time_conversion():", "import * def test_solve_me_first(): assert 5 == q1_solve_me_first.solveMeFirst(2, 3) assert 1100 == q1_solve_me_first.solveMeFirst(100,", "output q8_mini_max_sum.miniMaxSum([7, 69, 2, 221, 8974]) captured = capsys.readouterr() output = \"299 9271\\n\"", "captured = capsys.readouterr() output = \"299 9271\\n\" assert captured.out == output def test_birthday_cake_candles():", "== output def test_staircase(capsys): q7_staircase.staircase(6) captured = capsys.readouterr() output = \" #\\n ##\\n", "captured = capsys.readouterr() output = \"0.500000\\n0.333333\\n0.166667\\n\" assert captured.out == output def test_staircase(capsys): q7_staircase.staircase(6)", "output = \"0.500000\\n0.333333\\n0.166667\\n\" assert captured.out == output def test_staircase(capsys): q7_staircase.staircase(6) captured = capsys.readouterr()", "== q4_a_very_big_sum.aVeryBigSum([1000000001, 1000000002, 1000000003, 1000000004, 1000000005]) def test_diagonal_difference(): assert 15 == 
q5_diagonal_difference.diagonalDifference([[11, 2,", "def test_diagonal_difference(): assert 15 == q5_diagonal_difference.diagonalDifference([[11, 2, 4], [4, 5, 6], [10, 8,", "#\\n ##\\n ###\\n ####\\n #####\\n######\\n\" assert captured.out == output def test_mini_max_sum(capsys): q8_mini_max_sum.miniMaxSum([1, 2,", "== q3_compare_the_triplets.compareTriplets((17, 28, 30), (99, 16, 8)) def test_a_very_big_sum(): assert 5000000015 == q4_a_very_big_sum.aVeryBigSum([1000000001,", "q8_mini_max_sum.miniMaxSum([1, 2, 3, 4, 5]) captured = capsys.readouterr() output = \"10 14\\n\" assert", "[1, 1] == q3_compare_the_triplets.compareTriplets((5, 6, 7), (3, 6, 10)) assert [2, 1] ==", "1]) captured = capsys.readouterr() output = \"0.500000\\n0.333333\\n0.166667\\n\" assert captured.out == output def test_staircase(capsys):", "def test_solve_me_first(): assert 5 == q1_solve_me_first.solveMeFirst(2, 3) assert 1100 == q1_solve_me_first.solveMeFirst(100, 1000) def", "5]) captured = capsys.readouterr() output = \"10 14\\n\" assert captured.out == output q8_mini_max_sum.miniMaxSum([7,", "(3, 6, 10)) assert [2, 1] == q3_compare_the_triplets.compareTriplets((17, 28, 30), (99, 16, 8))", "8974]) captured = capsys.readouterr() output = \"299 9271\\n\" assert captured.out == output def", "5 == q1_solve_me_first.solveMeFirst(2, 3) assert 1100 == q1_solve_me_first.solveMeFirst(100, 1000) def test_simple_array_sum(): assert 31", "* def test_solve_me_first(): assert 5 == q1_solve_me_first.solveMeFirst(2, 3) assert 1100 == q1_solve_me_first.solveMeFirst(100, 1000)", "== q3_compare_the_triplets.compareTriplets((5, 6, 7), (3, 6, 10)) assert [2, 1] == q3_compare_the_triplets.compareTriplets((17, 28,", "###\\n ####\\n #####\\n######\\n\" assert captured.out == output def test_mini_max_sum(capsys): q8_mini_max_sum.miniMaxSum([1, 2, 3, 4,", "####\\n #####\\n######\\n\" assert captured.out == output def test_mini_max_sum(capsys): q8_mini_max_sum.miniMaxSum([1, 2, 3, 4, 5])", 
"q1_solve_me_first.solveMeFirst(2, 3) assert 1100 == q1_solve_me_first.solveMeFirst(100, 1000) def test_simple_array_sum(): assert 31 == q2_simple_array_sum.simpleArraySum([1,", "221, 8974]) captured = capsys.readouterr() output = \"299 9271\\n\" assert captured.out == output", "assert 31 == q2_simple_array_sum.simpleArraySum([1, 2, 3, 4, 10, 11]) def test_compare_the_triplets(): assert [1,", "8)) def test_a_very_big_sum(): assert 5000000015 == q4_a_very_big_sum.aVeryBigSum([1000000001, 1000000002, 1000000003, 1000000004, 1000000005]) def test_diagonal_difference():", "9271\\n\" assert captured.out == output def test_birthday_cake_candles(): assert 2 == q9_birthday_cake_candles.birthdayCakeCandles([3, 2, 1,", "assert 2 == q9_birthday_cake_candles.birthdayCakeCandles([3, 2, 1, 3]) def test_time_conversion(): assert \"19:05:45\" == q10_time_conversion.timeConversion(\"07:05:45PM\")", "1] == q3_compare_the_triplets.compareTriplets((17, 28, 30), (99, 16, 8)) def test_a_very_big_sum(): assert 5000000015 ==", "assert [1, 1] == q3_compare_the_triplets.compareTriplets((5, 6, 7), (3, 6, 10)) assert [2, 1]", "output = \"299 9271\\n\" assert captured.out == output def test_birthday_cake_candles(): assert 2 ==", "1000000003, 1000000004, 1000000005]) def test_diagonal_difference(): assert 15 == q5_diagonal_difference.diagonalDifference([[11, 2, 4], [4, 5,", "def test_compare_the_triplets(): assert [1, 1] == q3_compare_the_triplets.compareTriplets((5, 6, 7), (3, 6, 10)) assert", "31 == q2_simple_array_sum.simpleArraySum([1, 2, 3, 4, 10, 11]) def test_compare_the_triplets(): assert [1, 1]", "assert 1100 == q1_solve_me_first.solveMeFirst(100, 1000) def test_simple_array_sum(): assert 31 == q2_simple_array_sum.simpleArraySum([1, 2, 3,", "1000000005]) def test_diagonal_difference(): assert 15 == q5_diagonal_difference.diagonalDifference([[11, 2, 4], [4, 5, 6], [10,", "capsys.readouterr() output = \"0.500000\\n0.333333\\n0.166667\\n\" assert captured.out == output def 
test_staircase(capsys): q7_staircase.staircase(6) captured =", "3) assert 1100 == q1_solve_me_first.solveMeFirst(100, 1000) def test_simple_array_sum(): assert 31 == q2_simple_array_sum.simpleArraySum([1, 2,", "== output def test_mini_max_sum(capsys): q8_mini_max_sum.miniMaxSum([1, 2, 3, 4, 5]) captured = capsys.readouterr() output", "captured = capsys.readouterr() output = \"10 14\\n\" assert captured.out == output q8_mini_max_sum.miniMaxSum([7, 69,", "def test_birthday_cake_candles(): assert 2 == q9_birthday_cake_candles.birthdayCakeCandles([3, 2, 1, 3]) def test_time_conversion(): assert \"19:05:45\"", "3, 4, 5]) captured = capsys.readouterr() output = \"10 14\\n\" assert captured.out ==", "-12]]) def test_plus_minus(capsys): q6_plus_minus.plusMinus([-4, 3, -9, 0, 4, 1]) captured = capsys.readouterr() output", "captured.out == output def test_staircase(capsys): q7_staircase.staircase(6) captured = capsys.readouterr() output = \" #\\n", "output def test_birthday_cake_candles(): assert 2 == q9_birthday_cake_candles.birthdayCakeCandles([3, 2, 1, 3]) def test_time_conversion(): assert", "5, 6], [10, 8, -12]]) def test_plus_minus(capsys): q6_plus_minus.plusMinus([-4, 3, -9, 0, 4, 1])", "1100 == q1_solve_me_first.solveMeFirst(100, 1000) def test_simple_array_sum(): assert 31 == q2_simple_array_sum.simpleArraySum([1, 2, 3, 4,", "== q1_solve_me_first.solveMeFirst(100, 1000) def test_simple_array_sum(): assert 31 == q2_simple_array_sum.simpleArraySum([1, 2, 3, 4, 10,", "test_simple_array_sum(): assert 31 == q2_simple_array_sum.simpleArraySum([1, 2, 3, 4, 10, 11]) def test_compare_the_triplets(): assert", "assert 15 == q5_diagonal_difference.diagonalDifference([[11, 2, 4], [4, 5, 6], [10, 8, -12]]) def", "4, 5]) captured = capsys.readouterr() output = \"10 14\\n\" assert captured.out == output", "= \"299 9271\\n\" assert captured.out == output def test_birthday_cake_candles(): assert 2 == q9_birthday_cake_candles.birthdayCakeCandles([3,", "test_mini_max_sum(capsys): 
q8_mini_max_sum.miniMaxSum([1, 2, 3, 4, 5]) captured = capsys.readouterr() output = \"10 14\\n\"", "28, 30), (99, 16, 8)) def test_a_very_big_sum(): assert 5000000015 == q4_a_very_big_sum.aVeryBigSum([1000000001, 1000000002, 1000000003,", "4, 10, 11]) def test_compare_the_triplets(): assert [1, 1] == q3_compare_the_triplets.compareTriplets((5, 6, 7), (3,", "6, 7), (3, 6, 10)) assert [2, 1] == q3_compare_the_triplets.compareTriplets((17, 28, 30), (99,", "30), (99, 16, 8)) def test_a_very_big_sum(): assert 5000000015 == q4_a_very_big_sum.aVeryBigSum([1000000001, 1000000002, 1000000003, 1000000004,", "1000000002, 1000000003, 1000000004, 1000000005]) def test_diagonal_difference(): assert 15 == q5_diagonal_difference.diagonalDifference([[11, 2, 4], [4,", "2, 221, 8974]) captured = capsys.readouterr() output = \"299 9271\\n\" assert captured.out ==", "3, -9, 0, 4, 1]) captured = capsys.readouterr() output = \"0.500000\\n0.333333\\n0.166667\\n\" assert captured.out", "test_plus_minus(capsys): q6_plus_minus.plusMinus([-4, 3, -9, 0, 4, 1]) captured = capsys.readouterr() output = \"0.500000\\n0.333333\\n0.166667\\n\"", "q7_staircase.staircase(6) captured = capsys.readouterr() output = \" #\\n ##\\n ###\\n ####\\n #####\\n######\\n\" assert", "[10, 8, -12]]) def test_plus_minus(capsys): q6_plus_minus.plusMinus([-4, 3, -9, 0, 4, 1]) captured =", "2, 3, 4, 5]) captured = capsys.readouterr() output = \"10 14\\n\" assert captured.out", "def test_plus_minus(capsys): q6_plus_minus.plusMinus([-4, 3, -9, 0, 4, 1]) captured = capsys.readouterr() output =", "== q5_diagonal_difference.diagonalDifference([[11, 2, 4], [4, 5, 6], [10, 8, -12]]) def test_plus_minus(capsys): q6_plus_minus.plusMinus([-4,", "##\\n ###\\n ####\\n #####\\n######\\n\" assert captured.out == output def test_mini_max_sum(capsys): q8_mini_max_sum.miniMaxSum([1, 2, 3,", "= capsys.readouterr() output = \"299 9271\\n\" assert captured.out == output def test_birthday_cake_candles(): assert", "0, 4, 1]) captured = 
capsys.readouterr() output = \"0.500000\\n0.333333\\n0.166667\\n\" assert captured.out == output", "4, 1]) captured = capsys.readouterr() output = \"0.500000\\n0.333333\\n0.166667\\n\" assert captured.out == output def", "q3_compare_the_triplets.compareTriplets((5, 6, 7), (3, 6, 10)) assert [2, 1] == q3_compare_the_triplets.compareTriplets((17, 28, 30),", "assert captured.out == output q8_mini_max_sum.miniMaxSum([7, 69, 2, 221, 8974]) captured = capsys.readouterr() output", "10, 11]) def test_compare_the_triplets(): assert [1, 1] == q3_compare_the_triplets.compareTriplets((5, 6, 7), (3, 6,", "output def test_mini_max_sum(capsys): q8_mini_max_sum.miniMaxSum([1, 2, 3, 4, 5]) captured = capsys.readouterr() output =", "captured.out == output def test_mini_max_sum(capsys): q8_mini_max_sum.miniMaxSum([1, 2, 3, 4, 5]) captured = capsys.readouterr()", "2, 4], [4, 5, 6], [10, 8, -12]]) def test_plus_minus(capsys): q6_plus_minus.plusMinus([-4, 3, -9,", "= capsys.readouterr() output = \"0.500000\\n0.333333\\n0.166667\\n\" assert captured.out == output def test_staircase(capsys): q7_staircase.staircase(6) captured", "test_solve_me_first(): assert 5 == q1_solve_me_first.solveMeFirst(2, 3) assert 1100 == q1_solve_me_first.solveMeFirst(100, 1000) def test_simple_array_sum():", "capsys.readouterr() output = \" #\\n ##\\n ###\\n ####\\n #####\\n######\\n\" assert captured.out == output", "def test_staircase(capsys): q7_staircase.staircase(6) captured = capsys.readouterr() output = \" #\\n ##\\n ###\\n ####\\n", "3, 4, 10, 11]) def test_compare_the_triplets(): assert [1, 1] == q3_compare_the_triplets.compareTriplets((5, 6, 7),", "== output q8_mini_max_sum.miniMaxSum([7, 69, 2, 221, 8974]) captured = capsys.readouterr() output = \"299", "test_birthday_cake_candles(): assert 2 == q9_birthday_cake_candles.birthdayCakeCandles([3, 2, 1, 3]) def test_time_conversion(): assert \"19:05:45\" ==", "def test_simple_array_sum(): assert 31 == q2_simple_array_sum.simpleArraySum([1, 2, 3, 4, 10, 
11]) def test_compare_the_triplets():", "\"10 14\\n\" assert captured.out == output q8_mini_max_sum.miniMaxSum([7, 69, 2, 221, 8974]) captured =", "q4_a_very_big_sum.aVeryBigSum([1000000001, 1000000002, 1000000003, 1000000004, 1000000005]) def test_diagonal_difference(): assert 15 == q5_diagonal_difference.diagonalDifference([[11, 2, 4],", "assert captured.out == output def test_birthday_cake_candles(): assert 2 == q9_birthday_cake_candles.birthdayCakeCandles([3, 2, 1, 3])", "output def test_staircase(capsys): q7_staircase.staircase(6) captured = capsys.readouterr() output = \" #\\n ##\\n ###\\n", "q3_compare_the_triplets.compareTriplets((17, 28, 30), (99, 16, 8)) def test_a_very_big_sum(): assert 5000000015 == q4_a_very_big_sum.aVeryBigSum([1000000001, 1000000002,", "5000000015 == q4_a_very_big_sum.aVeryBigSum([1000000001, 1000000002, 1000000003, 1000000004, 1000000005]) def test_diagonal_difference(): assert 15 == q5_diagonal_difference.diagonalDifference([[11,", "[4, 5, 6], [10, 8, -12]]) def test_plus_minus(capsys): q6_plus_minus.plusMinus([-4, 3, -9, 0, 4,", "q6_plus_minus.plusMinus([-4, 3, -9, 0, 4, 1]) captured = capsys.readouterr() output = \"0.500000\\n0.333333\\n0.166667\\n\" assert", "q8_mini_max_sum.miniMaxSum([7, 69, 2, 221, 8974]) captured = capsys.readouterr() output = \"299 9271\\n\" assert", "problem_solving.algorithms.warmup import * def test_solve_me_first(): assert 5 == q1_solve_me_first.solveMeFirst(2, 3) assert 1100 ==", "(99, 16, 8)) def test_a_very_big_sum(): assert 5000000015 == q4_a_very_big_sum.aVeryBigSum([1000000001, 1000000002, 1000000003, 1000000004, 1000000005])", "captured.out == output q8_mini_max_sum.miniMaxSum([7, 69, 2, 221, 8974]) captured = capsys.readouterr() output =", "== q1_solve_me_first.solveMeFirst(2, 3) assert 1100 == q1_solve_me_first.solveMeFirst(100, 1000) def test_simple_array_sum(): assert 31 ==", "4], [4, 5, 6], [10, 8, -12]]) def test_plus_minus(capsys): q6_plus_minus.plusMinus([-4, 3, -9, 0,", 
"test_diagonal_difference(): assert 15 == q5_diagonal_difference.diagonalDifference([[11, 2, 4], [4, 5, 6], [10, 8, -12]])", "10)) assert [2, 1] == q3_compare_the_triplets.compareTriplets((17, 28, 30), (99, 16, 8)) def test_a_very_big_sum():", "7), (3, 6, 10)) assert [2, 1] == q3_compare_the_triplets.compareTriplets((17, 28, 30), (99, 16,", "8, -12]]) def test_plus_minus(capsys): q6_plus_minus.plusMinus([-4, 3, -9, 0, 4, 1]) captured = capsys.readouterr()", "14\\n\" assert captured.out == output q8_mini_max_sum.miniMaxSum([7, 69, 2, 221, 8974]) captured = capsys.readouterr()", "6, 10)) assert [2, 1] == q3_compare_the_triplets.compareTriplets((17, 28, 30), (99, 16, 8)) def", "15 == q5_diagonal_difference.diagonalDifference([[11, 2, 4], [4, 5, 6], [10, 8, -12]]) def test_plus_minus(capsys):", "captured = capsys.readouterr() output = \" #\\n ##\\n ###\\n ####\\n #####\\n######\\n\" assert captured.out", "captured.out == output def test_birthday_cake_candles(): assert 2 == q9_birthday_cake_candles.birthdayCakeCandles([3, 2, 1, 3]) def", "q5_diagonal_difference.diagonalDifference([[11, 2, 4], [4, 5, 6], [10, 8, -12]]) def test_plus_minus(capsys): q6_plus_minus.plusMinus([-4, 3," ]
[ "class IZaberFlaskLocalWAMP(object): \"\"\" Allows the creation and sending of calls and functions \"\"\"", "decorator to attach to when someone connects \"\"\" return lambda f: self.on_connect.append(f) def", "options) return f return actual_subscribe_decorator def publish(self,topic,options=None,args=None,kwargs=None): self.app.publish(PUBLISH( options=options or {}, topic=topic, args=args", "f(client) def do_wamp_disconnect(self,client): \"\"\" A decorator to attach to when someone disconnects \"\"\"", "to attach to when someone authenticateds \"\"\" for f in self.on_authenticated: f(client) def", "to attach to when someone disconnects \"\"\" return lambda f: self.on_disconnect.append(f) def do_wamp_connect(self,client):", "in self.on_connect: f(client) def do_wamp_authenticated(self,client): \"\"\" A decorator to attach to when someone", "def do_wamp_disconnect(self,client): \"\"\" A decorator to attach to when someone disconnects \"\"\" for", "decorator to register a callback \"\"\" def actual_register_decorator(f): self.app.register_local(uri, f, options) return f", "return actual_subscribe_decorator def publish(self,topic,options=None,args=None,kwargs=None): self.app.publish(PUBLISH( options=options or {}, topic=topic, args=args or [], kwargs=kwargs", "self.app = app self.on_connect = [] self.on_disconnect = [] self.on_authenticated = [] def", "attach to when someone connects \"\"\" for f in self.on_connect: f(client) def do_wamp_authenticated(self,client):", "creation and sending of calls and functions \"\"\" def __init__(self,sockets,app): self.sockets = sockets", "= app self.on_connect = [] self.on_disconnect = [] self.on_authenticated = [] def register(self,uri,options=None):", "\"\"\" return lambda f: self.on_connect.append(f) def wamp_authenticated(self): \"\"\" A decorator to attach to", "{}, topic=topic, args=args or [], kwargs=kwargs or {} )) def wamp_connect(self): \"\"\" A", "decorator to subscribe a callback \"\"\" def 
actual_subscribe_decorator(f): self.app.subscribe_local(uri, f, options) return f", "wamp_connect(self): \"\"\" A decorator to attach to when someone connects \"\"\" return lambda", "\"\"\" A decorator to attach to when someone authenticates \"\"\" return lambda f:", "decorator to attach to when someone authenticateds \"\"\" for f in self.on_authenticated: f(client)", "= sockets self.app = app self.on_connect = [] self.on_disconnect = [] self.on_authenticated =", "A decorator to attach to when someone connects \"\"\" for f in self.on_connect:", "connects \"\"\" for f in self.on_connect: f(client) def do_wamp_authenticated(self,client): \"\"\" A decorator to", "self.on_authenticated = [] def register(self,uri,options=None): \"\"\" A method to use a decorator to", "\"\"\" return lambda f: self.on_disconnect.append(f) def do_wamp_connect(self,client): \"\"\" A decorator to attach to", "f(client) def do_wamp_authenticated(self,client): \"\"\" A decorator to attach to when someone authenticateds \"\"\"", "A method to use a decorator to subscribe a callback \"\"\" def actual_subscribe_decorator(f):", "and sending of calls and functions \"\"\" def __init__(self,sockets,app): self.sockets = sockets self.app", "actual_register_decorator(f): self.app.register_local(uri, f, options) return f return actual_register_decorator def subscribe(self,uri,options=None): \"\"\" A method", "A decorator to attach to when someone connects \"\"\" return lambda f: self.on_connect.append(f)", "or [], kwargs=kwargs or {} )) def wamp_connect(self): \"\"\" A decorator to attach", "to register a callback \"\"\" def actual_register_decorator(f): self.app.register_local(uri, f, options) return f return", "to when someone authenticateds \"\"\" for f in self.on_authenticated: f(client) def do_wamp_disconnect(self,client): \"\"\"", "someone authenticates \"\"\" return lambda f: self.on_authenticated.append(f) def wamp_disconnect(self): \"\"\" A decorator to", "of calls and functions \"\"\" def 
__init__(self,sockets,app): self.sockets = sockets self.app = app", "self.app.register_local(uri, f, options) return f return actual_register_decorator def subscribe(self,uri,options=None): \"\"\" A method to", "A decorator to attach to when someone authenticates \"\"\" return lambda f: self.on_authenticated.append(f)", "def do_wamp_connect(self,client): \"\"\" A decorator to attach to when someone connects \"\"\" for", "f, options) return f return actual_subscribe_decorator def publish(self,topic,options=None,args=None,kwargs=None): self.app.publish(PUBLISH( options=options or {}, topic=topic,", "decorator to attach to when someone connects \"\"\" for f in self.on_connect: f(client)", "do_wamp_disconnect(self,client): \"\"\" A decorator to attach to when someone disconnects \"\"\" for f", "self.app.publish(PUBLISH( options=options or {}, topic=topic, args=args or [], kwargs=kwargs or {} )) def", ".app import * class IZaberFlaskLocalWAMP(object): \"\"\" Allows the creation and sending of calls", "A decorator to attach to when someone disconnects \"\"\" return lambda f: self.on_disconnect.append(f)", "someone disconnects \"\"\" return lambda f: self.on_disconnect.append(f) def do_wamp_connect(self,client): \"\"\" A decorator to", "A method to use a decorator to register a callback \"\"\" def actual_register_decorator(f):", "attach to when someone connects \"\"\" return lambda f: self.on_connect.append(f) def wamp_authenticated(self): \"\"\"", "functions \"\"\" def __init__(self,sockets,app): self.sockets = sockets self.app = app self.on_connect = []", "when someone authenticates \"\"\" return lambda f: self.on_authenticated.append(f) def wamp_disconnect(self): \"\"\" A decorator", "to attach to when someone connects \"\"\" for f in self.on_connect: f(client) def", "\"\"\" def actual_subscribe_decorator(f): self.app.subscribe_local(uri, f, options) return f return actual_subscribe_decorator def publish(self,topic,options=None,args=None,kwargs=None): 
self.app.publish(PUBLISH(", "self.on_disconnect.append(f) def do_wamp_connect(self,client): \"\"\" A decorator to attach to when someone connects \"\"\"", "to when someone connects \"\"\" for f in self.on_connect: f(client) def do_wamp_authenticated(self,client): \"\"\"", "def wamp_authenticated(self): \"\"\" A decorator to attach to when someone authenticates \"\"\" return", "{} )) def wamp_connect(self): \"\"\" A decorator to attach to when someone connects", "options) return f return actual_register_decorator def subscribe(self,uri,options=None): \"\"\" A method to use a", "to attach to when someone connects \"\"\" return lambda f: self.on_connect.append(f) def wamp_authenticated(self):", "def wamp_disconnect(self): \"\"\" A decorator to attach to when someone disconnects \"\"\" return", "to use a decorator to register a callback \"\"\" def actual_register_decorator(f): self.app.register_local(uri, f,", "f return actual_subscribe_decorator def publish(self,topic,options=None,args=None,kwargs=None): self.app.publish(PUBLISH( options=options or {}, topic=topic, args=args or [],", "\"\"\" A decorator to attach to when someone disconnects \"\"\" return lambda f:", "\"\"\" A decorator to attach to when someone connects \"\"\" for f in", "def actual_subscribe_decorator(f): self.app.subscribe_local(uri, f, options) return f return actual_subscribe_decorator def publish(self,topic,options=None,args=None,kwargs=None): self.app.publish(PUBLISH( options=options", ")) def wamp_connect(self): \"\"\" A decorator to attach to when someone connects \"\"\"", "f: self.on_connect.append(f) def wamp_authenticated(self): \"\"\" A decorator to attach to when someone authenticates", "def do_wamp_authenticated(self,client): \"\"\" A decorator to attach to when someone authenticateds \"\"\" for", "* class IZaberFlaskLocalWAMP(object): \"\"\" Allows the creation and sending of calls and functions", "register a callback \"\"\" def actual_register_decorator(f): self.app.register_local(uri, 
f, options) return f return actual_register_decorator", "for f in self.on_authenticated: f(client) def do_wamp_disconnect(self,client): \"\"\" A decorator to attach to", "\"\"\" def __init__(self,sockets,app): self.sockets = sockets self.app = app self.on_connect = [] self.on_disconnect", "callback \"\"\" def actual_subscribe_decorator(f): self.app.subscribe_local(uri, f, options) return f return actual_subscribe_decorator def publish(self,topic,options=None,args=None,kwargs=None):", "callback \"\"\" def actual_register_decorator(f): self.app.register_local(uri, f, options) return f return actual_register_decorator def subscribe(self,uri,options=None):", "[], kwargs=kwargs or {} )) def wamp_connect(self): \"\"\" A decorator to attach to", "\"\"\" A decorator to attach to when someone connects \"\"\" return lambda f:", "f in self.on_authenticated: f(client) def do_wamp_disconnect(self,client): \"\"\" A decorator to attach to when", "attach to when someone disconnects \"\"\" return lambda f: self.on_disconnect.append(f) def do_wamp_connect(self,client): \"\"\"", "when someone connects \"\"\" for f in self.on_connect: f(client) def do_wamp_authenticated(self,client): \"\"\" A", "\"\"\" for f in self.on_authenticated: f(client) def do_wamp_disconnect(self,client): \"\"\" A decorator to attach", "attach to when someone authenticateds \"\"\" for f in self.on_authenticated: f(client) def do_wamp_disconnect(self,client):", "to when someone connects \"\"\" return lambda f: self.on_connect.append(f) def wamp_authenticated(self): \"\"\" A", "def register(self,uri,options=None): \"\"\" A method to use a decorator to register a callback", "return lambda f: self.on_disconnect.append(f) def do_wamp_connect(self,client): \"\"\" A decorator to attach to when", "when someone authenticateds \"\"\" for f in self.on_authenticated: f(client) def do_wamp_disconnect(self,client): \"\"\" A", "when someone disconnects \"\"\" return lambda f: self.on_disconnect.append(f) def 
do_wamp_connect(self,client): \"\"\" A decorator", "calls and functions \"\"\" def __init__(self,sockets,app): self.sockets = sockets self.app = app self.on_connect", "self.on_authenticated: f(client) def do_wamp_disconnect(self,client): \"\"\" A decorator to attach to when someone disconnects", "to attach to when someone authenticates \"\"\" return lambda f: self.on_authenticated.append(f) def wamp_disconnect(self):", "return lambda f: self.on_authenticated.append(f) def wamp_disconnect(self): \"\"\" A decorator to attach to when", "import * class IZaberFlaskLocalWAMP(object): \"\"\" Allows the creation and sending of calls and", "[] def register(self,uri,options=None): \"\"\" A method to use a decorator to register a", "or {} )) def wamp_connect(self): \"\"\" A decorator to attach to when someone", "options=options or {}, topic=topic, args=args or [], kwargs=kwargs or {} )) def wamp_connect(self):", "a callback \"\"\" def actual_subscribe_decorator(f): self.app.subscribe_local(uri, f, options) return f return actual_subscribe_decorator def", "publish(self,topic,options=None,args=None,kwargs=None): self.app.publish(PUBLISH( options=options or {}, topic=topic, args=args or [], kwargs=kwargs or {} ))", "[] self.on_authenticated = [] def register(self,uri,options=None): \"\"\" A method to use a decorator", "use a decorator to subscribe a callback \"\"\" def actual_subscribe_decorator(f): self.app.subscribe_local(uri, f, options)", "self.on_connect: f(client) def do_wamp_authenticated(self,client): \"\"\" A decorator to attach to when someone authenticateds", "authenticateds \"\"\" for f in self.on_authenticated: f(client) def do_wamp_disconnect(self,client): \"\"\" A decorator to", "self.on_authenticated.append(f) def wamp_disconnect(self): \"\"\" A decorator to attach to when someone disconnects \"\"\"", "authenticates \"\"\" return lambda f: self.on_authenticated.append(f) def wamp_disconnect(self): \"\"\" A decorator to attach", "self.on_connect.append(f) def 
wamp_authenticated(self): \"\"\" A decorator to attach to when someone authenticates \"\"\"", "lambda f: self.on_authenticated.append(f) def wamp_disconnect(self): \"\"\" A decorator to attach to when someone", "return lambda f: self.on_connect.append(f) def wamp_authenticated(self): \"\"\" A decorator to attach to when", "from .app import * class IZaberFlaskLocalWAMP(object): \"\"\" Allows the creation and sending of", "\"\"\" return lambda f: self.on_authenticated.append(f) def wamp_disconnect(self): \"\"\" A decorator to attach to", "= [] def register(self,uri,options=None): \"\"\" A method to use a decorator to register", "someone connects \"\"\" return lambda f: self.on_connect.append(f) def wamp_authenticated(self): \"\"\" A decorator to", "do_wamp_connect(self,client): \"\"\" A decorator to attach to when someone connects \"\"\" for f", "sending of calls and functions \"\"\" def __init__(self,sockets,app): self.sockets = sockets self.app =", "topic=topic, args=args or [], kwargs=kwargs or {} )) def wamp_connect(self): \"\"\" A decorator", "self.on_disconnect = [] self.on_authenticated = [] def register(self,uri,options=None): \"\"\" A method to use", "def actual_register_decorator(f): self.app.register_local(uri, f, options) return f return actual_register_decorator def subscribe(self,uri,options=None): \"\"\" A", "def publish(self,topic,options=None,args=None,kwargs=None): self.app.publish(PUBLISH( options=options or {}, topic=topic, args=args or [], kwargs=kwargs or {}", "to when someone disconnects \"\"\" return lambda f: self.on_disconnect.append(f) def do_wamp_connect(self,client): \"\"\" A", "to subscribe a callback \"\"\" def actual_subscribe_decorator(f): self.app.subscribe_local(uri, f, options) return f return", "f, options) return f return actual_register_decorator def subscribe(self,uri,options=None): \"\"\" A method to use", "__init__(self,sockets,app): self.sockets = sockets self.app = app self.on_connect = [] self.on_disconnect = []", "a 
decorator to register a callback \"\"\" def actual_register_decorator(f): self.app.register_local(uri, f, options) return", "use a decorator to register a callback \"\"\" def actual_register_decorator(f): self.app.register_local(uri, f, options)", "def wamp_connect(self): \"\"\" A decorator to attach to when someone connects \"\"\" return", "\"\"\" A decorator to attach to when someone authenticateds \"\"\" for f in", "wamp_authenticated(self): \"\"\" A decorator to attach to when someone authenticates \"\"\" return lambda", "A decorator to attach to when someone authenticateds \"\"\" for f in self.on_authenticated:", "kwargs=kwargs or {} )) def wamp_connect(self): \"\"\" A decorator to attach to when", "decorator to attach to when someone authenticates \"\"\" return lambda f: self.on_authenticated.append(f) def", "Allows the creation and sending of calls and functions \"\"\" def __init__(self,sockets,app): self.sockets", "wamp_disconnect(self): \"\"\" A decorator to attach to when someone disconnects \"\"\" return lambda", "lambda f: self.on_disconnect.append(f) def do_wamp_connect(self,client): \"\"\" A decorator to attach to when someone", "A decorator to attach to when someone disconnects \"\"\" for f in self.on_disconnect:", "decorator to attach to when someone disconnects \"\"\" for f in self.on_disconnect: f(client)", "to use a decorator to subscribe a callback \"\"\" def actual_subscribe_decorator(f): self.app.subscribe_local(uri, f,", "= [] self.on_disconnect = [] self.on_authenticated = [] def register(self,uri,options=None): \"\"\" A method", "\"\"\" Allows the creation and sending of calls and functions \"\"\" def __init__(self,sockets,app):", "<filename>izaber_flask_wamp/wamp.py<gh_stars>0 from .app import * class IZaberFlaskLocalWAMP(object): \"\"\" Allows the creation and sending", "self.app.subscribe_local(uri, f, options) return f return actual_subscribe_decorator def publish(self,topic,options=None,args=None,kwargs=None): self.app.publish(PUBLISH( 
options=options or {},", "args=args or [], kwargs=kwargs or {} )) def wamp_connect(self): \"\"\" A decorator to", "actual_subscribe_decorator(f): self.app.subscribe_local(uri, f, options) return f return actual_subscribe_decorator def publish(self,topic,options=None,args=None,kwargs=None): self.app.publish(PUBLISH( options=options or", "method to use a decorator to subscribe a callback \"\"\" def actual_subscribe_decorator(f): self.app.subscribe_local(uri,", "IZaberFlaskLocalWAMP(object): \"\"\" Allows the creation and sending of calls and functions \"\"\" def", "self.sockets = sockets self.app = app self.on_connect = [] self.on_disconnect = [] self.on_authenticated", "disconnects \"\"\" return lambda f: self.on_disconnect.append(f) def do_wamp_connect(self,client): \"\"\" A decorator to attach", "actual_subscribe_decorator def publish(self,topic,options=None,args=None,kwargs=None): self.app.publish(PUBLISH( options=options or {}, topic=topic, args=args or [], kwargs=kwargs or", "\"\"\" A method to use a decorator to subscribe a callback \"\"\" def", "return f return actual_subscribe_decorator def publish(self,topic,options=None,args=None,kwargs=None): self.app.publish(PUBLISH( options=options or {}, topic=topic, args=args or", "and functions \"\"\" def __init__(self,sockets,app): self.sockets = sockets self.app = app self.on_connect =", "subscribe a callback \"\"\" def actual_subscribe_decorator(f): self.app.subscribe_local(uri, f, options) return f return actual_subscribe_decorator", "someone connects \"\"\" for f in self.on_connect: f(client) def do_wamp_authenticated(self,client): \"\"\" A decorator", "in self.on_authenticated: f(client) def do_wamp_disconnect(self,client): \"\"\" A decorator to attach to when someone", "attach to when someone authenticates \"\"\" return lambda f: self.on_authenticated.append(f) def wamp_disconnect(self): \"\"\"", "someone authenticateds \"\"\" for f in self.on_authenticated: f(client) def do_wamp_disconnect(self,client): 
\"\"\" A decorator", "app self.on_connect = [] self.on_disconnect = [] self.on_authenticated = [] def register(self,uri,options=None): \"\"\"", "f return actual_register_decorator def subscribe(self,uri,options=None): \"\"\" A method to use a decorator to", "connects \"\"\" return lambda f: self.on_connect.append(f) def wamp_authenticated(self): \"\"\" A decorator to attach", "subscribe(self,uri,options=None): \"\"\" A method to use a decorator to subscribe a callback \"\"\"", "return f return actual_register_decorator def subscribe(self,uri,options=None): \"\"\" A method to use a decorator", "actual_register_decorator def subscribe(self,uri,options=None): \"\"\" A method to use a decorator to subscribe a", "\"\"\" for f in self.on_connect: f(client) def do_wamp_authenticated(self,client): \"\"\" A decorator to attach", "a decorator to subscribe a callback \"\"\" def actual_subscribe_decorator(f): self.app.subscribe_local(uri, f, options) return", "\"\"\" A method to use a decorator to register a callback \"\"\" def", "def __init__(self,sockets,app): self.sockets = sockets self.app = app self.on_connect = [] self.on_disconnect =", "to when someone authenticates \"\"\" return lambda f: self.on_authenticated.append(f) def wamp_disconnect(self): \"\"\" A", "lambda f: self.on_connect.append(f) def wamp_authenticated(self): \"\"\" A decorator to attach to when someone", "the creation and sending of calls and functions \"\"\" def __init__(self,sockets,app): self.sockets =", "a callback \"\"\" def actual_register_decorator(f): self.app.register_local(uri, f, options) return f return actual_register_decorator def", "def subscribe(self,uri,options=None): \"\"\" A method to use a decorator to subscribe a callback", "\"\"\" def actual_register_decorator(f): self.app.register_local(uri, f, options) return f return actual_register_decorator def subscribe(self,uri,options=None): \"\"\"", "do_wamp_authenticated(self,client): \"\"\" A decorator to attach to when someone 
authenticateds \"\"\" for f", "self.on_connect = [] self.on_disconnect = [] self.on_authenticated = [] def register(self,uri,options=None): \"\"\" A", "register(self,uri,options=None): \"\"\" A method to use a decorator to register a callback \"\"\"", "sockets self.app = app self.on_connect = [] self.on_disconnect = [] self.on_authenticated = []", "f: self.on_authenticated.append(f) def wamp_disconnect(self): \"\"\" A decorator to attach to when someone disconnects", "when someone connects \"\"\" return lambda f: self.on_connect.append(f) def wamp_authenticated(self): \"\"\" A decorator", "decorator to attach to when someone disconnects \"\"\" return lambda f: self.on_disconnect.append(f) def", "or {}, topic=topic, args=args or [], kwargs=kwargs or {} )) def wamp_connect(self): \"\"\"", "for f in self.on_connect: f(client) def do_wamp_authenticated(self,client): \"\"\" A decorator to attach to", "f in self.on_connect: f(client) def do_wamp_authenticated(self,client): \"\"\" A decorator to attach to when", "f: self.on_disconnect.append(f) def do_wamp_connect(self,client): \"\"\" A decorator to attach to when someone connects", "[] self.on_disconnect = [] self.on_authenticated = [] def register(self,uri,options=None): \"\"\" A method to", "return actual_register_decorator def subscribe(self,uri,options=None): \"\"\" A method to use a decorator to subscribe", "\"\"\" A decorator to attach to when someone disconnects \"\"\" for f in", "= [] self.on_authenticated = [] def register(self,uri,options=None): \"\"\" A method to use a", "method to use a decorator to register a callback \"\"\" def actual_register_decorator(f): self.app.register_local(uri," ]
[ "\"\"\"Get all the childen Nodes. Generate or get from the memory, all the", "options left = tie break next_pos = helpers.next_position(sim_board, column) sim_board[column, next_pos] = color", "(*) required when creating a new root `Node` object. (**) required when creating", "def children(self): \"\"\"Get all the childen Nodes. Generate or get from the memory,", "required when creating a new \"children\" `Node` object (`new_node` method). # Properties UCB1:", "in the first visit of the Node, the value is stored in the", "of the Node, otherwise it will return `False`. Each rollout adds a visit", "# Return float, score of the Node `board` \"\"\" pass def rollout(self): \"\"\"Node", "A new instance of `Node` object. \"\"\" node_type = type(self) node = node_type(board=board,", "number of score is the sum of all scores made by the Node", "draw. # Example AgentSimulation: [documentation](./agents#agentsimulation) \"\"\" winner = helpers.check_winner(board) if winner: return winner", "== None: #no options left = tie break next_pos = helpers.next_position(sim_board, column) sim_board[column,", "parent: `Node` object, required (**), default None - Node above in the tree", "played randomly till the board hits a terminal state then the value is", "else parent.memory self.__score = 0 self._visits = 0 self._children = None super().__init__() if", "visits value: float, total number of score divided by the number of visits", "[] board = self.board for column, row in helpers.available_positions(board): board[column,row] = self.color new_board", "next_pos = helpers.next_position(sim_board, column) sim_board[column, next_pos] = color color = 1 if color", "board: matrix, new board state column: int, index of the last column #", "of the simulation or zero in case the simulation ends in a draw.", "new root `Node` object. 
(**) required when creating a new \"children\" `Node` object", "by the number of visits - the total number of score is the", "implement the `rollout_score` method which is an evaluation of a given state. This", "``` class AgentNew(AgentBase, TimerStrategy): def action(self, board): DepthMeasure.start() self.start_timer(...) while not self.time_out: #", "all the childen Nodes. Generate or get from the memory, all the childen", "method which is an evaluation of a given state. This class uses a", "= helpers.check_winner(board) if winner: return winner sim_board = copy(board) end = False while", "(`self.board`), from the perspective of the player id 1. This method is called", "add_visit(self): self._visits += 1 if self.parent: self.parent.add_visit() def children(self): \"\"\"Get all the childen", "1 @staticmethod def reset(): \"\"\"Reset the depth before start a new search episode.", "return winner sim_board = copy(board) end = False while not end: column =", "color=1): self.board = board self.color = color self.position = position self.parent = parent", "tree. # Return `True` when the rollout occur, `False` when it do not.", "It will execute a rollout if this is the first visits of the", "# Example AgentSimulation: [documentation](./agents#agentsimulation) \"\"\" winner = helpers.check_winner(board) if winner: return winner sim_board", "column # Return A new instance of `Node` object. \"\"\" node_type = type(self)", "column) sim_board[column, next_pos] = color color = 1 if color == -1 else", "value(self): if self._visits == 0: return 0 return self.__score/self._visits def _save(self): self.memory[self._hash] =", "is returned. # Arguments board: matrix, required, board state to be simulated color:", "self.run_search() DepthMeasure.reset() DepthMeasure.print() return ... ``` \"\"\" current_depth = 0 deepiest = 0", "matrix, required, board state to be simulated color: int, required, id of the", "(*) of the node. 
visits: int, total number of Node visits value: float,", "len(self._children) > 0 else None def new_node(self, board, column): \"\"\"This method is called", "node_type = type(self) node = node_type(board=board, parent=self, position=column, color=-self.color) return node def _get_memory(self,", "DepthMeasure.print() return ... ``` \"\"\" current_depth = 0 deepiest = 0 @staticmethod def", "the perspective of the player id 1. This method is called every rollout.", "simulation or zero in case the simulation ends in a draw. # Example", "which is an evaluation of a given state. This class uses a __zobrist", "the counter and the score of the `board` to the Node and all", "+= 1 if self.parent: self.parent.add_visit() def children(self): \"\"\"Get all the childen Nodes. Generate", "the memory, all the childen Nodes. Each node is generated from the available", "hash = self.hash(board, color) if hash in self.memory: return hash else: return def", "parent: self._hash = self._gen_hash() self._save() def __init_subclass__(cls): if 'rollout_score' not in cls.__dict__: raise", "by the Node and his children. (*) UBC1 is an algorithm which calculates", "row in helpers.available_positions(board): board[column,row] = self.color new_board = deepcopy(board) board[column,row] = 0 node", "scores made by the Node and his children. (*) UBC1 is an algorithm", "node: node = self.new_node(new_board, column) childs.append(node) self._children = childs return self._children if len(self._children)", "end = False while not end: column = self.random_choice(sim_board) if column == None:", "perspective of the player id 1. This method is called every rollout. 
The", "self.position = position self.parent = parent self.memory = memory if memory is not", "parent.memory self.__score = 0 self._visits = 0 self._children = None super().__init__() if parent:", "position=None, color=1): self.board = board self.color = color self.position = position self.parent =", "= tie break next_pos = helpers.next_position(sim_board, column) sim_board[column, next_pos] = color color =", "sim_board[column, next_pos] = color color = 1 if color == -1 else -1", "= [] board = self.board for column, row in helpers.available_positions(board): board[column,row] = self.color", "reset(): \"\"\"Reset the depth before start a new search episode. Save the current", "board state memory: empty dictionary, required(*), default None - this dictionary will store", "required (**), default None - Node above in the tree hierarchy. position: int,", "@staticmethod def reset(): \"\"\"Reset the depth before start a new search episode. Save", "matrix, required, board state memory: empty dictionary, required(*), default None - this dictionary", "to simulate matches.\"\"\" def simulate(self, board, color=-1): \"\"\"Simulate a match to the end", "it is __not__ recalculated during the match. # Return float, score of the", "matrix, new board state column: int, index of the last column # Return", "`Node` object. \"\"\" node_type = type(self) node = node_type(board=board, parent=self, position=column, color=-self.color) return", "method to generate a new Node. # Arguments: board: matrix, new board state", "= node_type(board=board, parent=self, position=column, color=-self.color) return node def _get_memory(self, board, color): hash =", "if color == -1 else -1 winner = helpers.check_winner(sim_board) if winner: return winner", "DepthMeasure.current_depth > DepthMeasure.deepiest: DepthMeasure.deepiest = DepthMeasure.current_depth DepthMeasure.current_depth = 0 @staticmethod def print(): \"\"\"Print", "object. 
(**) required when creating a new \"children\" `Node` object (`new_node` method). #", "the tree. # Return `True` when the rollout occur, `False` when it do", "default None - Index of the column which generated the board current `board`.", "helpers.available_positions(board): board[column,row] = self.color new_board = deepcopy(board) board[column,row] = 0 node = self._get_memory(new_board,", "the search. # Arguments board: matrix, required, board state memory: empty dictionary, required(*),", "DepthMeasure: \"\"\"Use this class to help when measuring the depth of a tree", "parent=None, position=None, color=1): self.board = board self.color = color self.position = position self.parent", "def _get_memory(self, board, color): hash = self.hash(board, color) if hash in self.memory: return", "= self.random_choice(sim_board) if column == None: #no options left = tie break next_pos", "not end: column = self.random_choice(sim_board) if column == None: #no options left =", "exploration and exploitation in the Monte Carlo Tree Search strategy. # Example AgentMonteCarlo:", "node = node_type(board=board, parent=self, position=column, color=-self.color) return node def _get_memory(self, board, color): hash", "self._visits += 1 if self.parent: self.parent.add_visit() def children(self): \"\"\"Get all the childen Nodes.", "else: return False def add_score(self, value): self.__score += value if self.parent: self.parent.add_score(value) def", "the zobrist table and it is __not__ recalculated during the match. # Return", "self._hash = self._gen_hash() self._save() def __init_subclass__(cls): if 'rollout_score' not in cls.__dict__: raise BadImplementation(cls.__name__,", "add(): \"\"\"Add more 1 depth.\"\"\" DepthMeasure.current_depth += 1 @staticmethod def reset(): \"\"\"Reset the", "This class has specific methods to perform the Monte Carlo Tree Search. 
This", "self.memory[self._hash] = self def rollout_score(self): \"\"\"This method must return a score (float), evaluation,", "above in the tree hierarchy. position: int, required (**), default None - Index", "root `Node` object. (**) required when creating a new \"children\" `Node` object (`new_node`", "not self._children: childs = [] board = self.board for column, row in helpers.available_positions(board):", "from . import RandomStrategy from ... import helpers from ...exceptions import BadImplementation class", "a base for a complex node for a Monte Carlo Tree Searches. This", "of the last column # Return A new instance of `Node` object. \"\"\"", "optimize the search. # Arguments board: matrix, required, board state memory: empty dictionary,", "required to implement the `rollout_score` method which is an evaluation of a given", "in the tree. # Return `True` when the rollout occur, `False` when it", "the `rollout_score` method which is an evaluation of a given state. This class", "a new depth self.run_search() DepthMeasure.reset() DepthMeasure.print() return ... ``` \"\"\" current_depth = 0", "== -1 else -1 winner = helpers.check_winner(sim_board) if winner: return winner return 0", "returned. # Arguments board: matrix, required, board state to be simulated color: int,", "self._visits == 0: return math.inf lnN = math.log1p(self.parent.visits) return self.__score/self._visits + 2*math.sqrt(lnN/self._visits) @property", "children(self): \"\"\"Get all the childen Nodes. Generate or get from the memory, all", "this class to help when measuring the depth of a tree search. Useful", "= 0 self._children = None super().__init__() if parent: self._hash = self._gen_hash() self._save() def", "line \"DepthMeasure.add()\" inside the # search method, when it creates a new depth", "``` \"\"\" current_depth = 0 deepiest = 0 @staticmethod def start(): \"\"\"Reset all", "match. # Return float, score of the Node `board` \"\"\" pass def rollout(self):", "a Monte Carlo Tree Searches. 
This class has specific methods to perform the", "rollout(self): \"\"\"Node rollout. It will execute a rollout if this is the first", "Tree Searches. This class has specific methods to perform the Monte Carlo Tree", "Node and his children. (*) UBC1 is an algorithm which calculates the distribution", "the rollout occur, `False` when it do not. \"\"\" if self.parent and self._visits", "the first visits of the Node, otherwise it will return `False`. Each rollout", "when measuring the depth of a tree search. Useful when debugging and measuring", "0 else None def new_node(self, board, column): \"\"\"This method is called by `children`", "0: return math.inf lnN = math.log1p(self.parent.visits) return self.__score/self._visits + 2*math.sqrt(lnN/self._visits) @property def visits(self):", "Node, the value is stored in the zobrist table and it is __not__", "...exceptions import BadImplementation class SimulationStrategy(RandomStrategy): \"\"\"Simulation Stragegy provide the method necessary to simulate", "in cls.__dict__: raise BadImplementation(cls.__name__, 'rollout_score') @property def UCB1(self): if self._visits == 0: return", "class is a base for a complex node for a Monte Carlo Tree", "current_depth = 0 deepiest = 0 @staticmethod def start(): \"\"\"Reset all the variables", "calculates the distribution of search effort for exploration and exploitation in the Monte", "method, when it creates a new depth self.run_search() DepthMeasure.reset() DepthMeasure.print() return ... ```", "class to help when measuring the depth of a tree search. Useful when", "self.parent.add_visit() def children(self): \"\"\"Get all the childen Nodes. Generate or get from the", "or zero in case the simulation ends in a draw. # Example AgentSimulation:", "creating a new root `Node` object. 
(**) required when creating a new \"children\"", "color: int, required, id of the owner of the board state # Return", "state memory: empty dictionary, required(*), default None - this dictionary will store all", "self.__score += value if self.parent: self.parent.add_score(value) def add_visit(self): self._visits += 1 if self.parent:", "first visit of the Node, the value is stored in the zobrist table", "= parent self.memory = memory if memory is not None else parent.memory self.__score", "AgentNew(AgentBase, TimerStrategy): def action(self, board): DepthMeasure.start() self.start_timer(...) while not self.time_out: # place the", "in the zobrist table and it is __not__ recalculated during the match. #", "for exploration and exploitation in the Monte Carlo Tree Search strategy. # Example", "if winner: return winner return 0 class DepthMeasure: \"\"\"Use this class to help", "import Strategy, RandomStrategy, ZobristHashingStrategy from . import RandomStrategy from ... import helpers from", "= helpers.check_winner(sim_board) if winner: return winner return 0 class DepthMeasure: \"\"\"Use this class", "DepthMeasure.start() self.start_timer(...) while not self.time_out: # place the line \"DepthMeasure.add()\" inside the #", "search episode. Save the current depth if it's the deepiest till know. \"\"\"", "Save the current depth if it's the deepiest till know. \"\"\" if DepthMeasure.current_depth", "optimization. # Example ``` class AgentNew(AgentBase, TimerStrategy): def action(self, board): DepthMeasure.start() self.start_timer(...) while", "method necessary to simulate matches.\"\"\" def simulate(self, board, color=-1): \"\"\"Simulate a match to", "break next_pos = helpers.next_position(sim_board, column) sim_board[column, next_pos] = color color = 1 if", "complex node for a Monte Carlo Tree Searches. This class has specific methods", "when it creates a new depth self.run_search() DepthMeasure.reset() DepthMeasure.print() return ... 
``` \"\"\"", "\"\"\" def __init__(self, board, memory=None, parent=None, position=None, color=1): self.board = board self.color =", "board hits a terminal state then the value is returned. # Arguments board:", "parents above in the tree. # Return `True` when the rollout occur, `False`", "def add_visit(self): self._visits += 1 if self.parent: self.parent.add_visit() def children(self): \"\"\"Get all the", "parent=self, position=column, color=-self.color) return node def _get_memory(self, board, color): hash = self.hash(board, color)", "the Monte Carlo Tree Search strategy. # Example AgentMonteCarlo: [documentation](./agents#agentsimulation) AgentMCTSNN: [documentation](./agents#agentmctsnn) \"\"\"", "_save(self): self.memory[self._hash] = self def rollout_score(self): \"\"\"This method must return a score (float),", "\"\"\"Simulation Stragegy provide the method necessary to simulate matches.\"\"\" def simulate(self, board, color=-1):", "his children. (*) UBC1 is an algorithm which calculates the distribution of search", "Node `board` \"\"\" pass def rollout(self): \"\"\"Node rollout. It will execute a rollout", "is the first visits of the Node, otherwise it will return `False`. Each", "color = 1 if color == -1 else -1 winner = helpers.check_winner(sim_board) if", "and measuring performance optimization. # Example ``` class AgentNew(AgentBase, TimerStrategy): def action(self, board):", "node_type(board=board, parent=self, position=column, color=-self.color) return node def _get_memory(self, board, color): hash = self.hash(board,", "evaluation of a given state. 
This class uses a __zobrist hashing table__ to", "1 if color == -1 else -1 winner = helpers.check_winner(sim_board) if winner: return", "total number of score is the sum of all scores made by the", "(*) UBC1 is an algorithm which calculates the distribution of search effort for", "self._visits = 0 self._children = None super().__init__() if parent: self._hash = self._gen_hash() self._save()", "recalculated during the match. # Return float, score of the Node `board` \"\"\"", "= copy(board) end = False while not end: column = self.random_choice(sim_board) if column", "execute a rollout if this is the first visits of the Node, otherwise", "measuring performance optimization. # Example ``` class AgentNew(AgentBase, TimerStrategy): def action(self, board): DepthMeasure.start()", "class Node(SimulationStrategy, ZobristHashingStrategy): \"\"\"Node class is a base for a complex node for", "variables to begin a new measurement.\"\"\" DepthMeasure.current_depth = 0 DepthMeasure.deepiest = 0 @staticmethod", "<reponame>yuriharrison/connect-four-lab<gh_stars>0 \"\"\"Monte Carlo strategies\"\"\" import math from copy import copy, deepcopy from .", "def rollout(self): \"\"\"Node rollout. It will execute a rollout if this is the", "a __zobrist hashing table__ to optimize the search. # Arguments board: matrix, required,", "Each rollout adds a visit in the counter and the score of the", "it doesn't have a default board evaluation algorithm. When inherited it's required to", "this dictionary will store all searches with zobrist hashing. parent: `Node` object, required", "color) if hash in self.memory: return hash else: return def _gen_hash(self): self.hash(self.board, self.color)", "position self.parent = parent self.memory = memory if memory is not None else", "visits of the Node, otherwise it will return `False`. 
Each rollout adds a", "if this is the first visits of the Node, otherwise it will return", "[documentation](./agents#agentsimulation) \"\"\" winner = helpers.check_winner(board) if winner: return winner sim_board = copy(board) end", "deepiest depth reached.\"\"\" print('Last play depth:', DepthMeasure.deepiest) class Node(SimulationStrategy, ZobristHashingStrategy): \"\"\"Node class is", "has specific methods to perform the Monte Carlo Tree Search. This class __don't", "total number of score divided by the number of visits - the total", "help when measuring the depth of a tree search. Useful when debugging and", "helpers from ...exceptions import BadImplementation class SimulationStrategy(RandomStrategy): \"\"\"Simulation Stragegy provide the method necessary", "return math.inf lnN = math.log1p(self.parent.visits) return self.__score/self._visits + 2*math.sqrt(lnN/self._visits) @property def visits(self): return", "raise BadImplementation(cls.__name__, 'rollout_score') @property def UCB1(self): if self._visits == 0: return math.inf lnN", "is a base for a complex node for a Monte Carlo Tree Searches.", "new \"children\" `Node` object (`new_node` method). # Properties UCB1: float, UCB1 value (*)", "the childen Nodes. Each node is generated from the available possitions in the", "UCB1: float, UCB1 value (*) of the node. visits: int, total number of", "in a draw. # Example AgentSimulation: [documentation](./agents#agentsimulation) \"\"\" winner = helpers.check_winner(board) if winner:", "when the rollout occur, `False` when it do not. 
\"\"\" if self.parent and", "board: matrix, required, board state to be simulated color: int, required, id of", "return 0 class DepthMeasure: \"\"\"Use this class to help when measuring the depth", "position: int, required (**), default None - Index of the column which generated", "return self.__score/self._visits def _save(self): self.memory[self._hash] = self def rollout_score(self): \"\"\"This method must return", "rollout occur in the first visit of the Node, the value is stored", "0 @staticmethod def add(): \"\"\"Add more 1 depth.\"\"\" DepthMeasure.current_depth += 1 @staticmethod def", "int, index of the last column # Return A new instance of `Node`", "board, color): hash = self.hash(board, color) if hash in self.memory: return hash else:", "a visit in the counter and the score of the `board` to the", "winner: return winner sim_board = copy(board) end = False while not end: column", "= 0 @staticmethod def add(): \"\"\"Add more 1 depth.\"\"\" DepthMeasure.current_depth += 1 @staticmethod", "Node board state (`self.board`), from the perspective of the player id 1. This", "== 0: return 0 return self.__score/self._visits def _save(self): self.memory[self._hash] = self def rollout_score(self):", "DepthMeasure.add() if not self._children: childs = [] board = self.board for column, row", "if it's the deepiest till know. \"\"\" if DepthMeasure.current_depth > DepthMeasure.deepiest: DepthMeasure.deepiest =", "float, score of the Node `board` \"\"\" pass def rollout(self): \"\"\"Node rollout. It", "of the node. 
visits: int, total number of Node visits value: float, total", "self.__score/self._visits def _save(self): self.memory[self._hash] = self def rollout_score(self): \"\"\"This method must return a", "not None else parent.memory self.__score = 0 self._visits = 0 self._children = None", "def value(self): if self._visits == 0: return 0 return self.__score/self._visits def _save(self): self.memory[self._hash]", "ZobristHashingStrategy): \"\"\"Node class is a base for a complex node for a Monte", "visits - the total number of score is the sum of all scores", "algorithm which calculates the distribution of search effort for exploration and exploitation in", "else None def new_node(self, board, column): \"\"\"This method is called by `children` method", "a new measurement.\"\"\" DepthMeasure.current_depth = 0 DepthMeasure.deepiest = 0 @staticmethod def add(): \"\"\"Add", "a tree search. Useful when debugging and measuring performance optimization. # Example ```", "of the Node `board` \"\"\" pass def rollout(self): \"\"\"Node rollout. It will execute", "current Node. \"\"\" # DepthMeasure.add() if not self._children: childs = [] board =", "DepthMeasure.deepiest = DepthMeasure.current_depth DepthMeasure.current_depth = 0 @staticmethod def print(): \"\"\"Print the deepiest depth", "when creating a new \"children\" `Node` object (`new_node` method). # Properties UCB1: float,", "Index of the column which generated the board current `board`. color: int, required", "object (`new_node` method). # Properties UCB1: float, UCB1 value (*) of the node.", "creating a new \"children\" `Node` object (`new_node` method). # Properties UCB1: float, UCB1", "by `children` method to generate a new Node. # Arguments: board: matrix, new", "visits(self): return self._visits @property def value(self): if self._visits == 0: return 0 return", "possitions in the `board` of the current Node. 
\"\"\" # DepthMeasure.add() if not", "'rollout_score') @property def UCB1(self): if self._visits == 0: return math.inf lnN = math.log1p(self.parent.visits)", "occur in the first visit of the Node, the value is stored in", "0 self._children = None super().__init__() if parent: self._hash = self._gen_hash() self._save() def __init_subclass__(cls):", "depth:', DepthMeasure.deepiest) class Node(SimulationStrategy, ZobristHashingStrategy): \"\"\"Node class is a base for a complex", "methods to perform the Monte Carlo Tree Search. This class __don't work on", "it do not. \"\"\" if self.parent and self._visits == 0: score = self.rollout_score()", "board): DepthMeasure.start() self.start_timer(...) while not self.time_out: # place the line \"DepthMeasure.add()\" inside the", "= 0 node = self._get_memory(new_board, -self.color) if not node: node = self.new_node(new_board, column)", "uses a __zobrist hashing table__ to optimize the search. # Arguments board: matrix,", "self.board = board self.color = color self.position = position self.parent = parent self.memory", "node for a Monte Carlo Tree Searches. This class has specific methods to", "score divided by the number of visits - the total number of score", "@property def visits(self): return self._visits @property def value(self): if self._visits == 0: return", "debugging and measuring performance optimization. # Example ``` class AgentNew(AgentBase, TimerStrategy): def action(self,", "called every rollout. 
The rollout occur in the first visit of the Node,", "= deepcopy(board) board[column,row] = 0 node = self._get_memory(new_board, -self.color) if not node: node", "def start(): \"\"\"Reset all the variables to begin a new measurement.\"\"\" DepthMeasure.current_depth =", "empty dictionary, required(*), default None - this dictionary will store all searches with", "= childs return self._children if len(self._children) > 0 else None def new_node(self, board,", "= self._gen_hash() self._save() def __init_subclass__(cls): if 'rollout_score' not in cls.__dict__: raise BadImplementation(cls.__name__, 'rollout_score')", "winner: return winner return 0 class DepthMeasure: \"\"\"Use this class to help when", "board[column,row] = 0 node = self._get_memory(new_board, -self.color) if not node: node = self.new_node(new_board,", "return 0 return self.__score/self._visits def _save(self): self.memory[self._hash] = self def rollout_score(self): \"\"\"This method", "of Node visits value: float, total number of score divided by the number", "= type(self) node = node_type(board=board, parent=self, position=column, color=-self.color) return node def _get_memory(self, board,", "\"\"\"Reset the depth before start a new search episode. Save the current depth", "inherited it's required to implement the `rollout_score` method which is an evaluation of", "self.__score = 0 self._visits = 0 self._children = None super().__init__() if parent: self._hash", "default 1 (*) required when creating a new root `Node` object. (**) required", "AgentMonteCarlo: [documentation](./agents#agentsimulation) AgentMCTSNN: [documentation](./agents#agentmctsnn) \"\"\" def __init__(self, board, memory=None, parent=None, position=None, color=1): self.board", "self.parent = parent self.memory = memory if memory is not None else parent.memory", "till the board hits a terminal state then the value is returned. #", "new Node. 
# Arguments: board: matrix, new board state column: int, index of", "childs return self._children if len(self._children) > 0 else None def new_node(self, board, column):", "Arguments: board: matrix, new board state column: int, index of the last column", "self._children = childs return self._children if len(self._children) > 0 else None def new_node(self,", "measuring the depth of a tree search. Useful when debugging and measuring performance", "the winner of the simulation or zero in case the simulation ends in", ". import Strategy, RandomStrategy, ZobristHashingStrategy from . import RandomStrategy from ... import helpers", "childen Nodes. Generate or get from the memory, all the childen Nodes. Each", "self.hash(board, color) if hash in self.memory: return hash else: return def _gen_hash(self): self.hash(self.board,", "a default board evaluation algorithm. When inherited it's required to implement the `rollout_score`", "`Node` object. (**) required when creating a new \"children\" `Node` object (`new_node` method).", "divided by the number of visits - the total number of score is", "return ... ``` \"\"\" current_depth = 0 deepiest = 0 @staticmethod def start():", "\"\"\"Add more 1 depth.\"\"\" DepthMeasure.current_depth += 1 @staticmethod def reset(): \"\"\"Reset the depth", "print(): \"\"\"Print the deepiest depth reached.\"\"\" print('Last play depth:', DepthMeasure.deepiest) class Node(SimulationStrategy, ZobristHashingStrategy):", "def add(): \"\"\"Add more 1 depth.\"\"\" DepthMeasure.current_depth += 1 @staticmethod def reset(): \"\"\"Reset", "and exploitation in the Monte Carlo Tree Search strategy. # Example AgentMonteCarlo: [documentation](./agents#agentsimulation)", "import math from copy import copy, deepcopy from . import Strategy, RandomStrategy, ZobristHashingStrategy", "the player id 1. This method is called every rollout. The rollout occur", "memory, all the childen Nodes. 
Each node is generated from the available possitions", "#no options left = tie break next_pos = helpers.next_position(sim_board, column) sim_board[column, next_pos] =", "the simulation ends in a draw. # Example AgentSimulation: [documentation](./agents#agentsimulation) \"\"\" winner =", "required(*), default None - this dictionary will store all searches with zobrist hashing.", "object, required (**), default None - Node above in the tree hierarchy. position:", "tie break next_pos = helpers.next_position(sim_board, column) sim_board[column, next_pos] = color color = 1", "if memory is not None else parent.memory self.__score = 0 self._visits = 0", "self.color new_board = deepcopy(board) board[column,row] = 0 node = self._get_memory(new_board, -self.color) if not", "id of the owner of the board state # Return Id of the", "# Return Id of the winner of the simulation or zero in case", "column): \"\"\"This method is called by `children` method to generate a new Node.", "@staticmethod def start(): \"\"\"Reset all the variables to begin a new measurement.\"\"\" DepthMeasure.current_depth", "self.board for column, row in helpers.available_positions(board): board[column,row] = self.color new_board = deepcopy(board) board[column,row]", "Generate or get from the memory, all the childen Nodes. Each node is", "the score of the `board` to the Node and all the parents above", "if self._visits == 0: return math.inf lnN = math.log1p(self.parent.visits) return self.__score/self._visits + 2*math.sqrt(lnN/self._visits)", "is generated from the available possitions in the `board` of the current Node.", "state (`self.board`), from the perspective of the player id 1. This method is", "new search episode. Save the current depth if it's the deepiest till know.", "column, row in helpers.available_positions(board): board[column,row] = self.color new_board = deepcopy(board) board[column,row] = 0", "for a Monte Carlo Tree Searches. 
This class has specific methods to perform", "All turns are played randomly till the board hits a terminal state then", "helpers.check_winner(sim_board) if winner: return winner return 0 class DepthMeasure: \"\"\"Use this class to", "must return a score (float), evaluation, for the Node board state (`self.board`), from", "next_pos] = color color = 1 if color == -1 else -1 winner", "winner return 0 class DepthMeasure: \"\"\"Use this class to help when measuring the", "end: column = self.random_choice(sim_board) if column == None: #no options left = tie", "is __not__ recalculated during the match. # Return float, score of the Node", "\"children\" `Node` object (`new_node` method). # Properties UCB1: float, UCB1 value (*) of", "id 1. This method is called every rollout. The rollout occur in the", "[documentation](./agents#agentmctsnn) \"\"\" def __init__(self, board, memory=None, parent=None, position=None, color=1): self.board = board self.color", "SimulationStrategy(RandomStrategy): \"\"\"Simulation Stragegy provide the method necessary to simulate matches.\"\"\" def simulate(self, board,", "self._children if len(self._children) > 0 else None def new_node(self, board, column): \"\"\"This method", "@property def UCB1(self): if self._visits == 0: return math.inf lnN = math.log1p(self.parent.visits) return", "# Return A new instance of `Node` object. \"\"\" node_type = type(self) node", "an evaluation of a given state. This class uses a __zobrist hashing table__", "till know. \"\"\" if DepthMeasure.current_depth > DepthMeasure.deepiest: DepthMeasure.deepiest = DepthMeasure.current_depth DepthMeasure.current_depth = 0", "a score (float), evaluation, for the Node board state (`self.board`), from the perspective", "during the match. # Return float, score of the Node `board` \"\"\" pass", "the end from a given board state. 
All turns are played randomly till", "not node: node = self.new_node(new_board, column) childs.append(node) self._children = childs return self._children if", "search. Useful when debugging and measuring performance optimization. # Example ``` class AgentNew(AgentBase,", "color=-self.color) return node def _get_memory(self, board, color): hash = self.hash(board, color) if hash", "then the value is returned. # Arguments board: matrix, required, board state to", "int, required (**), default 1 (*) required when creating a new root `Node`", "self._visits @property def value(self): if self._visits == 0: return 0 return self.__score/self._visits def", "return `False`. Each rollout adds a visit in the counter and the score", "import helpers from ...exceptions import BadImplementation class SimulationStrategy(RandomStrategy): \"\"\"Simulation Stragegy provide the method", "+ 2*math.sqrt(lnN/self._visits) @property def visits(self): return self._visits @property def value(self): if self._visits ==", "\"\"\" node_type = type(self) node = node_type(board=board, parent=self, position=column, color=-self.color) return node def", "= None super().__init__() if parent: self._hash = self._gen_hash() self._save() def __init_subclass__(cls): if 'rollout_score'", "class has specific methods to perform the Monte Carlo Tree Search. This class", "(**), default None - Node above in the tree hierarchy. position: int, required", "not in cls.__dict__: raise BadImplementation(cls.__name__, 'rollout_score') @property def UCB1(self): if self._visits == 0:", "print('Last play depth:', DepthMeasure.deepiest) class Node(SimulationStrategy, ZobristHashingStrategy): \"\"\"Node class is a base for", "strategy. 
# Example AgentMonteCarlo: [documentation](./agents#agentsimulation) AgentMCTSNN: [documentation](./agents#agentmctsnn) \"\"\" def __init__(self, board, memory=None, parent=None,", "search effort for exploration and exploitation in the Monte Carlo Tree Search strategy.", "necessary to simulate matches.\"\"\" def simulate(self, board, color=-1): \"\"\"Simulate a match to the", "RandomStrategy from ... import helpers from ...exceptions import BadImplementation class SimulationStrategy(RandomStrategy): \"\"\"Simulation Stragegy", "all scores made by the Node and his children. (*) UBC1 is an", "None: #no options left = tie break next_pos = helpers.next_position(sim_board, column) sim_board[column, next_pos]", "0 deepiest = 0 @staticmethod def start(): \"\"\"Reset all the variables to begin", "default None - this dictionary will store all searches with zobrist hashing. parent:", "of the owner of the board state # Return Id of the winner", "start a new search episode. Save the current depth if it's the deepiest", "depth if it's the deepiest till know. \"\"\" if DepthMeasure.current_depth > DepthMeasure.deepiest: DepthMeasure.deepiest", "def print(): \"\"\"Print the deepiest depth reached.\"\"\" print('Last play depth:', DepthMeasure.deepiest) class Node(SimulationStrategy,", "are played randomly till the board hits a terminal state then the value", "helpers.next_position(sim_board, column) sim_board[column, next_pos] = color color = 1 if color == -1", "be simulated color: int, required, id of the owner of the board state", "\"\"\" pass def rollout(self): \"\"\"Node rollout. It will execute a rollout if this", "def action(self, board): DepthMeasure.start() self.start_timer(...) 
while not self.time_out: # place the line \"DepthMeasure.add()\"", "# Example AgentMonteCarlo: [documentation](./agents#agentsimulation) AgentMCTSNN: [documentation](./agents#agentmctsnn) \"\"\" def __init__(self, board, memory=None, parent=None, position=None,", "'rollout_score' not in cls.__dict__: raise BadImplementation(cls.__name__, 'rollout_score') @property def UCB1(self): if self._visits ==", "inside the # search method, when it creates a new depth self.run_search() DepthMeasure.reset()", "+= value if self.parent: self.parent.add_score(value) def add_visit(self): self._visits += 1 if self.parent: self.parent.add_visit()", "= self._get_memory(new_board, -self.color) if not node: node = self.new_node(new_board, column) childs.append(node) self._children =", "case the simulation ends in a draw. # Example AgentSimulation: [documentation](./agents#agentsimulation) \"\"\" winner", "sum of all scores made by the Node and his children. (*) UBC1", "a new root `Node` object. (**) required when creating a new \"children\" `Node`", "Tree Search. This class __don't work on its own__, because it doesn't have", "episode. Save the current depth if it's the deepiest till know. \"\"\" if", "number of Node visits value: float, total number of score divided by the", "board state column: int, index of the last column # Return A new", "def UCB1(self): if self._visits == 0: return math.inf lnN = math.log1p(self.parent.visits) return self.__score/self._visits", "-1 winner = helpers.check_winner(sim_board) if winner: return winner return 0 class DepthMeasure: \"\"\"Use", "= False while not end: column = self.random_choice(sim_board) if column == None: #no", "2*math.sqrt(lnN/self._visits) @property def visits(self): return self._visits @property def value(self): if self._visits == 0:", "to the Node and all the parents above in the tree. 
# Return", "measurement.\"\"\" DepthMeasure.current_depth = 0 DepthMeasure.deepiest = 0 @staticmethod def add(): \"\"\"Add more 1", "# Return `True` when the rollout occur, `False` when it do not. \"\"\"", "= 0 self._visits = 0 self._children = None super().__init__() if parent: self._hash =", "The rollout occur in the first visit of the Node, the value is", "will execute a rollout if this is the first visits of the Node,", "an algorithm which calculates the distribution of search effort for exploration and exploitation", "the value is returned. # Arguments board: matrix, required, board state to be", "board state # Return Id of the winner of the simulation or zero", "new measurement.\"\"\" DepthMeasure.current_depth = 0 DepthMeasure.deepiest = 0 @staticmethod def add(): \"\"\"Add more", "# place the line \"DepthMeasure.add()\" inside the # search method, when it creates", "new_board = deepcopy(board) board[column,row] = 0 node = self._get_memory(new_board, -self.color) if not node:", "depth of a tree search. Useful when debugging and measuring performance optimization. #", "None super().__init__() if parent: self._hash = self._gen_hash() self._save() def __init_subclass__(cls): if 'rollout_score' not", "a match to the end from a given board state. All turns are", "import RandomStrategy from ... import helpers from ...exceptions import BadImplementation class SimulationStrategy(RandomStrategy): \"\"\"Simulation", "not. \"\"\" if self.parent and self._visits == 0: score = self.rollout_score() self.add_score(score) self.add_visit()", "the current depth if it's the deepiest till know. \"\"\" if DepthMeasure.current_depth >", "simulation ends in a draw. 
# Example AgentSimulation: [documentation](./agents#agentsimulation) \"\"\" winner = helpers.check_winner(board)", "Return float, score of the Node `board` \"\"\" pass def rollout(self): \"\"\"Node rollout.", "for the Node board state (`self.board`), from the perspective of the player id", "available possitions in the `board` of the current Node. \"\"\" # DepthMeasure.add() if", "ZobristHashingStrategy from . import RandomStrategy from ... import helpers from ...exceptions import BadImplementation", "# Properties UCB1: float, UCB1 value (*) of the node. visits: int, total", "of the Node, the value is stored in the zobrist table and it", "def add_score(self, value): self.__score += value if self.parent: self.parent.add_score(value) def add_visit(self): self._visits +=", "which calculates the distribution of search effort for exploration and exploitation in the", "def __init_subclass__(cls): if 'rollout_score' not in cls.__dict__: raise BadImplementation(cls.__name__, 'rollout_score') @property def UCB1(self):", "all searches with zobrist hashing. parent: `Node` object, required (**), default None -", "simulate matches.\"\"\" def simulate(self, board, color=-1): \"\"\"Simulate a match to the end from", "all the variables to begin a new measurement.\"\"\" DepthMeasure.current_depth = 0 DepthMeasure.deepiest =", "self._children = None super().__init__() if parent: self._hash = self._gen_hash() self._save() def __init_subclass__(cls): if", "the Node and all the parents above in the tree. # Return `True`", "the `board` of the current Node. \"\"\" # DepthMeasure.add() if not self._children: childs", "None - this dictionary will store all searches with zobrist hashing. parent: `Node`", "match to the end from a given board state. All turns are played", "last column # Return A new instance of `Node` object. \"\"\" node_type =", "copy, deepcopy from . import Strategy, RandomStrategy, ZobristHashingStrategy from . 
import RandomStrategy from", "sim_board = copy(board) end = False while not end: column = self.random_choice(sim_board) if", "DepthMeasure.deepiest: DepthMeasure.deepiest = DepthMeasure.current_depth DepthMeasure.current_depth = 0 @staticmethod def print(): \"\"\"Print the deepiest", "object. \"\"\" node_type = type(self) node = node_type(board=board, parent=self, position=column, color=-self.color) return node", "the deepiest depth reached.\"\"\" print('Last play depth:', DepthMeasure.deepiest) class Node(SimulationStrategy, ZobristHashingStrategy): \"\"\"Node class", "def __init__(self, board, memory=None, parent=None, position=None, color=1): self.board = board self.color = color", "self.add_score(score) self.add_visit() return True else: return False def add_score(self, value): self.__score += value", "of the board state # Return Id of the winner of the simulation", "simulated color: int, required, id of the owner of the board state #", "\"DepthMeasure.add()\" inside the # search method, when it creates a new depth self.run_search()", "state column: int, index of the last column # Return A new instance", "DepthMeasure.reset() DepthMeasure.print() return ... ``` \"\"\" current_depth = 0 deepiest = 0 @staticmethod", "TimerStrategy): def action(self, board): DepthMeasure.start() self.start_timer(...) while not self.time_out: # place the line", "own__, because it doesn't have a default board evaluation algorithm. When inherited it's", "cls.__dict__: raise BadImplementation(cls.__name__, 'rollout_score') @property def UCB1(self): if self._visits == 0: return math.inf", "(**), default None - Index of the column which generated the board current", "before start a new search episode. Save the current depth if it's the", "import copy, deepcopy from . import Strategy, RandomStrategy, ZobristHashingStrategy from . import RandomStrategy", "score of the Node `board` \"\"\" pass def rollout(self): \"\"\"Node rollout. It will", "and his children. 
(*) UBC1 is an algorithm which calculates the distribution of", "self._children: childs = [] board = self.board for column, row in helpers.available_positions(board): board[column,row]", "= self.board for column, row in helpers.available_positions(board): board[column,row] = self.color new_board = deepcopy(board)", "\"\"\" current_depth = 0 deepiest = 0 @staticmethod def start(): \"\"\"Reset all the", "made by the Node and his children. (*) UBC1 is an algorithm which", "column) childs.append(node) self._children = childs return self._children if len(self._children) > 0 else None", "above in the tree. # Return `True` when the rollout occur, `False` when", "with zobrist hashing. parent: `Node` object, required (**), default None - Node above", "= self.color new_board = deepcopy(board) board[column,row] = 0 node = self._get_memory(new_board, -self.color) if", "\"\"\"Reset all the variables to begin a new measurement.\"\"\" DepthMeasure.current_depth = 0 DepthMeasure.deepiest", "the depth before start a new search episode. Save the current depth if", "of visits - the total number of score is the sum of all", "-1 else -1 winner = helpers.check_winner(sim_board) if winner: return winner return 0 class", "1 (*) required when creating a new root `Node` object. (**) required when", "None - Node above in the tree hierarchy. position: int, required (**), default", "new board state column: int, index of the last column # Return A", "is not None else parent.memory self.__score = 0 self._visits = 0 self._children =", "the available possitions in the `board` of the current Node. \"\"\" # DepthMeasure.add()", "first visits of the Node, otherwise it will return `False`. Each rollout adds", "- the total number of score is the sum of all scores made", "lnN = math.log1p(self.parent.visits) return self.__score/self._visits + 2*math.sqrt(lnN/self._visits) @property def visits(self): return self._visits @property", "base for a complex node for a Monte Carlo Tree Searches. 
This class", "DepthMeasure.current_depth = 0 DepthMeasure.deepiest = 0 @staticmethod def add(): \"\"\"Add more 1 depth.\"\"\"", "BadImplementation class SimulationStrategy(RandomStrategy): \"\"\"Simulation Stragegy provide the method necessary to simulate matches.\"\"\" def", "column which generated the board current `board`. color: int, required (**), default 1", "\"\"\"Node class is a base for a complex node for a Monte Carlo", "dictionary will store all searches with zobrist hashing. parent: `Node` object, required (**),", "called by `children` method to generate a new Node. # Arguments: board: matrix,", "depth self.run_search() DepthMeasure.reset() DepthMeasure.print() return ... ``` \"\"\" current_depth = 0 deepiest =", "= 0 @staticmethod def print(): \"\"\"Print the deepiest depth reached.\"\"\" print('Last play depth:',", "in the `board` of the current Node. \"\"\" # DepthMeasure.add() if not self._children:", "a draw. # Example AgentSimulation: [documentation](./agents#agentsimulation) \"\"\" winner = helpers.check_winner(board) if winner: return", "pass def rollout(self): \"\"\"Node rollout. It will execute a rollout if this is", "return self.__score/self._visits + 2*math.sqrt(lnN/self._visits) @property def visits(self): return self._visits @property def value(self): if", "default None - Node above in the tree hierarchy. position: int, required (**),", "DepthMeasure.deepiest) class Node(SimulationStrategy, ZobristHashingStrategy): \"\"\"Node class is a base for a complex node", "Node visits value: float, total number of score divided by the number of", "when debugging and measuring performance optimization. # Example ``` class AgentNew(AgentBase, TimerStrategy): def", "Node. \"\"\" # DepthMeasure.add() if not self._children: childs = [] board = self.board", "\"\"\"Monte Carlo strategies\"\"\" import math from copy import copy, deepcopy from . import", "creates a new depth self.run_search() DepthMeasure.reset() DepthMeasure.print() return ... 
``` \"\"\" current_depth =", "Carlo Tree Search. This class __don't work on its own__, because it doesn't", "score is the sum of all scores made by the Node and his", "if column == None: #no options left = tie break next_pos = helpers.next_position(sim_board,", "generated the board current `board`. color: int, required (**), default 1 (*) required", "Nodes. Generate or get from the memory, all the childen Nodes. Each node", "the current Node. \"\"\" # DepthMeasure.add() if not self._children: childs = [] board", "board state to be simulated color: int, required, id of the owner of", "method must return a score (float), evaluation, for the Node board state (`self.board`),", "math.log1p(self.parent.visits) return self.__score/self._visits + 2*math.sqrt(lnN/self._visits) @property def visits(self): return self._visits @property def value(self):", "_get_memory(self, board, color): hash = self.hash(board, color) if hash in self.memory: return hash", "of a tree search. Useful when debugging and measuring performance optimization. # Example", "visits: int, total number of Node visits value: float, total number of score", "position=column, color=-self.color) return node def _get_memory(self, board, color): hash = self.hash(board, color) if", "total number of Node visits value: float, total number of score divided by", "1 if self.parent: self.parent.add_visit() def children(self): \"\"\"Get all the childen Nodes. Generate or", "`True` when the rollout occur, `False` when it do not. \"\"\" if self.parent", "Node and all the parents above in the tree. # Return `True` when", "Return Id of the winner of the simulation or zero in case the", "> DepthMeasure.deepiest: DepthMeasure.deepiest = DepthMeasure.current_depth DepthMeasure.current_depth = 0 @staticmethod def print(): \"\"\"Print the", "= self.hash(board, color) if hash in self.memory: return hash else: return def _gen_hash(self):", "which generated the board current `board`. 
color: int, required (**), default 1 (*)", "in the tree hierarchy. position: int, required (**), default None - Index of", "of search effort for exploration and exploitation in the Monte Carlo Tree Search", "# Example ``` class AgentNew(AgentBase, TimerStrategy): def action(self, board): DepthMeasure.start() self.start_timer(...) while not", "hashing table__ to optimize the search. # Arguments board: matrix, required, board state", "it's the deepiest till know. \"\"\" if DepthMeasure.current_depth > DepthMeasure.deepiest: DepthMeasure.deepiest = DepthMeasure.current_depth", "tree search. Useful when debugging and measuring performance optimization. # Example ``` class", "tree hierarchy. position: int, required (**), default None - Index of the column", "rollout. It will execute a rollout if this is the first visits of", "of the winner of the simulation or zero in case the simulation ends", "counter and the score of the `board` to the Node and all the", "begin a new measurement.\"\"\" DepthMeasure.current_depth = 0 DepthMeasure.deepiest = 0 @staticmethod def add():", "\"\"\"This method must return a score (float), evaluation, for the Node board state", "Monte Carlo Tree Search. This class __don't work on its own__, because it", "(`new_node` method). # Properties UCB1: float, UCB1 value (*) of the node. visits:", "if not node: node = self.new_node(new_board, column) childs.append(node) self._children = childs return self._children", "self.parent: self.parent.add_visit() def children(self): \"\"\"Get all the childen Nodes. Generate or get from", "0 @staticmethod def print(): \"\"\"Print the deepiest depth reached.\"\"\" print('Last play depth:', DepthMeasure.deepiest)", "childs = [] board = self.board for column, row in helpers.available_positions(board): board[column,row] =", "all the childen Nodes. 
Each node is generated from the available possitions in", "in helpers.available_positions(board): board[column,row] = self.color new_board = deepcopy(board) board[column,row] = 0 node =", "copy(board) end = False while not end: column = self.random_choice(sim_board) if column ==", "`False`. Each rollout adds a visit in the counter and the score of", "in the Monte Carlo Tree Search strategy. # Example AgentMonteCarlo: [documentation](./agents#agentsimulation) AgentMCTSNN: [documentation](./agents#agentmctsnn)", "return a score (float), evaluation, for the Node board state (`self.board`), from the", "childen Nodes. Each node is generated from the available possitions in the `board`", "color self.position = position self.parent = parent self.memory = memory if memory is", "board state (`self.board`), from the perspective of the player id 1. This method", "default board evaluation algorithm. When inherited it's required to implement the `rollout_score` method", "def rollout_score(self): \"\"\"This method must return a score (float), evaluation, for the Node", "Nodes. Each node is generated from the available possitions in the `board` of", "def _save(self): self.memory[self._hash] = self def rollout_score(self): \"\"\"This method must return a score", "a new \"children\" `Node` object (`new_node` method). # Properties UCB1: float, UCB1 value", "class __don't work on its own__, because it doesn't have a default board", "is the sum of all scores made by the Node and his children.", "def simulate(self, board, color=-1): \"\"\"Simulate a match to the end from a given", "Useful when debugging and measuring performance optimization. # Example ``` class AgentNew(AgentBase, TimerStrategy):", "rollout. 
The rollout occur in the first visit of the Node, the value", "0 self._visits = 0 self._children = None super().__init__() if parent: self._hash = self._gen_hash()", "self.parent.add_score(value) def add_visit(self): self._visits += 1 if self.parent: self.parent.add_visit() def children(self): \"\"\"Get all", "- Node above in the tree hierarchy. position: int, required (**), default None", "parent self.memory = memory if memory is not None else parent.memory self.__score =", "self.add_visit() return True else: return False def add_score(self, value): self.__score += value if", "= 0 DepthMeasure.deepiest = 0 @staticmethod def add(): \"\"\"Add more 1 depth.\"\"\" DepthMeasure.current_depth", "0 class DepthMeasure: \"\"\"Use this class to help when measuring the depth of", "return self._children if len(self._children) > 0 else None def new_node(self, board, column): \"\"\"This", "= helpers.next_position(sim_board, column) sim_board[column, next_pos] = color color = 1 if color ==", "all the parents above in the tree. # Return `True` when the rollout", "# Arguments board: matrix, required, board state memory: empty dictionary, required(*), default None", "def reset(): \"\"\"Reset the depth before start a new search episode. Save the", "Carlo strategies\"\"\" import math from copy import copy, deepcopy from . import Strategy,", "it's required to implement the `rollout_score` method which is an evaluation of a", "- Index of the column which generated the board current `board`. color: int,", "column == None: #no options left = tie break next_pos = helpers.next_position(sim_board, column)", "def new_node(self, board, column): \"\"\"This method is called by `children` method to generate", "This class uses a __zobrist hashing table__ to optimize the search. # Arguments", "board = self.board for column, row in helpers.available_positions(board): board[column,row] = self.color new_board =", "and all the parents above in the tree. 
# Return `True` when the", "UCB1(self): if self._visits == 0: return math.inf lnN = math.log1p(self.parent.visits) return self.__score/self._visits +", "number of visits - the total number of score is the sum of", "self.__score/self._visits + 2*math.sqrt(lnN/self._visits) @property def visits(self): return self._visits @property def value(self): if self._visits", "column = self.random_choice(sim_board) if column == None: #no options left = tie break", "(float), evaluation, for the Node board state (`self.board`), from the perspective of the", "rollout adds a visit in the counter and the score of the `board`", "if self.parent and self._visits == 0: score = self.rollout_score() self.add_score(score) self.add_visit() return True", "the `board` to the Node and all the parents above in the tree.", "state then the value is returned. # Arguments board: matrix, required, board state", "int, total number of Node visits value: float, total number of score divided", "when it do not. \"\"\" if self.parent and self._visits == 0: score =", "DepthMeasure.current_depth += 1 @staticmethod def reset(): \"\"\"Reset the depth before start a new", "the board hits a terminal state then the value is returned. # Arguments", "new_node(self, board, column): \"\"\"This method is called by `children` method to generate a", "None - Index of the column which generated the board current `board`. color:", "self._visits == 0: score = self.rollout_score() self.add_score(score) self.add_visit() return True else: return False", "a terminal state then the value is returned. # Arguments board: matrix, required,", "state. This class uses a __zobrist hashing table__ to optimize the search. #", "`Node` object (`new_node` method). # Properties UCB1: float, UCB1 value (*) of the", "of the column which generated the board current `board`. color: int, required (**),", "strategies\"\"\" import math from copy import copy, deepcopy from . 
import Strategy, RandomStrategy,", "specific methods to perform the Monte Carlo Tree Search. This class __don't work", "value): self.__score += value if self.parent: self.parent.add_score(value) def add_visit(self): self._visits += 1 if", "a given state. This class uses a __zobrist hashing table__ to optimize the", "given board state. All turns are played randomly till the board hits a", "every rollout. The rollout occur in the first visit of the Node, the", "True else: return False def add_score(self, value): self.__score += value if self.parent: self.parent.add_score(value)", "method). # Properties UCB1: float, UCB1 value (*) of the node. visits: int,", "start(): \"\"\"Reset all the variables to begin a new measurement.\"\"\" DepthMeasure.current_depth = 0", "the match. # Return float, score of the Node `board` \"\"\" pass def", "memory: empty dictionary, required(*), default None - this dictionary will store all searches", "color color = 1 if color == -1 else -1 winner = helpers.check_winner(sim_board)", "adds a visit in the counter and the score of the `board` to", "from ...exceptions import BadImplementation class SimulationStrategy(RandomStrategy): \"\"\"Simulation Stragegy provide the method necessary to", "\"\"\" winner = helpers.check_winner(board) if winner: return winner sim_board = copy(board) end =", "# DepthMeasure.add() if not self._children: childs = [] board = self.board for column,", "know. \"\"\" if DepthMeasure.current_depth > DepthMeasure.deepiest: DepthMeasure.deepiest = DepthMeasure.current_depth DepthMeasure.current_depth = 0 @staticmethod", "children. (*) UBC1 is an algorithm which calculates the distribution of search effort", "`False` when it do not. 
\"\"\" if self.parent and self._visits == 0: score", "add_score(self, value): self.__score += value if self.parent: self.parent.add_score(value) def add_visit(self): self._visits += 1", "to be simulated color: int, required, id of the owner of the board", "0 DepthMeasure.deepiest = 0 @staticmethod def add(): \"\"\"Add more 1 depth.\"\"\" DepthMeasure.current_depth +=", "a complex node for a Monte Carlo Tree Searches. This class has specific", "board state. All turns are played randomly till the board hits a terminal", "memory=None, parent=None, position=None, color=1): self.board = board self.color = color self.position = position", "a rollout if this is the first visits of the Node, otherwise it", "player id 1. This method is called every rollout. The rollout occur in", "simulate(self, board, color=-1): \"\"\"Simulate a match to the end from a given board", "Id of the winner of the simulation or zero in case the simulation", "DepthMeasure.deepiest = 0 @staticmethod def add(): \"\"\"Add more 1 depth.\"\"\" DepthMeasure.current_depth += 1", "__init_subclass__(cls): if 'rollout_score' not in cls.__dict__: raise BadImplementation(cls.__name__, 'rollout_score') @property def UCB1(self): if", "This class __don't work on its own__, because it doesn't have a default", "from . import Strategy, RandomStrategy, ZobristHashingStrategy from . import RandomStrategy from ... import", "the Node board state (`self.board`), from the perspective of the player id 1.", "`children` method to generate a new Node. # Arguments: board: matrix, new board", "to implement the `rollout_score` method which is an evaluation of a given state.", "the column which generated the board current `board`. 
color: int, required (**), default", "False def add_score(self, value): self.__score += value if self.parent: self.parent.add_score(value) def add_visit(self): self._visits", "= position self.parent = parent self.memory = memory if memory is not None", "because it doesn't have a default board evaluation algorithm. When inherited it's required", "evaluation, for the Node board state (`self.board`), from the perspective of the player", "table and it is __not__ recalculated during the match. # Return float, score", "perform the Monte Carlo Tree Search. This class __don't work on its own__,", "store all searches with zobrist hashing. parent: `Node` object, required (**), default None", "self.parent and self._visits == 0: score = self.rollout_score() self.add_score(score) self.add_visit() return True else:", "from copy import copy, deepcopy from . import Strategy, RandomStrategy, ZobristHashingStrategy from .", "self.random_choice(sim_board) if column == None: #no options left = tie break next_pos =", "turns are played randomly till the board hits a terminal state then the", "is an evaluation of a given state. This class uses a __zobrist hashing", "[documentation](./agents#agentsimulation) AgentMCTSNN: [documentation](./agents#agentmctsnn) \"\"\" def __init__(self, board, memory=None, parent=None, position=None, color=1): self.board =", "is called every rollout. The rollout occur in the first visit of the", "is an algorithm which calculates the distribution of search effort for exploration and", "winner sim_board = copy(board) end = False while not end: column = self.random_choice(sim_board)", "value is returned. 
# Arguments board: matrix, required, board state to be simulated", "node = self.new_node(new_board, column) childs.append(node) self._children = childs return self._children if len(self._children) >", "= memory if memory is not None else parent.memory self.__score = 0 self._visits", "int, required (**), default None - Index of the column which generated the", "of the player id 1. This method is called every rollout. The rollout", "# Arguments: board: matrix, new board state column: int, index of the last", "the board state # Return Id of the winner of the simulation or", "== 0: score = self.rollout_score() self.add_score(score) self.add_visit() return True else: return False def", "self.new_node(new_board, column) childs.append(node) self._children = childs return self._children if len(self._children) > 0 else", "is stored in the zobrist table and it is __not__ recalculated during the", "search method, when it creates a new depth self.run_search() DepthMeasure.reset() DepthMeasure.print() return ...", "value (*) of the node. visits: int, total number of Node visits value:", "self._get_memory(new_board, -self.color) if not node: node = self.new_node(new_board, column) childs.append(node) self._children = childs", "state # Return Id of the winner of the simulation or zero in", "of the `board` to the Node and all the parents above in the", "rollout_score(self): \"\"\"This method must return a score (float), evaluation, for the Node board", "math.inf lnN = math.log1p(self.parent.visits) return self.__score/self._visits + 2*math.sqrt(lnN/self._visits) @property def visits(self): return self._visits", "stored in the zobrist table and it is __not__ recalculated during the match.", "childs.append(node) self._children = childs return self._children if len(self._children) > 0 else None def", "in case the simulation ends in a draw. # Example AgentSimulation: [documentation](./agents#agentsimulation) \"\"\"", "Node above in the tree hierarchy. 
position: int, required (**), default None -", "self def rollout_score(self): \"\"\"This method must return a score (float), evaluation, for the", "from ... import helpers from ...exceptions import BadImplementation class SimulationStrategy(RandomStrategy): \"\"\"Simulation Stragegy provide", "required, board state to be simulated color: int, required, id of the owner", "= self.new_node(new_board, column) childs.append(node) self._children = childs return self._children if len(self._children) > 0", "its own__, because it doesn't have a default board evaluation algorithm. When inherited", "of all scores made by the Node and his children. (*) UBC1 is", "AgentSimulation: [documentation](./agents#agentsimulation) \"\"\" winner = helpers.check_winner(board) if winner: return winner sim_board = copy(board)", "the parents above in the tree. # Return `True` when the rollout occur,", "not self.time_out: # place the line \"DepthMeasure.add()\" inside the # search method, when", "from the memory, all the childen Nodes. Each node is generated from the", "node def _get_memory(self, board, color): hash = self.hash(board, color) if hash in self.memory:", "the number of visits - the total number of score is the sum", "to begin a new measurement.\"\"\" DepthMeasure.current_depth = 0 DepthMeasure.deepiest = 0 @staticmethod def", "winner of the simulation or zero in case the simulation ends in a", "# search method, when it creates a new depth self.run_search() DepthMeasure.reset() DepthMeasure.print() return", "Search. This class __don't work on its own__, because it doesn't have a", "rollout if this is the first visits of the Node, otherwise it will", "to help when measuring the depth of a tree search. Useful when debugging", "- this dictionary will store all searches with zobrist hashing. parent: `Node` object,", "of a given state. 
This class uses a __zobrist hashing table__ to optimize", "if not self._children: childs = [] board = self.board for column, row in", "Return `True` when the rollout occur, `False` when it do not. \"\"\" if", "state to be simulated color: int, required, id of the owner of the", "class SimulationStrategy(RandomStrategy): \"\"\"Simulation Stragegy provide the method necessary to simulate matches.\"\"\" def simulate(self,", "self.memory = memory if memory is not None else parent.memory self.__score = 0", "occur, `False` when it do not. \"\"\" if self.parent and self._visits == 0:", "hashing. parent: `Node` object, required (**), default None - Node above in the", "UCB1 value (*) of the node. visits: int, total number of Node visits", "board, column): \"\"\"This method is called by `children` method to generate a new", "= color self.position = position self.parent = parent self.memory = memory if memory", "in the counter and the score of the `board` to the Node and", "from the perspective of the player id 1. This method is called every", "rollout occur, `False` when it do not. \"\"\" if self.parent and self._visits ==", "math from copy import copy, deepcopy from . import Strategy, RandomStrategy, ZobristHashingStrategy from", "on its own__, because it doesn't have a default board evaluation algorithm. When", "evaluation algorithm. When inherited it's required to implement the `rollout_score` method which is", "end from a given board state. All turns are played randomly till the", "matches.\"\"\" def simulate(self, board, color=-1): \"\"\"Simulate a match to the end from a", "return True else: return False def add_score(self, value): self.__score += value if self.parent:", "the first visit of the Node, the value is stored in the zobrist", "doesn't have a default board evaluation algorithm. When inherited it's required to implement", "This method is called every rollout. The rollout occur in the first visit", "do not. 
\"\"\" if self.parent and self._visits == 0: score = self.rollout_score() self.add_score(score)", "-self.color) if not node: node = self.new_node(new_board, column) childs.append(node) self._children = childs return", "the sum of all scores made by the Node and his children. (*)", "= DepthMeasure.current_depth DepthMeasure.current_depth = 0 @staticmethod def print(): \"\"\"Print the deepiest depth reached.\"\"\"", "\"\"\"Node rollout. It will execute a rollout if this is the first visits", "@staticmethod def add(): \"\"\"Add more 1 depth.\"\"\" DepthMeasure.current_depth += 1 @staticmethod def reset():", "Stragegy provide the method necessary to simulate matches.\"\"\" def simulate(self, board, color=-1): \"\"\"Simulate", "to optimize the search. # Arguments board: matrix, required, board state memory: empty", "0 return self.__score/self._visits def _save(self): self.memory[self._hash] = self def rollout_score(self): \"\"\"This method must", "the Node, otherwise it will return `False`. Each rollout adds a visit in", "deepcopy(board) board[column,row] = 0 node = self._get_memory(new_board, -self.color) if not node: node =", "given state. This class uses a __zobrist hashing table__ to optimize the search.", "of the current Node. \"\"\" # DepthMeasure.add() if not self._children: childs = []", "the simulation or zero in case the simulation ends in a draw. #", "have a default board evaluation algorithm. When inherited it's required to implement the", "algorithm. When inherited it's required to implement the `rollout_score` method which is an", "column: int, index of the last column # Return A new instance of", "terminal state then the value is returned. 
# Arguments board: matrix, required, board", "node is generated from the available possitions in the `board` of the current", "@property def value(self): if self._visits == 0: return 0 return self.__score/self._visits def _save(self):", "__don't work on its own__, because it doesn't have a default board evaluation", "it will return `False`. Each rollout adds a visit in the counter and", "required (**), default 1 (*) required when creating a new root `Node` object.", "if 'rollout_score' not in cls.__dict__: raise BadImplementation(cls.__name__, 'rollout_score') @property def UCB1(self): if self._visits", "Node(SimulationStrategy, ZobristHashingStrategy): \"\"\"Node class is a base for a complex node for a", "generate a new Node. # Arguments: board: matrix, new board state column: int,", "if self.parent: self.parent.add_score(value) def add_visit(self): self._visits += 1 if self.parent: self.parent.add_visit() def children(self):", "Monte Carlo Tree Searches. This class has specific methods to perform the Monte", "= math.log1p(self.parent.visits) return self.__score/self._visits + 2*math.sqrt(lnN/self._visits) @property def visits(self): return self._visits @property def", "`Node` object, required (**), default None - Node above in the tree hierarchy.", "the distribution of search effort for exploration and exploitation in the Monte Carlo", "method is called every rollout. The rollout occur in the first visit of", "is called by `children` method to generate a new Node. # Arguments: board:", "\"\"\" if DepthMeasure.current_depth > DepthMeasure.deepiest: DepthMeasure.deepiest = DepthMeasure.current_depth DepthMeasure.current_depth = 0 @staticmethod def", "zobrist hashing. parent: `Node` object, required (**), default None - Node above in", "Search strategy. 
# Example AgentMonteCarlo: [documentation](./agents#agentsimulation) AgentMCTSNN: [documentation](./agents#agentmctsnn) \"\"\" def __init__(self, board, memory=None,", "work on its own__, because it doesn't have a default board evaluation algorithm.", "Strategy, RandomStrategy, ZobristHashingStrategy from . import RandomStrategy from ... import helpers from ...exceptions", "> 0 else None def new_node(self, board, column): \"\"\"This method is called by", "method is called by `children` method to generate a new Node. # Arguments:", "... import helpers from ...exceptions import BadImplementation class SimulationStrategy(RandomStrategy): \"\"\"Simulation Stragegy provide the", "Return A new instance of `Node` object. \"\"\" node_type = type(self) node =", "type(self) node = node_type(board=board, parent=self, position=column, color=-self.color) return node def _get_memory(self, board, color):", "helpers.check_winner(board) if winner: return winner sim_board = copy(board) end = False while not", "ends in a draw. # Example AgentSimulation: [documentation](./agents#agentsimulation) \"\"\" winner = helpers.check_winner(board) if", "value is stored in the zobrist table and it is __not__ recalculated during", "zero in case the simulation ends in a draw. # Example AgentSimulation: [documentation](./agents#agentsimulation)", "else -1 winner = helpers.check_winner(sim_board) if winner: return winner return 0 class DepthMeasure:", "new depth self.run_search() DepthMeasure.reset() DepthMeasure.print() return ... ``` \"\"\" current_depth = 0 deepiest", "\"\"\" # DepthMeasure.add() if not self._children: childs = [] board = self.board for", "Carlo Tree Search strategy. # Example AgentMonteCarlo: [documentation](./agents#agentsimulation) AgentMCTSNN: [documentation](./agents#agentmctsnn) \"\"\" def __init__(self,", "class AgentNew(AgentBase, TimerStrategy): def action(self, board): DepthMeasure.start() self.start_timer(...) 
while not self.time_out: # place", "hits a terminal state then the value is returned. # Arguments board: matrix,", "to perform the Monte Carlo Tree Search. This class __don't work on its", "Arguments board: matrix, required, board state memory: empty dictionary, required(*), default None -", "return False def add_score(self, value): self.__score += value if self.parent: self.parent.add_score(value) def add_visit(self):", "Tree Search strategy. # Example AgentMonteCarlo: [documentation](./agents#agentsimulation) AgentMCTSNN: [documentation](./agents#agentmctsnn) \"\"\" def __init__(self, board,", "value if self.parent: self.parent.add_score(value) def add_visit(self): self._visits += 1 if self.parent: self.parent.add_visit() def", "while not self.time_out: # place the line \"DepthMeasure.add()\" inside the # search method,", "search. # Arguments board: matrix, required, board state memory: empty dictionary, required(*), default", "the tree hierarchy. position: int, required (**), default None - Index of the", "action(self, board): DepthMeasure.start() self.start_timer(...) while not self.time_out: # place the line \"DepthMeasure.add()\" inside", "if parent: self._hash = self._gen_hash() self._save() def __init_subclass__(cls): if 'rollout_score' not in cls.__dict__:", "`board` of the current Node. \"\"\" # DepthMeasure.add() if not self._children: childs =", "if self._visits == 0: return 0 return self.__score/self._visits def _save(self): self.memory[self._hash] = self", "winner = helpers.check_winner(board) if winner: return winner sim_board = copy(board) end = False", "randomly till the board hits a terminal state then the value is returned.", "copy import copy, deepcopy from . import Strategy, RandomStrategy, ZobristHashingStrategy from . import", "the Monte Carlo Tree Search. This class __don't work on its own__, because", "Node. 
# Arguments: board: matrix, new board state column: int, index of the", "left = tie break next_pos = helpers.next_position(sim_board, column) sim_board[column, next_pos] = color color", "\"\"\"Print the deepiest depth reached.\"\"\" print('Last play depth:', DepthMeasure.deepiest) class Node(SimulationStrategy, ZobristHashingStrategy): \"\"\"Node", "a new search episode. Save the current depth if it's the deepiest till", "== 0: return math.inf lnN = math.log1p(self.parent.visits) return self.__score/self._visits + 2*math.sqrt(lnN/self._visits) @property def", "self.time_out: # place the line \"DepthMeasure.add()\" inside the # search method, when it", "self._visits == 0: return 0 return self.__score/self._visits def _save(self): self.memory[self._hash] = self def", "owner of the board state # Return Id of the winner of the", "board, color=-1): \"\"\"Simulate a match to the end from a given board state.", "Arguments board: matrix, required, board state to be simulated color: int, required, id", "(**) required when creating a new \"children\" `Node` object (`new_node` method). # Properties", "reached.\"\"\" print('Last play depth:', DepthMeasure.deepiest) class Node(SimulationStrategy, ZobristHashingStrategy): \"\"\"Node class is a base", "None else parent.memory self.__score = 0 self._visits = 0 self._children = None super().__init__()", "a new Node. # Arguments: board: matrix, new board state column: int, index", "= board self.color = color self.position = position self.parent = parent self.memory =", "0: return 0 return self.__score/self._visits def _save(self): self.memory[self._hash] = self def rollout_score(self): \"\"\"This", "get from the memory, all the childen Nodes. 
Each node is generated from", "if len(self._children) > 0 else None def new_node(self, board, column): \"\"\"This method is", "number of score divided by the number of visits - the total number", "class DepthMeasure: \"\"\"Use this class to help when measuring the depth of a", "place the line \"DepthMeasure.add()\" inside the # search method, when it creates a", "Node, otherwise it will return `False`. Each rollout adds a visit in the", "DepthMeasure.current_depth = 0 @staticmethod def print(): \"\"\"Print the deepiest depth reached.\"\"\" print('Last play", "board evaluation algorithm. When inherited it's required to implement the `rollout_score` method which", "self._save() def __init_subclass__(cls): if 'rollout_score' not in cls.__dict__: raise BadImplementation(cls.__name__, 'rollout_score') @property def", "Searches. This class has specific methods to perform the Monte Carlo Tree Search.", "self.rollout_score() self.add_score(score) self.add_visit() return True else: return False def add_score(self, value): self.__score +=", "state. All turns are played randomly till the board hits a terminal state", "exploitation in the Monte Carlo Tree Search strategy. # Example AgentMonteCarlo: [documentation](./agents#agentsimulation) AgentMCTSNN:", "int, required, id of the owner of the board state # Return Id", "self.color = color self.position = position self.parent = parent self.memory = memory if", "UBC1 is an algorithm which calculates the distribution of search effort for exploration", "to the end from a given board state. All turns are played randomly", "color): hash = self.hash(board, color) if hash in self.memory: return hash else: return", "return winner return 0 class DepthMeasure: \"\"\"Use this class to help when measuring", "will store all searches with zobrist hashing. parent: `Node` object, required (**), default", "the variables to begin a new measurement.\"\"\" DepthMeasure.current_depth = 0 DepthMeasure.deepiest = 0", "current `board`. 
color: int, required (**), default 1 (*) required when creating a", "False while not end: column = self.random_choice(sim_board) if column == None: #no options", "__not__ recalculated during the match. # Return float, score of the Node `board`", "board self.color = color self.position = position self.parent = parent self.memory = memory", "required when creating a new root `Node` object. (**) required when creating a", "__zobrist hashing table__ to optimize the search. # Arguments board: matrix, required, board", "Example AgentMonteCarlo: [documentation](./agents#agentsimulation) AgentMCTSNN: [documentation](./agents#agentmctsnn) \"\"\" def __init__(self, board, memory=None, parent=None, position=None, color=1):", "a given board state. All turns are played randomly till the board hits", "otherwise it will return `False`. Each rollout adds a visit in the counter", "and it is __not__ recalculated during the match. # Return float, score of", "zobrist table and it is __not__ recalculated during the match. # Return float,", "from the available possitions in the `board` of the current Node. \"\"\" #", "Example AgentSimulation: [documentation](./agents#agentsimulation) \"\"\" winner = helpers.check_winner(board) if winner: return winner sim_board =", "BadImplementation(cls.__name__, 'rollout_score') @property def UCB1(self): if self._visits == 0: return math.inf lnN =", "generated from the available possitions in the `board` of the current Node. \"\"\"", "required (**), default None - Index of the column which generated the board", "... ``` \"\"\" current_depth = 0 deepiest = 0 @staticmethod def start(): \"\"\"Reset", "score of the `board` to the Node and all the parents above in", "index of the last column # Return A new instance of `Node` object.", "node. visits: int, total number of Node visits value: float, total number of", "RandomStrategy, ZobristHashingStrategy from . import RandomStrategy from ... 
import helpers from ...exceptions import", "the Node, the value is stored in the zobrist table and it is", "0 node = self._get_memory(new_board, -self.color) if not node: node = self.new_node(new_board, column) childs.append(node)", "board[column,row] = self.color new_board = deepcopy(board) board[column,row] = 0 node = self._get_memory(new_board, -self.color)", "and the score of the `board` to the Node and all the parents", "if winner: return winner sim_board = copy(board) end = False while not end:", "board current `board`. color: int, required (**), default 1 (*) required when creating", "= 1 if color == -1 else -1 winner = helpers.check_winner(sim_board) if winner:", "self._gen_hash() self._save() def __init_subclass__(cls): if 'rollout_score' not in cls.__dict__: raise BadImplementation(cls.__name__, 'rollout_score') @property", "board: matrix, required, board state memory: empty dictionary, required(*), default None - this", "Carlo Tree Searches. This class has specific methods to perform the Monte Carlo", "color=-1): \"\"\"Simulate a match to the end from a given board state. All", "provide the method necessary to simulate matches.\"\"\" def simulate(self, board, color=-1): \"\"\"Simulate a", "this is the first visits of the Node, otherwise it will return `False`.", "`board`. color: int, required (**), default 1 (*) required when creating a new", "\"\"\"Simulate a match to the end from a given board state. All turns", "# Arguments board: matrix, required, board state to be simulated color: int, required,", "\"\"\"Use this class to help when measuring the depth of a tree search.", "depth.\"\"\" DepthMeasure.current_depth += 1 @staticmethod def reset(): \"\"\"Reset the depth before start a", "= self def rollout_score(self): \"\"\"This method must return a score (float), evaluation, for", "depth before start a new search episode. Save the current depth if it's", "float, UCB1 value (*) of the node. visits: int, total number of Node", "the board current `board`. 
color: int, required (**), default 1 (*) required when", "or get from the memory, all the childen Nodes. Each node is generated", "of `Node` object. \"\"\" node_type = type(self) node = node_type(board=board, parent=self, position=column, color=-self.color)", "and self._visits == 0: score = self.rollout_score() self.add_score(score) self.add_visit() return True else: return", "for a complex node for a Monte Carlo Tree Searches. This class has", "Monte Carlo Tree Search strategy. # Example AgentMonteCarlo: [documentation](./agents#agentsimulation) AgentMCTSNN: [documentation](./agents#agentmctsnn) \"\"\" def", "to generate a new Node. # Arguments: board: matrix, new board state column:", "board, memory=None, parent=None, position=None, color=1): self.board = board self.color = color self.position =", "`rollout_score` method which is an evaluation of a given state. This class uses", "the last column # Return A new instance of `Node` object. \"\"\" node_type", "def visits(self): return self._visits @property def value(self): if self._visits == 0: return 0", "1. This method is called every rollout. The rollout occur in the first", "the Node and his children. (*) UBC1 is an algorithm which calculates the", "self.parent: self.parent.add_score(value) def add_visit(self): self._visits += 1 if self.parent: self.parent.add_visit() def children(self): \"\"\"Get", "__init__(self, board, memory=None, parent=None, position=None, color=1): self.board = board self.color = color self.position", "return self._visits @property def value(self): if self._visits == 0: return 0 return self.__score/self._visits", "visit in the counter and the score of the `board` to the Node", "node = self._get_memory(new_board, -self.color) if not node: node = self.new_node(new_board, column) childs.append(node) self._children", "0 @staticmethod def start(): \"\"\"Reset all the variables to begin a new measurement.\"\"\"", "hierarchy. 
position: int, required (**), default None - Index of the column which", "from a given board state. All turns are played randomly till the board", "the node. visits: int, total number of Node visits value: float, total number", "color == -1 else -1 winner = helpers.check_winner(sim_board) if winner: return winner return", "`board` \"\"\" pass def rollout(self): \"\"\"Node rollout. It will execute a rollout if", "\"\"\"This method is called by `children` method to generate a new Node. #", "play depth:', DepthMeasure.deepiest) class Node(SimulationStrategy, ZobristHashingStrategy): \"\"\"Node class is a base for a", "0: score = self.rollout_score() self.add_score(score) self.add_visit() return True else: return False def add_score(self,", "for column, row in helpers.available_positions(board): board[column,row] = self.color new_board = deepcopy(board) board[column,row] =", "the method necessary to simulate matches.\"\"\" def simulate(self, board, color=-1): \"\"\"Simulate a match", "super().__init__() if parent: self._hash = self._gen_hash() self._save() def __init_subclass__(cls): if 'rollout_score' not in", "the total number of score is the sum of all scores made by", "score (float), evaluation, for the Node board state (`self.board`), from the perspective of", "return node def _get_memory(self, board, color): hash = self.hash(board, color) if hash in", "winner = helpers.check_winner(sim_board) if winner: return winner return 0 class DepthMeasure: \"\"\"Use this", "memory is not None else parent.memory self.__score = 0 self._visits = 0 self._children", "self.start_timer(...) 
while not self.time_out: # place the line \"DepthMeasure.add()\" inside the # search", "visit of the Node, the value is stored in the zobrist table and", "the # search method, when it creates a new depth self.run_search() DepthMeasure.reset() DepthMeasure.print()", "float, total number of score divided by the number of visits - the", "= 0 deepiest = 0 @staticmethod def start(): \"\"\"Reset all the variables to", "@staticmethod def print(): \"\"\"Print the deepiest depth reached.\"\"\" print('Last play depth:', DepthMeasure.deepiest) class", "+= 1 @staticmethod def reset(): \"\"\"Reset the depth before start a new search", "(**), default 1 (*) required when creating a new root `Node` object. (**)", "required, board state memory: empty dictionary, required(*), default None - this dictionary will", "when creating a new root `Node` object. (**) required when creating a new", "deepiest till know. \"\"\" if DepthMeasure.current_depth > DepthMeasure.deepiest: DepthMeasure.deepiest = DepthMeasure.current_depth DepthMeasure.current_depth =", "the deepiest till know. \"\"\" if DepthMeasure.current_depth > DepthMeasure.deepiest: DepthMeasure.deepiest = DepthMeasure.current_depth DepthMeasure.current_depth", "dictionary, required(*), default None - this dictionary will store all searches with zobrist", "it creates a new depth self.run_search() DepthMeasure.reset() DepthMeasure.print() return ... ``` \"\"\" current_depth", "new instance of `Node` object. \"\"\" node_type = type(self) node = node_type(board=board, parent=self,", "deepcopy from . import Strategy, RandomStrategy, ZobristHashingStrategy from . import RandomStrategy from ...", "import BadImplementation class SimulationStrategy(RandomStrategy): \"\"\"Simulation Stragegy provide the method necessary to simulate matches.\"\"\"", "instance of `Node` object. \"\"\" node_type = type(self) node = node_type(board=board, parent=self, position=column,", "searches with zobrist hashing. 
parent: `Node` object, required (**), default None - Node", "score = self.rollout_score() self.add_score(score) self.add_visit() return True else: return False def add_score(self, value):", "color: int, required (**), default 1 (*) required when creating a new root", "Properties UCB1: float, UCB1 value (*) of the node. visits: int, total number", "= color color = 1 if color == -1 else -1 winner =", "1 depth.\"\"\" DepthMeasure.current_depth += 1 @staticmethod def reset(): \"\"\"Reset the depth before start", "effort for exploration and exploitation in the Monte Carlo Tree Search strategy. #", "deepiest = 0 @staticmethod def start(): \"\"\"Reset all the variables to begin a", "if self.parent: self.parent.add_visit() def children(self): \"\"\"Get all the childen Nodes. Generate or get", "more 1 depth.\"\"\" DepthMeasure.current_depth += 1 @staticmethod def reset(): \"\"\"Reset the depth before", "will return `False`. Each rollout adds a visit in the counter and the", "DepthMeasure.current_depth DepthMeasure.current_depth = 0 @staticmethod def print(): \"\"\"Print the deepiest depth reached.\"\"\" print('Last", "Example ``` class AgentNew(AgentBase, TimerStrategy): def action(self, board): DepthMeasure.start() self.start_timer(...) while not self.time_out:", "the owner of the board state # Return Id of the winner of", "Each node is generated from the available possitions in the `board` of the", "memory if memory is not None else parent.memory self.__score = 0 self._visits =", "the childen Nodes. 
Generate or get from the memory, all the childen Nodes.", "None def new_node(self, board, column): \"\"\"This method is called by `children` method to", "value: float, total number of score divided by the number of visits -", "= 0 @staticmethod def start(): \"\"\"Reset all the variables to begin a new", "if DepthMeasure.current_depth > DepthMeasure.deepiest: DepthMeasure.deepiest = DepthMeasure.current_depth DepthMeasure.current_depth = 0 @staticmethod def print():", "the depth of a tree search. Useful when debugging and measuring performance optimization.", "\"\"\" if self.parent and self._visits == 0: score = self.rollout_score() self.add_score(score) self.add_visit() return", "table__ to optimize the search. # Arguments board: matrix, required, board state memory:", "performance optimization. # Example ``` class AgentNew(AgentBase, TimerStrategy): def action(self, board): DepthMeasure.start() self.start_timer(...)", "class uses a __zobrist hashing table__ to optimize the search. # Arguments board:", "the line \"DepthMeasure.add()\" inside the # search method, when it creates a new", "`board` to the Node and all the parents above in the tree. #", "required, id of the owner of the board state # Return Id of", "distribution of search effort for exploration and exploitation in the Monte Carlo Tree", "AgentMCTSNN: [documentation](./agents#agentmctsnn) \"\"\" def __init__(self, board, memory=None, parent=None, position=None, color=1): self.board = board", ". import RandomStrategy from ... import helpers from ...exceptions import BadImplementation class SimulationStrategy(RandomStrategy):", "depth reached.\"\"\" print('Last play depth:', DepthMeasure.deepiest) class Node(SimulationStrategy, ZobristHashingStrategy): \"\"\"Node class is a", "the Node `board` \"\"\" pass def rollout(self): \"\"\"Node rollout. 
It will execute a", "= self.rollout_score() self.add_score(score) self.add_visit() return True else: return False def add_score(self, value): self.__score", "current depth if it's the deepiest till know. \"\"\" if DepthMeasure.current_depth > DepthMeasure.deepiest:", "the value is stored in the zobrist table and it is __not__ recalculated", "of score divided by the number of visits - the total number of", "When inherited it's required to implement the `rollout_score` method which is an evaluation", "while not end: column = self.random_choice(sim_board) if column == None: #no options left", "of score is the sum of all scores made by the Node and" ]
[ "import textgrid import sys if len(sys.argv) != 2: print(\"textgrid-to-audacity.py [filename]\") quit() tg =", "textgrid.TextGrid.fromFile(sys.argv[1]) started = False start=0.0 end=0.0 text=list() for i in tg[0]: if i.mark", "!= '': if not started: start = i.minTime started = True else: started", "python import textgrid import sys if len(sys.argv) != 2: print(\"textgrid-to-audacity.py [filename]\") quit() tg", "i.mark != '': if not started: start = i.minTime started = True else:", "= i.maxTime text.append(i.mark) else: if started: print('{}\\t{}\\t{}'.format(start, end, ' '.join(text))) start = 0.0", "text.append(i.mark) else: if started: print('{}\\t{}\\t{}'.format(start, end, ' '.join(text))) start = 0.0 end =", "else: if started: print('{}\\t{}\\t{}'.format(start, end, ' '.join(text))) start = 0.0 end = 0.0", "in tg[0]: if i.mark != '': if not started: start = i.minTime started", "end = i.maxTime text.append(i.mark) else: if started: print('{}\\t{}\\t{}'.format(start, end, ' '.join(text))) start =", "False start=0.0 end=0.0 text=list() for i in tg[0]: if i.mark != '': if", "import sys if len(sys.argv) != 2: print(\"textgrid-to-audacity.py [filename]\") quit() tg = textgrid.TextGrid.fromFile(sys.argv[1]) started", "if not started: start = i.minTime started = True else: started = True", "[filename]\") quit() tg = textgrid.TextGrid.fromFile(sys.argv[1]) started = False start=0.0 end=0.0 text=list() for i", "quit() tg = textgrid.TextGrid.fromFile(sys.argv[1]) started = False start=0.0 end=0.0 text=list() for i in", "print(\"textgrid-to-audacity.py [filename]\") quit() tg = textgrid.TextGrid.fromFile(sys.argv[1]) started = False start=0.0 end=0.0 text=list() for", "end, ' '.join(text))) start = 0.0 end = 0.0 text.clear() started = False", "2: print(\"textgrid-to-audacity.py [filename]\") quit() tg = textgrid.TextGrid.fromFile(sys.argv[1]) started = False start=0.0 end=0.0 text=list()", "!= 2: print(\"textgrid-to-audacity.py [filename]\") quit() tg = 
textgrid.TextGrid.fromFile(sys.argv[1]) started = False start=0.0 end=0.0", "if started: print('{}\\t{}\\t{}'.format(start, end, ' '.join(text))) start = 0.0 end = 0.0 text.clear()", "True end = i.maxTime text.append(i.mark) else: if started: print('{}\\t{}\\t{}'.format(start, end, ' '.join(text))) start", "started: print('{}\\t{}\\t{}'.format(start, end, ' '.join(text))) start = 0.0 end = 0.0 text.clear() started", "i.minTime started = True else: started = True end = i.maxTime text.append(i.mark) else:", "tg = textgrid.TextGrid.fromFile(sys.argv[1]) started = False start=0.0 end=0.0 text=list() for i in tg[0]:", "= True end = i.maxTime text.append(i.mark) else: if started: print('{}\\t{}\\t{}'.format(start, end, ' '.join(text)))", "not started: start = i.minTime started = True else: started = True end", "print('{}\\t{}\\t{}'.format(start, end, ' '.join(text))) start = 0.0 end = 0.0 text.clear() started =", "for i in tg[0]: if i.mark != '': if not started: start =", "= True else: started = True end = i.maxTime text.append(i.mark) else: if started:", "start=0.0 end=0.0 text=list() for i in tg[0]: if i.mark != '': if not", "= textgrid.TextGrid.fromFile(sys.argv[1]) started = False start=0.0 end=0.0 text=list() for i in tg[0]: if", "text=list() for i in tg[0]: if i.mark != '': if not started: start", "start = i.minTime started = True else: started = True end = i.maxTime", "started = False start=0.0 end=0.0 text=list() for i in tg[0]: if i.mark !=", "True else: started = True end = i.maxTime text.append(i.mark) else: if started: print('{}\\t{}\\t{}'.format(start,", "= False start=0.0 end=0.0 text=list() for i in tg[0]: if i.mark != '':", "if len(sys.argv) != 2: print(\"textgrid-to-audacity.py [filename]\") quit() tg = textgrid.TextGrid.fromFile(sys.argv[1]) started = False", "tg[0]: if i.mark != '': if not started: start = i.minTime started =", "'': if not started: start = i.minTime started = True else: started =", "started: start = i.minTime started = True else: 
started = True end =", "i.maxTime text.append(i.mark) else: if started: print('{}\\t{}\\t{}'.format(start, end, ' '.join(text))) start = 0.0 end", "started = True else: started = True end = i.maxTime text.append(i.mark) else: if", "textgrid import sys if len(sys.argv) != 2: print(\"textgrid-to-audacity.py [filename]\") quit() tg = textgrid.TextGrid.fromFile(sys.argv[1])", "sys if len(sys.argv) != 2: print(\"textgrid-to-audacity.py [filename]\") quit() tg = textgrid.TextGrid.fromFile(sys.argv[1]) started =", "started = True end = i.maxTime text.append(i.mark) else: if started: print('{}\\t{}\\t{}'.format(start, end, '", "= i.minTime started = True else: started = True end = i.maxTime text.append(i.mark)", "len(sys.argv) != 2: print(\"textgrid-to-audacity.py [filename]\") quit() tg = textgrid.TextGrid.fromFile(sys.argv[1]) started = False start=0.0", "else: started = True end = i.maxTime text.append(i.mark) else: if started: print('{}\\t{}\\t{}'.format(start, end,", "if i.mark != '': if not started: start = i.minTime started = True", "end=0.0 text=list() for i in tg[0]: if i.mark != '': if not started:", "#!/usr/bin/env python import textgrid import sys if len(sys.argv) != 2: print(\"textgrid-to-audacity.py [filename]\") quit()", "i in tg[0]: if i.mark != '': if not started: start = i.minTime" ]
[ "empty test_<db name> automatically config = super(LegacyDiscoverRunner, self).setup_databases(**kwargs) # Invoke any custom ddl", "that. script_path = os.path.join(settings.MANAGE_ROOT, 'legacy-schema.sql') logger.info(\"Initializing DB with script. [Path: {}]\".format(script_path)) with open(script_path,", "Super will create an empty test_<db name> automatically config = super(LegacyDiscoverRunner, self).setup_databases(**kwargs) #", "django.conf import settings from django.db import connections logger = logging.getLogger(__name__) class LegacyDiscoverRunner(DiscoverRunner): \"\"\"", "in shared environments we assume DB control in testing\"\"\" # Super will create", "# Super will create an empty test_<db name> automatically config = super(LegacyDiscoverRunner, self).setup_databases(**kwargs)", "LegacyDiscoverRunner(DiscoverRunner): \"\"\" See https://docs.djangoproject.com/en/1.7/topics/testing/advanced/#defining-a-test-runner \"\"\" def setup_databases(self, **kwargs): \"\"\"Though our schema is readonly", "from django.test.runner import DiscoverRunner from django.conf import settings from django.db import connections logger", "\"\"\" See https://docs.djangoproject.com/en/1.7/topics/testing/advanced/#defining-a-test-runner \"\"\" def setup_databases(self, **kwargs): \"\"\"Though our schema is readonly in", "import settings from django.db import connections logger = logging.getLogger(__name__) class LegacyDiscoverRunner(DiscoverRunner): \"\"\" See", "import logging from django.test.runner import DiscoverRunner from django.conf import settings from django.db import", "ddl to create the schema after that. 
script_path = os.path.join(settings.MANAGE_ROOT, 'legacy-schema.sql') logger.info(\"Initializing DB", "class LegacyDiscoverRunner(DiscoverRunner): \"\"\" See https://docs.djangoproject.com/en/1.7/topics/testing/advanced/#defining-a-test-runner \"\"\" def setup_databases(self, **kwargs): \"\"\"Though our schema is", "testing\"\"\" # Super will create an empty test_<db name> automatically config = super(LegacyDiscoverRunner,", "config = super(LegacyDiscoverRunner, self).setup_databases(**kwargs) # Invoke any custom ddl to create the schema", "DiscoverRunner from django.conf import settings from django.db import connections logger = logging.getLogger(__name__) class", "contextlib import os import logging from django.test.runner import DiscoverRunner from django.conf import settings", "create an empty test_<db name> automatically config = super(LegacyDiscoverRunner, self).setup_databases(**kwargs) # Invoke any", "logging.getLogger(__name__) class LegacyDiscoverRunner(DiscoverRunner): \"\"\" See https://docs.djangoproject.com/en/1.7/topics/testing/advanced/#defining-a-test-runner \"\"\" def setup_databases(self, **kwargs): \"\"\"Though our schema", "will create an empty test_<db name> automatically config = super(LegacyDiscoverRunner, self).setup_databases(**kwargs) # Invoke", "from django.db import connections logger = logging.getLogger(__name__) class LegacyDiscoverRunner(DiscoverRunner): \"\"\" See https://docs.djangoproject.com/en/1.7/topics/testing/advanced/#defining-a-test-runner \"\"\"", "schema is readonly in shared environments we assume DB control in testing\"\"\" #", "import connections logger = logging.getLogger(__name__) class LegacyDiscoverRunner(DiscoverRunner): \"\"\" See https://docs.djangoproject.com/en/1.7/topics/testing/advanced/#defining-a-test-runner \"\"\" def setup_databases(self,", "https://docs.djangoproject.com/en/1.7/topics/testing/advanced/#defining-a-test-runner \"\"\" def setup_databases(self, **kwargs): \"\"\"Though our schema is readonly 
in shared environments", "settings from django.db import connections logger = logging.getLogger(__name__) class LegacyDiscoverRunner(DiscoverRunner): \"\"\" See https://docs.djangoproject.com/en/1.7/topics/testing/advanced/#defining-a-test-runner", "DB control in testing\"\"\" # Super will create an empty test_<db name> automatically", "self).setup_databases(**kwargs) # Invoke any custom ddl to create the schema after that. script_path", "create the schema after that. script_path = os.path.join(settings.MANAGE_ROOT, 'legacy-schema.sql') logger.info(\"Initializing DB with script.", "open(script_path, 'r') as sql_file: ddl = sql_file.read() cursor = connections['legacy'].cursor() cursor.executescript(ddl) return config", "in testing\"\"\" # Super will create an empty test_<db name> automatically config =", "See https://docs.djangoproject.com/en/1.7/topics/testing/advanced/#defining-a-test-runner \"\"\" def setup_databases(self, **kwargs): \"\"\"Though our schema is readonly in shared", "import os import logging from django.test.runner import DiscoverRunner from django.conf import settings from", "logging from django.test.runner import DiscoverRunner from django.conf import settings from django.db import connections", "import DiscoverRunner from django.conf import settings from django.db import connections logger = logging.getLogger(__name__)", "assume DB control in testing\"\"\" # Super will create an empty test_<db name>", "name> automatically config = super(LegacyDiscoverRunner, self).setup_databases(**kwargs) # Invoke any custom ddl to create", "\"\"\"Though our schema is readonly in shared environments we assume DB control in", "super(LegacyDiscoverRunner, self).setup_databases(**kwargs) # Invoke any custom ddl to create the schema after that.", "from django.conf import settings from django.db import connections logger = logging.getLogger(__name__) class LegacyDiscoverRunner(DiscoverRunner):", "custom ddl to create the schema after that. 
script_path = os.path.join(settings.MANAGE_ROOT, 'legacy-schema.sql') logger.info(\"Initializing", "DB with script. [Path: {}]\".format(script_path)) with open(script_path, 'r') as sql_file: ddl = sql_file.read()", "= os.path.join(settings.MANAGE_ROOT, 'legacy-schema.sql') logger.info(\"Initializing DB with script. [Path: {}]\".format(script_path)) with open(script_path, 'r') as", "script_path = os.path.join(settings.MANAGE_ROOT, 'legacy-schema.sql') logger.info(\"Initializing DB with script. [Path: {}]\".format(script_path)) with open(script_path, 'r')", "is readonly in shared environments we assume DB control in testing\"\"\" # Super", "logger.info(\"Initializing DB with script. [Path: {}]\".format(script_path)) with open(script_path, 'r') as sql_file: ddl =", "automatically config = super(LegacyDiscoverRunner, self).setup_databases(**kwargs) # Invoke any custom ddl to create the", "setup_databases(self, **kwargs): \"\"\"Though our schema is readonly in shared environments we assume DB", "script. [Path: {}]\".format(script_path)) with open(script_path, 'r') as sql_file: ddl = sql_file.read() cursor =", "environments we assume DB control in testing\"\"\" # Super will create an empty", "= logging.getLogger(__name__) class LegacyDiscoverRunner(DiscoverRunner): \"\"\" See https://docs.djangoproject.com/en/1.7/topics/testing/advanced/#defining-a-test-runner \"\"\" def setup_databases(self, **kwargs): \"\"\"Though our", "**kwargs): \"\"\"Though our schema is readonly in shared environments we assume DB control", "we assume DB control in testing\"\"\" # Super will create an empty test_<db", "os.path.join(settings.MANAGE_ROOT, 'legacy-schema.sql') logger.info(\"Initializing DB with script. [Path: {}]\".format(script_path)) with open(script_path, 'r') as sql_file:", "control in testing\"\"\" # Super will create an empty test_<db name> automatically config", "any custom ddl to create the schema after that. 
script_path = os.path.join(settings.MANAGE_ROOT, 'legacy-schema.sql')", "the schema after that. script_path = os.path.join(settings.MANAGE_ROOT, 'legacy-schema.sql') logger.info(\"Initializing DB with script. [Path:", "# Invoke any custom ddl to create the schema after that. script_path =", "Invoke any custom ddl to create the schema after that. script_path = os.path.join(settings.MANAGE_ROOT,", "with script. [Path: {}]\".format(script_path)) with open(script_path, 'r') as sql_file: ddl = sql_file.read() cursor", "logger = logging.getLogger(__name__) class LegacyDiscoverRunner(DiscoverRunner): \"\"\" See https://docs.djangoproject.com/en/1.7/topics/testing/advanced/#defining-a-test-runner \"\"\" def setup_databases(self, **kwargs): \"\"\"Though", "to create the schema after that. script_path = os.path.join(settings.MANAGE_ROOT, 'legacy-schema.sql') logger.info(\"Initializing DB with", "readonly in shared environments we assume DB control in testing\"\"\" # Super will", "def setup_databases(self, **kwargs): \"\"\"Though our schema is readonly in shared environments we assume", "an empty test_<db name> automatically config = super(LegacyDiscoverRunner, self).setup_databases(**kwargs) # Invoke any custom", "os import logging from django.test.runner import DiscoverRunner from django.conf import settings from django.db", "\"\"\" def setup_databases(self, **kwargs): \"\"\"Though our schema is readonly in shared environments we", "schema after that. script_path = os.path.join(settings.MANAGE_ROOT, 'legacy-schema.sql') logger.info(\"Initializing DB with script. 
[Path: {}]\".format(script_path))", "{}]\".format(script_path)) with open(script_path, 'r') as sql_file: ddl = sql_file.read() cursor = connections['legacy'].cursor() cursor.executescript(ddl)", "our schema is readonly in shared environments we assume DB control in testing\"\"\"", "shared environments we assume DB control in testing\"\"\" # Super will create an", "'legacy-schema.sql') logger.info(\"Initializing DB with script. [Path: {}]\".format(script_path)) with open(script_path, 'r') as sql_file: ddl", "with open(script_path, 'r') as sql_file: ddl = sql_file.read() cursor = connections['legacy'].cursor() cursor.executescript(ddl) return", "import contextlib import os import logging from django.test.runner import DiscoverRunner from django.conf import", "after that. script_path = os.path.join(settings.MANAGE_ROOT, 'legacy-schema.sql') logger.info(\"Initializing DB with script. [Path: {}]\".format(script_path)) with", "django.test.runner import DiscoverRunner from django.conf import settings from django.db import connections logger =", "connections logger = logging.getLogger(__name__) class LegacyDiscoverRunner(DiscoverRunner): \"\"\" See https://docs.djangoproject.com/en/1.7/topics/testing/advanced/#defining-a-test-runner \"\"\" def setup_databases(self, **kwargs):", "[Path: {}]\".format(script_path)) with open(script_path, 'r') as sql_file: ddl = sql_file.read() cursor = connections['legacy'].cursor()", "= super(LegacyDiscoverRunner, self).setup_databases(**kwargs) # Invoke any custom ddl to create the schema after", "django.db import connections logger = logging.getLogger(__name__) class LegacyDiscoverRunner(DiscoverRunner): \"\"\" See https://docs.djangoproject.com/en/1.7/topics/testing/advanced/#defining-a-test-runner \"\"\" def", "test_<db name> automatically config = super(LegacyDiscoverRunner, self).setup_databases(**kwargs) # Invoke any custom ddl to" ]
[ ") bot.send_message( msg.from_user.id, '<b>Настройки группы {}</b>'.format(msg.chat.title), reply_markup=group_setting(msg.chat.id), parse_mode='HTML' ) utils.new_update(msg, time.time()-start_time) @bot.message_handler(commands=['start'], func=lambda", "для выполнения этого действия.' ) @bot.callback_query_handler(func = lambda c: c.data.startswith('time_ro_')) def ro_time_change(c): change_time", "= 'Эти настройки можно получить в любое время и отправить @f0rden для восстановления", "bot.get_chat(utils.get_log_id(msg.chat.id)).title ) else: m = text.group_commands[utils.get_group_lang(msg.chat.id)]['log_channel']['info']['is_off'] bot.send_message( msg.chat.id, m, parse_mode = 'HTML' )", "utils.is_restricted(msg): bot.delete_message( msg.chat.id, msg.message_id ) utils.new_update(msg, time.time()-start_time) # Кнопки @bot.callback_query_handler(func = lambda c:", "bot_kick(msg): start_time = time.time() utils.kick_user(msg) utils.new_update(msg, time.time()-start_time) @bot.message_handler(commands = ['ban', 'ban_me_please'], func =", "c.from_user.id == user_id or utils.check_status(c.from_user.id, utils.parse_chat_id(c)): user = bot.get_chat_member( chat_id, user_id ) if", "= chat_id)) btn4 = types.InlineKeyboardButton(text = '➕2', callback_data = 'warns_count_+2::{chat_id}'.format(chat_id = chat_id)) keyboard.add(btn1,", "telebot.TeleBot(token = secret_config.token) my_info = bot.get_me() telebot_logger = logging.getLogger('telebot') sqlite_info = logging.getLogger('sqlite') main_info", "= 'Переключить все', callback_data = 'change_all::{chat_id}'.format(chat_id = chat_id)) keyboard.add(btn) btn = types.InlineKeyboardButton(text =", "= chat_id)) keyboard.add(btn) btn = types.InlineKeyboardButton(text = 'Удалять ссылки{}'.format(config.settings_statuses[curr_settings['deletions']['url']]), callback_data = 'del_url::{chat_id}'.format(chat_id =", "def bot_reregister(msg): start_time = time.time() if utils.check_status(msg.from_user.id, msg.chat.id): 
api.register_new_chat(msg.chat) api.change_group_params(msg.chat.id, ujson.dumps(config.default_group_settings)) bot.send_message( msg.chat.id,", "@bot.callback_query_handler(func = lambda c: c.data.startswith('warns_settings')) def warns_count_change(c): chat_id = utils.parse_chat_id(c) bot.edit_message_reply_markup( chat_id =", "btn2 = types.InlineKeyboardButton(text = 'Кик', callback_data = 'warns_action_1::{chat_id}'.format(chat_id = chat_id)) btn3 = types.InlineKeyboardButton(text", "utils.is_sticker_restricted(msg): bot.delete_message( msg.chat.id, msg.message_id ) utils.new_update(msg, time.time()-start_time) @bot.message_handler(content_types = ['audio', 'document', 'photo', 'sticker',", "text = 'Изменения подтверждены.' ) else: bot.answer_callback_query( callback_query_id = c.id, show_alert = True,", "= c.id, show_alert = True, text = 'Вы не являетесь администратором. Текущий статус", "lambda c: c.data.startswith('read_only')) def new_users_ro(c): chat_id = utils.parse_chat_id(c) if utils.check_status(c.from_user.id, utils.parse_chat_id(c)): settings =", "недостаточно прав для выполнения этого действия.' 
) else: if c.from_user.id == user_id or", "chat_id)) btn3 = types.InlineKeyboardButton(text = 'Бан', callback_data = 'warns_action_2::{chat_id}'.format(chat_id = chat_id)) btn4 =", "try: if not utils.check_status(msg.from_user.id, msg.chat.id): bot.restrict_chat_member( msg.chat.id, msg.from_user.id, until_date=str(time.time() + ban_time)) bot.reply_to( msg,", "utils.ban_stickerpack(msg) else: utils.send_err_report(msg, 'not_enought_rights') utils.new_update(msg, time.time()-start_time) @bot.message_handler(commands=['stickerpack_unban'], func=lambda msg: msg.chat.type != 'private') def", "chat_id = c.message.chat.id, message_id = c.message.message_id, reply_markup = delete_settings(chat_id) ) bot.answer_callback_query( callback_query_id =", "= settings['restrictions']['for_time'] + change_time if settings['restrictions']['for_time'] < 1: settings['restrictions']['for_time'] = 1 api.change_group_params(chat_id, ujson.dumps(settings))", "1: settings['warns']['count'] = 1 api.change_group_params(chat_id, ujson.dumps(settings)) bot.edit_message_reply_markup( chat_id = c.message.chat.id, message_id = c.message.message_id,", "msg.chat.type != 'private') def bot_set_text(msg): start_time = time.time() message = msg if len(msg.text)", "def welcome_settings_state(c): chat_id = utils.parse_chat_id(c) if utils.check_status(c.from_user.id, utils.parse_chat_id(c)): settings = api.get_group_params(chat_id) curr_state =", "'supergroup') def bot_answ(msg): start_time = time.time() message = msg kb = types.InlineKeyboardMarkup() r", "utils.new_update(msg, time.time()-start_time) @bot.message_handler(commands=['kick'], func=lambda msg: msg.chat.type != 'private') def bot_kick(msg): start_time = time.time()", "действия.' 
) @bot.callback_query_handler(func = lambda c: c.data.startswith('unban_new_user')) def unban_new_user(c): chat_id = utils.parse_chat_id(c) user_id", "utils.check_status(c.from_user.id, utils.parse_chat_id(c)): settings = api.get_group_params(chat_id) curr_state = settings['greeting']['is_enabled'] new_state = config.settings_states[curr_state] settings['greeting']['is_enabled'] =", "btn = types.InlineKeyboardButton(text = 'Переключить все', callback_data = 'change_all::{chat_id}'.format(chat_id = chat_id)) keyboard.add(btn) btn", "= c.message.chat.id, message_id = c.message.message_id ) utils.add_to_delete_queue(msg.chat.id, r.message_id, api.get_group_params(msg.chat.id)['greeting']['delete_timer']) else: bot.answer_callback_query( callback_query_id =", "create_user_language_keyboard(): lang_keyboard = types.InlineKeyboardMarkup() for i in config.languages: lang_keyboard.add(types.InlineKeyboardButton(text = i['title'], callback_data =", "settings['restrictions']['for_time'] + change_time if settings['restrictions']['for_time'] < 1: settings['restrictions']['for_time'] = 1 api.change_group_params(chat_id, ujson.dumps(settings)) bot.edit_message_reply_markup(", "новых пользователей', callback_data = 'new_users_restrictions::{chat_id}'.format(chat_id = chat_id)) keyboard.add(btn) btn = types.InlineKeyboardButton(text = 'Настройка", "'Настройки сброшены.' ) bot.delete_message( c.message.chat.id, c.message.message_id ) else: bot.delete_message( c.message.chat.id, c.message.message_id ) bot.send_message(", "'Изменения подтверждены. 
Текущий статус настройки: {}'.format(config.settings_statuses[api.get_group_params(chat_id)['deletions']['url']]) ) else: bot.answer_callback_query( callback_query_id = c.id, show_alert", "c.message.message_id, reply_markup = new_users_restrictions_kb(chat_id) ) bot.answer_callback_query( callback_query_id = c.id, text = 'Изменения подтверждены.'", "msg.chat.id, 'Правила изменены' ) else: bot.send_message( msg.chat.id, text = 'Правила составлены неверно' )", "подтверждены.' ) @bot.callback_query_handler(func = lambda c: c.data.startswith('warns_action_')) def warns_count_change(c): new_mod = int(c.data.split('_')[2].split('::')[0]) chat_id", "func=lambda msg: msg.chat.type == 'private') def bot_user_start(msg): message = msg start_time = time.time()", "'days') if uptime.days != 0: uptime_str = uptime_str.replace(uptime_str.split(',')[0], utils.get_text_translation(uptime_str.split(',')[0]), 'ru') if working_time.days !=", "callback_data = 'welcome_timer_-5::{chat_id}'.format(chat_id = chat_id)) btn3 = types.InlineKeyboardButton(text = '➕5', callback_data = 'welcome_timer_+5::{chat_id}'.format(chat_id", "callback_data = 'time_ro_+2::{chat_id}'.format(chat_id = chat_id)) btn5 = types.InlineKeyboardButton(text = 'Навсегда', callback_data = 'time_ro_+10000::{chat_id}'.format(chat_id", "utils.check_super_user(msg.from_user.id): utils.global_ban(msg) elif not utils.check_status(msg.from_user.id, msg.chat.id): # if utils.is_new_in_chat(msg) and api.get_group_params(msg.chat.id)['restrict_new'] == '1':", "можно получить в любое время и отправить @f0rden для восстановления их, в случае", "True, text = 'У вас недостаточно прав для выполнения этого действия.' 
) @bot.callback_query_handler(func", "приветствие', callback_data = 'welcome_get::{chat_id}'.format(chat_id = chat_id)) kb.add(btn) btn = types.InlineKeyboardButton(text = 'Назад', callback_data='to_group_settings_menu::{chat_id}'.format(chat_id", "import re import ssl import subprocess import threading import time from multiprocessing import", "types.InlineKeyboardButton(text = 'Назад', callback_data='to_group_settings_menu::{chat_id}'.format(chat_id = chat_id)) keyboard.add(btn) return keyboard def remove_warns_kb(user_id): kb =", "bot.delete_message( msg.chat.id, msg.message_id ) utils.new_update(msg, time.time()-start_time) @bot.message_handler(content_types = ['audio', 'document', 'photo', 'sticker', 'video',", "= '➕2', callback_data = 'warns_count_+2::{chat_id}'.format(chat_id = chat_id)) keyboard.add(btn1, btn2, btn3, btn4) btn =", "uptime_str = str(uptime).replace('day', 'days').replace('dayss', 'days') working_time_str = str(working_time).replace('day', 'days').replace('dayss', 'days') if uptime.days !=", "text.group_commands[utils.get_group_lang(msg.chat.id)]['errors']['prefix'].format( reason = text.group_commands[utils.get_group_lang(msg.chat.id)]['errors']['reasons']['user_is_admin'] ), parse_mode='HTML' ) except Exception as e: logging.error(e) else:", ") bot.leave_chat( c.message.chat.id ) else: bot.send_message( c.message.chat.id, text.group_commands[utils.get_group_lang(c.message.chat.id)]['leave']['cancelled'] ) bot.delete_message( c.message.chat.id, c.message.message_id )", "api.get_group_params(chat_id) btn = types.InlineKeyboardButton(text = 'Отправлять приветствие в чат: {}'.format(config.settings_statuses[curr_settings['greeting']['is_enabled']]), callback_data = 'welcome_state::{chat_id}'.format(chat_id", "callback_data = 'warns_action_2::{chat_id}'.format(chat_id = chat_id)) btn4 = types.InlineKeyboardButton(text = 'Read-only на сутки', callback_data", "if utils.check_status(msg.from_user.id, msg.chat.id) and utils.have_args(msg): words = 
utils.parse_arg(msg)[1] user_id = int(words) utils.unban_user(msg, user_id)", "= c.message.chat.id, message_id = c.message.message_id, reply_markup = generate_broadcast_vars_menu_kb() ) @bot.callback_query_handler(func = lambda c:", "c: c.data.startswith('delete::')) def group_settings_deletions(c): chat_id = utils.parse_chat_id(c) cont_type = re.split('::', c.data)[1] if utils.check_status(c.from_user.id,", "types.InlineKeyboardMarkup(row_width=1) curr_settings = api.get_group_params(chat_id) btn = types.InlineKeyboardButton(text = 'Принимать рассылки{}'.format(config.settings_statuses[curr_settings['get_notifications']]), callback_data = 'get_notifications::{chat_id}'.format(chat_id", "= c.message.chat.id, message_id = c.message.message_id, reply_markup = generate_broadcast_check_menu_kb() ) @bot.callback_query_handler(func = lambda c:", "'supergroup_chat_created', 'channel_chat_created', 'migrate_to_chat_id', 'migrate_from_chat_id', 'pinned_message' ]) def bot_check_system(msg): start_time = time.time() if api.get_group_params(msg.chat.id)['deletions']['system']:", "= lambda c: c.data.startswith('warns_action_')) def warns_count_change(c): new_mod = int(c.data.split('_')[2].split('::')[0]) chat_id = utils.parse_chat_id(c) if", "= types.InlineKeyboardButton(text = 'Исключать ботов{}'.format(config.settings_statuses[curr_settings['kick_bots']]), callback_data='kick_bots::{chat_id}'.format(chat_id = chat_id)) keyboard.add(btn) btn = types.InlineKeyboardButton(text =", "reply_markup = delete_settings(utils.parse_chat_id(c)) ) bot.answer_callback_query( callback_query_id = c.id, text = 'Переход выполнен.' 
)", "parse_mode='HTML' ) utils.add_to_delete_queue(msg.chat.id, r.message_id, api.get_group_params(msg.chat.id)['greeting']['delete_timer']) utils.new_update(msg, time.time()-start_time) @bot.message_handler(content_types=[ 'new_chat_members', 'left_chat_member', 'new_chat_title', 'new_chat_photo', 'delete_chat_photo',", "= ['audio', 'document', 'photo', 'sticker', 'video', 'video_note', 'voice', 'location', 'contact'], func = lambda", "def bot_about(msg): start_time = time.time() bot.send_message( msg.chat.id, text.user_messages[utils.get_user_lang(msg)]['about'], parse_mode='HTML' ) utils.new_update(msg, time.time()-start_time) @bot.message_handler(commands=['warn'],", "def warns_count_change(c): chat_id = utils.parse_chat_id(c) bot.edit_message_reply_markup( chat_id = c.message.chat.id, message_id = c.message.message_id, reply_markup", "generate_broadcast_check_menu_kb() ) bot.answer_callback_query( callback_query_id = c.id, text = 'Изменения подтверждены.' ) else: t", "= 'welcome_get::{chat_id}'.format(chat_id = chat_id)) kb.add(btn) btn1 = types.InlineKeyboardButton(text = '➖10', callback_data = 'welcome_timer_-10::{chat_id}'.format(chat_id", "utils.new_update(msg, time.time()-start_time) @bot.message_handler(commands = ['ban', 'ban_me_please'], func = lambda msg: msg.chat.type == 'supergroup')", "utils.new_update(msg, time.time()-start_time) @bot.message_handler(content_types = ['audio', 'document', 'photo', 'sticker', 'video', 'video_note', 'voice', 'location', 'contact'],", "msg.chat.id, msg.message_id) bot.pin_chat_message( r.chat.id, r.message_id ) @bot.message_handler(commands =['setlog'], func = lambda msg: msg.chat.type", "btn3 = types.InlineKeyboardButton(text = 'Все', callback_data = 'broadcast_check::all') btn4 = types.InlineKeyboardButton(text = 'Сейчас:", "@bot.callback_query_handler(func = lambda c: c.data.startswith('unban_new_user')) def unban_new_user(c): chat_id = utils.parse_chat_id(c) user_id = utils.parse_user_id(c)", 
"bot.kick_chat_member( msg.chat.id, msg.new_chat_member.id ) bot.send_message( msg.chat.id, text.group_commands['ru']['restricted']['bot'], parse_mode = 'HTML', reply_markup = types.ReplyKeyboardRemove()", "сутки', callback_data = 'warns_action_3::{chat_id}'.format(chat_id = chat_id)) keyboard.add(btn1, btn2, btn3, btn4) btn = types.InlineKeyboardButton(text", "c.data.startswith('welcome_get')) def get_welcome_text(c): chat_id = utils.parse_chat_id(c) bot.send_message( c.message.chat.id, utils.get_greeting(chat_id), parse_mode = 'HTML' )", "прав для выполнения этого действия.' ) @bot.callback_query_handler(func = lambda c: c.data.startswith('warns_settings')) def warns_count_change(c):", "= api.get_group_params(msg.chat.id)['restrictions']['for_time'] ), reply_markup = unban_new_user_kb(msg), parse_mode = 'HTML' ) utils.add_to_delete_queue(msg.chat.id, r.message_id, api.get_group_params(msg.chat.id)['restrictions']['for_time']*3600)", "types.InlineKeyboardMarkup(row_width=1) btn = types.InlineKeyboardButton(text = 'Да, выйди из чата', callback_data='leave_cancel::{chat_id}'.format(chat_id = chat_id)) keyboard.add(btn)", "types.InlineKeyboardButton(text = 'Сброс', callback_data = 'time_ro_-10000000000::{chat_id}'.format(chat_id = chat_id)) keyboard.add(btn1, btn2, btn3, btn4) keyboard.add(btn5,", "lambda msg: msg.chat.type != 'private') def bot_reset_settings(msg): start_time = time.time() kb = types.InlineKeyboardMarkup()", "api.get_group_params(msg.chat.id)['deletions']['system']: bot.delete_message( msg.chat.id, msg.message_id ) if msg.chat.type == 'channel': bot.send_message( msg.chat.id, text.promotion_message, parse_mode='HTML'", "text = text.group_commands[utils.get_group_lang(c.message.chat.id)]['restricted']['new_user']['button_pressed'].format( user_id = user.user.id, user_name = api.replacer(user.user.first_name) ), parse_mode = 'HTML',", "time.time() utils.kick_user(msg) utils.new_update(msg, time.time()-start_time) @bot.message_handler(commands = 
['ban', 'ban_me_please'], func = lambda msg: msg.chat.type", "else: t = Thread(target = utils.make_broadcast, kwargs = { 'is_test': True, 'receivers': curr_bot_settings['broadcast']['check']['recievers'],", "callback_data='empty_callback::{chat_id}'.format(chat_id = chat_id)) keyboard.add(btn) btn1 = types.InlineKeyboardButton(text = 'Ничего', callback_data = 'warns_action_0::{chat_id}'.format(chat_id =", "@bot.callback_query_handler(func = lambda c: c.data.startswith('settings_delete')) def del_settings(c): words = c.data.split() bot.delete_message( c.message.chat.id, words[2]", "этого действия.' ) @bot.callback_query_handler(func = lambda c: c.data.startswith('warns_settings')) def warns_count_change(c): chat_id = utils.parse_chat_id(c)", "btn = types.InlineKeyboardButton(text = 'Исключать ботов{}'.format(config.settings_statuses[curr_settings['kick_bots']]), callback_data='kick_bots::{chat_id}'.format(chat_id = chat_id)) keyboard.add(btn) btn = types.InlineKeyboardButton(text", "bot.set_webhook( url=WEBHOOK_URL_BASE + WEBHOOK_URL_PATH, certificate=open(WEBHOOK_SSL_CERT, 'r')) context = ssl.SSLContext(ssl.PROTOCOL_TLSv1_2) context.load_cert_chain(WEBHOOK_SSL_CERT, WEBHOOK_SSL_PRIV) # Start", "user_settings_main_menu(msg): keyboard = types.InlineKeyboardMarkup(row_width=1) curr_settings = api.get_user_param(msg.chat.id, 'settings') btn = types.InlineKeyboardButton(text = 'Принимать", "not utils.check_log(msg.chat.id) ) def bot_set_log(msg): user_id = msg.from_user.id try: admins = bot.get_chat_administrators(msg.forward_from_chat.id) status1", "types.InlineKeyboardButton(text = 'Ввести сообщение', callback_data = 'broadcast_message::input') btn2 = types.InlineKeyboardButton(text = 'Просмотреть сообщение',", "is not None: user_id = msg.reply_to_message.from_user.id utils.unban_user(msg, user_id) elif utils.check_status(msg.from_user.id, msg.chat.id) and not", "= 'Да, выйди из чата', callback_data='leave_cancel::{chat_id}'.format(chat_id = chat_id)) 
keyboard.add(btn) btn = types.InlineKeyboardButton(text =", "generate_user_menu_kb(user_id): kb = types.InlineKeyboardMarkup(row_width = 1) btn1 = types.InlineKeyboardButton(text = 'Мои чаты', callback_data", "= ['set_rules'], func = lambda msg: utils.check_status(msg.from_user.id, msg.chat.id)) def bot_set_rules(msg): start_time = time.time()", "= 'to_main_menu')) return kb def generate_broadcast_vars_menu_kb(): kb = types.InlineKeyboardMarkup(row_width = 1) btn1 =", "= 'broadcast_settings') kb.add(btn1, btn2) kb.add(types.InlineKeyboardButton(text = 'В главное меню', callback_data = 'to_main_menu')) return", "bot.send_message( msg.from_user.id, 'Ваше меню', reply_markup = generate_user_menu_kb(msg.from_user.id) ) @bot.message_handler(commands=['set_text'], func = lambda msg:", "= types.InlineKeyboardButton(text = 'Настройка предупреждений', callback_data = 'warns_settings::{chat_id}'.format(chat_id = chat_id)) keyboard.add(btn) btn =", "== secret_config.channel_ID) def bot_broadcast(msg): r = bot.forward_message(secret_config.official_chat, msg.chat.id, msg.message_id) bot.pin_chat_message( r.chat.id, r.message_id )", "= chat_id)) keyboard.add(btn) btn = types.InlineKeyboardButton(text = 'Назад', callback_data='to_group_settings_menu::{chat_id}'.format(chat_id = chat_id)) keyboard.add(btn) return", "диалоги', callback_data = 'broadcast_check::users') btn2 = types.InlineKeyboardButton(text = 'Только чаты', callback_data = 'broadcast_check::chats')", "False status2 = False for i in admins: if i.user.id == user_id: if", "lambda c: c.data.startswith('get_notifications')) def notify_change(c): chat_id = utils.parse_chat_id(c) if utils.check_status(c.from_user.id, utils.parse_chat_id(c)): utils.change_state_main(chat_id, 'get_notifications')", "api.get_group_params(chat_id) # settings[''] # api.change_group_params(chat_id, ) # Вебхук bot.remove_webhook() bot.set_webhook( url=WEBHOOK_URL_BASE + WEBHOOK_URL_PATH,", ") utils.new_update(msg, time.time()-start_time) 
@bot.message_handler(content_types = ['audio', 'document', 'photo', 'sticker', 'video', 'video_note', 'voice', 'location',", "func = lambda msg: not utils.check_status(msg.from_user.id, msg.chat.id)) def testt(msg): start_time = time.time() if", "bot_check_text(msg): start_time = time.time() msg_text = msg.text msg_text_low = msg_text.lower() if utils.is_restricted(msg) and", "await request.json() update = telebot.types.Update.de_json(request_body_dict) bot.process_new_updates([update]) return web.Response() else: return web.Response(status=403) app.router.add_post('/{token}/', handle)", "+ WEBHOOK_URL_PATH, certificate=open(WEBHOOK_SSL_CERT, 'r')) context = ssl.SSLContext(ssl.PROTOCOL_TLSv1_2) context.load_cert_chain(WEBHOOK_SSL_CERT, WEBHOOK_SSL_PRIV) # Start aiohttp server", "def bot_user_start(msg): message = msg start_time = time.time() if utils.is_user_new(msg): if utils.have_args(msg): referrer", "callback_data = 'warns_count_+2::{chat_id}'.format(chat_id = chat_id)) keyboard.add(btn1, btn2, btn3, btn4) btn = types.InlineKeyboardButton(text =", "c.message.message_id, reply_markup = create_user_language_keyboard() ) bot.answer_callback_query( callback_query_id = c.id, text = 'Переход выполнен'", "inviter_id = inviter_info.user.id, inviter_firstname = inviter_info.user.first_name, invited_count = int(i['COUNT(`inviter`)']) ) bot.send_message( user_id, m,", "user_name = api.replacer(msg.from_user.first_name), ), parse_mode='HTML' ) except Exception as e: print(e) bot.reply_to( msg,", "else: if c.from_user.id == user_id or utils.check_status(c.from_user.id, utils.parse_chat_id(c)): user = bot.get_chat_member( chat_id, user_id", "welcome_settings_kb(chat_id) ) bot.answer_callback_query( callback_query_id = c.id, text = 'Изменения подтверждены.' 
) @bot.callback_query_handler(func =", "btn = types.InlineKeyboardButton(text = 'Автоматический read-only на {} час - {}'.format(curr_settings['restrictions']['for_time'], config.settings_statuses[curr_settings['restrictions']['read_only']]), callback_data", "chat_id=c.message.chat.id, message_id=c.message.message_id, reply_markup=group_setting(utils.parse_chat_id(c)) ) bot.answer_callback_query( callback_query_id = c.id, text = 'Изменения подтверждены. Текущий", "'warns_action_2::{chat_id}'.format(chat_id = chat_id)) btn4 = types.InlineKeyboardButton(text = 'Read-only на сутки', callback_data = 'warns_action_3::{chat_id}'.format(chat_id", "time.time()-start_time) @bot.message_handler(commands = ['get_logs'], func = lambda msg: msg.chat.id == -1001236256304 and utils.check_super_user(msg.from_user.id))", "btn = types.InlineKeyboardButton(text=config.available_attachments_str[cont_type].format(config.settings_statuses[curr_settings['deletions']['files'][cont_type]]), callback_data='delete::{content_type}::{chat_id}'.format(content_type = cont_type, chat_id = chat_id)) keyboard.add(btn) btn = types.InlineKeyboardButton(text", "c.data.startswith('settings_delete')) def del_settings(c): words = c.data.split() bot.delete_message( c.message.chat.id, words[2] ) bot.delete_message( c.message.chat.id, words[1]", "utils.check_status(msg.from_user.id, msg.chat.id): utils.send_err_report(msg, 'not_enought_rights') elif utils.check_status(msg.reply_to_message.from_user.id, msg.chat.id): utils.send_err_report(msg, 'user_is_admin') utils.new_update(msg, time.time()-start_time) @bot.message_handler(commands=['donate']) def", "api.get_chats_count(), unblocked_users = api.get_unblocked_users_count(), unblocked_chats = api.get_unblocked_chats_count() ) ) @bot.callback_query_handler(func = lambda c:", "= api.get_group_params(chat_id) btn = types.InlineKeyboardButton(text = 'Автоматический read-only на {} час - {}'.format(curr_settings['restrictions']['for_time'],", "inviters: 
inviter_info = bot.get_chat_member(chat_id, i['inviter']) counter += 1 m += text.group_commands[utils.get_group_lang(chat_id)]['refs_stats']['body'].format( inviter_pos =", "def unban_new_user(c): chat_id = utils.parse_chat_id(c) user_id = utils.parse_user_id(c) if api.get_group_params(chat_id)['restrictions']['admins_only']: if utils.check_status(c.from_user.id, utils.parse_chat_id(c)):", "chat_id = chat_id)) keyboard.add(btn) btn = types.InlineKeyboardButton(text = 'Переключить все', callback_data = 'change_all::{chat_id}'.format(chat_id", "= 'HTML' ) utils.add_to_delete_queue(msg.chat.id, r.message_id, api.get_group_params(msg.chat.id)['restrictions']['for_time']*3600) if msg.new_chat_member.is_bot and api.get_group_params(msg.chat.id)['kick_bots']: bot.kick_chat_member( msg.chat.id, msg.new_chat_member.id", "WEBHOOK_URL_BASE = \"https://%s:%s\" % (WEBHOOK_HOST, WEBHOOK_PORT) WEBHOOK_URL_PATH = \"/%s/\" % (secret_config.token) start_time =", "start_time = time.time() utils.kick_user(msg) utils.new_update(msg, time.time()-start_time) @bot.message_handler(commands = ['ban', 'ban_me_please'], func = lambda", "if utils.is_new_in_chat(msg) and api.get_group_params(msg.chat.id)['restrict_new'] == '1': if utils.check_for_urls(msg) and api.get_group_params(msg.chat.id)['deletions']['url']: bot.delete_message( msg.chat.id, msg.message_id", ") bot.answer_callback_query( callback_query_id = c.id, text = 'Изменения подтверждены. 
Текущий статус настройки: {}'.format(config.settings_statuses[api.get_group_params(chat_id)[c.data.split('::')[0]]])", "= c.message.chat.id, message_id = c.message.message_id, reply_markup = generate_user_groups(user_id) ) bot.answer_callback_query( callback_query_id = c.id,", "types.InlineKeyboardButton(text = 'Кик', callback_data = 'warns_action_1::{chat_id}'.format(chat_id = chat_id)) btn3 = types.InlineKeyboardButton(text = 'Бан',", "bot_ping(msg): start_timee = time.time() uptime = datetime.timedelta(seconds = int(time.time()-start_time)) working_time = datetime.timedelta(seconds =", "msg: msg.chat.type != 'private') def bot_kick(msg): start_time = time.time() utils.kick_user(msg) utils.new_update(msg, time.time()-start_time) @bot.message_handler(commands", "'url') bot.edit_message_reply_markup( chat_id=c.message.chat.id, message_id=c.message.message_id, reply_markup=group_setting(utils.parse_chat_id(c)) ) bot.answer_callback_query( callback_query_id = c.id, text = 'Изменения", "@bot.callback_query_handler(func = lambda c: c.data.startswith('new_users_restrictions')) def new_users_restrictions(c): chat_id = utils.parse_chat_id(c) bot.edit_message_reply_markup( chat_id =", "chat_id)) btn3 = types.InlineKeyboardButton(text = '➕5', callback_data = 'welcome_timer_+5::{chat_id}'.format(chat_id = chat_id)) btn4 =", "def del_settings(c): words = c.data.split() bot.delete_message( c.message.chat.id, words[2] ) bot.delete_message( c.message.chat.id, words[1] )", "недостаточно прав для выполнения этого действия. 
Текущий статус настройки: {}'.format(config.settings_statuses[api.get_group_params(chat_id)[c.data.split('::')[0]]]) ) @bot.callback_query_handler(func =", "bot.delete_message( c.message.chat.id, c.message.message_id ) bot.send_message( c.message.chat.id, 'Сброс отменен' ) @bot.callback_query_handler(func = lambda c:", "keyboard.add(btn) btn = types.InlineKeyboardButton(text = 'Получить дамп настроек', callback_data = 'get_settings_json::{chat_id}'.format(chat_id = chat_id))", "= api.replacer(user.user.first_name) ), parse_mode = 'HTML', chat_id = c.message.chat.id, message_id = c.message.message_id )", "msg: not utils.check_status(msg.from_user.id, msg.chat.id)) def testt(msg): start_time = time.time() if utils.is_restricted(msg): bot.delete_message( msg.chat.id,", "Путь к сертификату WEBHOOK_SSL_PRIV = './webhook_pkey.pem' # Путь к приватному ключу WEBHOOK_URL_BASE =", "logging.getLogger('telebot') sqlite_info = logging.getLogger('sqlite') main_info = logging.getLogger('main_info') report_info = logging.getLogger('reports') if __name__ ==", "'private') def bot_check_text(msg): start_time = time.time() msg_text = msg.text msg_text_low = msg_text.lower() if", "c.data.startswith('get_chat_refs::')) def bot_get_chat_refs(c): chat_id = utils.parse_chat_id(c) user_id = c.from_user.id inviters = utils.get_top_inviters(chat_id) m", "return kb def generate_broadcast_check_menu_kb(): kb = types.InlineKeyboardMarkup(row_width = 3) curr_settings = ujson.loads(api.get_bot_settings(secret_config.token)) s", "bot.edit_message_reply_markup( chat_id = c.message.chat.id, message_id = c.message.message_id, reply_markup = generate_broadcast_vars_menu_kb() ) @bot.callback_query_handler(func =", "= lambda c: c.data in ['my_chats', 'to_groups_list']) def my_chats_list(c): user_id = c.from_user.id user_settings", "my_info = bot.get_me() telebot_logger = logging.getLogger('telebot') sqlite_info = logging.getLogger('sqlite') main_info = logging.getLogger('main_info') report_info", 
"выполнения этого действия.' ) @bot.callback_query_handler(func = lambda c: c.data.startswith('unban_new_user')) def unban_new_user(c): chat_id =", "c.message.chat.id, 'Сброс отменен' ) @bot.callback_query_handler(func = lambda c: c.data.startswith('leave_')) def bot_leave_cb(c): if utils.check_status(c.from_user.id,", "= telebot.types.Update.de_json(request_body_dict) bot.process_new_updates([update]) return web.Response() else: return web.Response(status=403) app.router.add_post('/{token}/', handle) def create_user_language_keyboard(): lang_keyboard", "рассылки{}'.format(config.settings_statuses['get_notifications']), callback_data='get_notifications') keyboard.add(btn) btn = types.InlineKeyboardButton(text = 'Выбор языка'.format(config.settings_statuses['get_notifications']), callback_data='open_lang_menu') keyboard.add(btn) return keyboard", "= 'Данное приветствие не работает' ) utils.new_update(msg, time.time()-start_time) @bot.message_handler(commands=['kick'], func=lambda msg: msg.chat.type !=", "= text.group_commands[utils.get_group_lang(chat_id)]['refs_stats']['header'] counter = 0 for i in inviters: inviter_info = bot.get_chat_member(chat_id, i['inviter'])", "= lambda c: c.data.startswith('del_url')) def del_url(c): chat_id = utils.parse_chat_id(c) if utils.check_status(c.from_user.id, utils.parse_chat_id(c)): utils.change_state_deletions_main(chat_id,", "msg.chat.id == 303986717) def bot_text(msg): start_time = time.time() bot.reply_to(msg, \"<code>'{}': '{}',</code>\".format(msg.photo[0].file_id, msg.caption), parse_mode", "kick_bots(c): chat_id = utils.parse_chat_id(c) if utils.check_status(c.from_user.id, utils.parse_chat_id(c)): utils.change_state_main(chat_id, 'kick_bots') bot.edit_message_reply_markup( chat_id=c.message.chat.id, message_id=c.message.message_id, reply_markup=group_setting(utils.parse_chat_id(c))", "'Переключить все', callback_data = 'change_all::{chat_id}'.format(chat_id = chat_id)) keyboard.add(btn) btn = 
types.InlineKeyboardButton(text = 'Назад',", "text.user_messages[utils.get_user_lang(msg)]['commands']['version'].format(version = text.VERSION), parse_mode = 'HTML' ) @bot.message_handler(commands = ['set_rules'], func = lambda", "callback_data = 'time_ro_-1::{chat_id}'.format(chat_id = chat_id)) btn3 = types.InlineKeyboardButton(text = '➕1', callback_data = 'time_ro_+1::{chat_id}'.format(chat_id", "msg.chat.type != 'private' and utils.check_status(msg.from_user.id, msg.chat.id)) def bot_leave(msg): bot.send_message( msg.chat.id, text.group_commands[utils.get_group_lang(msg.chat.id)]['leave']['question'], reply_markup =", "is not None and not utils.check_status(msg.reply_to_message.from_user.id, msg.chat.id): utils.new_warn(msg) elif not utils.check_status(msg.from_user.id, msg.chat.id): utils.send_err_report(msg,", "if api.get_group_params(chat_id)['restrictions']['admins_only']: if utils.check_status(c.from_user.id, utils.parse_chat_id(c)): utils.unban_user_button(c) user = bot.get_chat_member( chat_id, user_id ) bot.edit_message_text(", "= chat_id)) keyboard.add(btn) btn = types.InlineKeyboardButton(text = 'Переключить все', callback_data = 'change_all::{chat_id}'.format(chat_id =", "= types.InlineKeyboardMarkup(row_width = 2) btn1 = types.InlineKeyboardButton(text = 'Рассылка', callback_data = 'broadcast_menu') btn2", "действия.' ) @bot.callback_query_handler(func = lambda c: c.data.startswith('new_users_restrictions')) def new_users_restrictions(c): chat_id = utils.parse_chat_id(c) bot.edit_message_reply_markup(", "являетесь администратором. 
Текущий статус настройки: {}'.format(config.settings_statuses[api.get_group_params(chat_id)['deletions']['url']]) ) @bot.callback_query_handler(func = lambda c: c.data.startswith('del_system')) def", "utils.send_err_report(msg, 'no_args_provided') utils.new_update(msg, time.time()-start_time) @bot.message_handler(commands=['help']) def bot_help(msg): start_time = time.time() bot.send_message( msg.from_user.id, text.user_messages[utils.get_user_lang(msg)]['help'],", "types.InlineKeyboardMarkup() kb.add(types.InlineKeyboardButton(text = 'Да, выполнить сброс', callback_data = 'reset_settings_confirmation::{chat_id}'.format(chat_id = msg.chat.id))) kb.add(types.InlineKeyboardButton(text =", "text = 'Вы не являетесь администратором. Текущий статус настройки: {}'.format(config.settings_statuses[api.get_group_params(chat_id)['deletions']['system']]) ) @bot.callback_query_handler(func =", "utils.remove_log_channel(msg.chat.id) elif status1 is not True: bot.send_message( msg.chat.id, text = text.group_commands[utils.get_group_lang(chat_id)]['log_channel']['confirmation']['errors']['user_is_not_creator'] ) elif", "True ) @bot.callback_query_handler(func = lambda c: c.data == 'stats_menu') def bot_stats_menu(c): bot.edit_message_text( chat_id", "недостаточно прав для выполнения этого действия.' 
) @bot.callback_query_handler(func = lambda c: c.data.startswith('time_ro_')) def", "'ban_me_please'], func = lambda msg: msg.chat.type == 'supergroup') def bot_ban_me_please(msg): start_time = time.time()", "telebot_logger = logging.getLogger('telebot') sqlite_info = logging.getLogger('sqlite') main_info = logging.getLogger('main_info') report_info = logging.getLogger('reports') if", "изменено' ) else: bot.send_message( msg.chat.id, text = 'Данное приветствие не работает' ) utils.new_update(msg,", "time.time()-start_time) @bot.message_handler(commands = ['unban'], func = lambda msg: msg.chat.type == 'supergroup') def bot_user_unban(msg):", "keyboard = types.InlineKeyboardMarkup(row_width = 4) curr_settings = api.get_group_params(chat_id) btn = types.InlineKeyboardButton(text = 'Автоматический", "show_alert = True, text = 'У вас недостаточно прав для выполнения этого действия.'", "отправлены', show_alert = True ) @bot.callback_query_handler(func = lambda c: c.data == 'stats_menu') def", "= c.message.message_id, reply_markup = new_users_restrictions_kb(chat_id) ) @bot.callback_query_handler(func = lambda c: c.data.startswith('read_only')) def new_users_ro(c):", "chat_id = c.message.chat.id, message_id = c.message.message_id, text = text.user_messages[lang]['chosen_language']) api.register_new_user(c.from_user, lang) @bot.callback_query_handler(func =", "= lambda c: c.data.startswith('unban_new_user')) def unban_new_user(c): chat_id = utils.parse_chat_id(c) user_id = utils.parse_user_id(c) if", "= time.time() if utils.check_status(msg.from_user.id, msg.chat.id): sticker_id = msg.reply_to_message.sticker.file_id utils.ban_sticker(msg, sticker_id) elif not utils.check_status(msg.from_user.id,", "types.InlineKeyboardButton(text = 'Ничего', callback_data = 'warns_action_0::{chat_id}'.format(chat_id = chat_id)) btn2 = types.InlineKeyboardButton(text = 'Кик',", "msg.chat.type == 'supergroup') def bot_ban_me_please(msg): start_time = time.time() if msg.text == 
'/ban_me_please': t", "# def change_captcha_settings(c): # chat_id = utils.parse_chat_id(c) # if utils.check_status(c.from_user.id, utils.parse_chat_id(c)): # settings", "True, text = 'Вы не являетесь администратором. Текущий статус настройки: {}'.format(config.settings_statuses[api.get_group_params(chat_id)['deletions']['system']]) ) @bot.callback_query_handler(func", "types.InlineKeyboardMarkup(row_width = 3) curr_settings = ujson.loads(api.get_bot_settings(secret_config.token)) s = { 'users': 'пользователи', 'chats': 'диалоги',", "хотите сбросить настройки?', reply_markup = kb ) @bot.message_handler(commands = ['update_time'], func = lambda", "= 'HTML' ) bot.answer_callback_query( c.id, text = 'Список отправлен', show_alert = True )", "keyboard.add(btn) return keyboard def warns_settings_kb(chat_id): keyboard = types.InlineKeyboardMarkup(row_width = 4) curr_settings = api.get_group_params(chat_id)", "= int(time.time()) bot = telebot.TeleBot(token = secret_config.token) my_info = bot.get_me() telebot_logger = logging.getLogger('telebot')", "msg.chat.id utils.new_member_logs(msg) if api.get_group_params(msg.chat.id)['deletions']['system']: bot.delete_message( msg.chat.id, msg.message_id ) if msg.chat.type == 'channel': bot.send_message(", "user = bot.get_chat_member( chat_id, user_id ) bot.edit_message_text( text = text.group_commands[utils.get_group_lang(c.message.chat.id)]['restricted']['new_user']['button_pressed'].format( user_id = user.user.id,", "= '➖2', callback_data = 'time_ro_-2::{chat_id}'.format(chat_id = chat_id)) btn2 = types.InlineKeyboardButton(text = '➖1', callback_data", "multiprocessing import Process as Thread import telebot from aiohttp import web from telebot", "def bot_ban_me_please(msg): start_time = time.time() if msg.text == '/ban_me_please': t = random.randint(1, 10)", "= warns_settings_kb(chat_id) ) bot.answer_callback_query( callback_query_id = c.id, text = 'Изменения подтверждены.' 
) else:", "bot.edit_message_text( chat_id = c.message.chat.id, message_id = c.message.message_id, text = 'Выберите тип рассылки' )", "bot.delete_message( msg.chat.id, msg.message_id ) @bot.message_handler(commands = ['settings'], func = lambda msg: msg.chat.type ==", "['settings'], func = lambda msg: msg.chat.type == 'supergroup') def bot_answ(msg): start_time = time.time()", "= msg_text.lower() if utils.is_restricted(msg) and not utils.check_status(msg.from_user.id, msg.chat.id): bot.delete_message( msg.chat.id, msg.message_id ) if", "= 'welcome_get::{chat_id}'.format(chat_id = chat_id)) kb.add(btn) btn = types.InlineKeyboardButton(text = 'Назад', callback_data='to_group_settings_menu::{chat_id}'.format(chat_id = chat_id))", "parse_mode='HTML' ) bot.leave_chat( msg.chat.id ) if msg.new_chat_member.id == 495038140: api.change_group_params(msg.chat.id, ujson.dumps(config.default_group_settings)) else: if", "= api.get_group_params(chat_id) curr_state = settings['greeting']['is_enabled'] new_state = config.settings_states[curr_state] settings['greeting']['is_enabled'] = new_state api.change_group_params(chat_id, ujson.dumps(settings))", "= chat_id)) btn3 = types.InlineKeyboardButton(text = '➕1', callback_data = 'warns_count_+1::{chat_id}'.format(chat_id = chat_id)) btn4", "вас недостаточно прав для выполнения этого действия.' 
) else: if c.from_user.id == user_id", "r.message_id, api.get_group_params(msg.chat.id)['greeting']['delete_timer']) utils.new_update(msg, time.time()-start_time) @bot.message_handler(content_types=[ 'new_chat_members', 'left_chat_member', 'new_chat_title', 'new_chat_photo', 'delete_chat_photo', 'group_chat_created', 'supergroup_chat_created', 'channel_chat_created',", "callback_data = 'check_broadcast') btn2 = types.InlineKeyboardButton(text = 'Рассылка сообщения', callback_data = 'broadcast_settings') kb.add(btn1,", "reply_markup = group_setting(chat_id), ) @bot.callback_query_handler(func = lambda c: c.data == 'to_main_menu') def bot_to_main_menu(c):", "types.InlineKeyboardButton(text = 'Бан', callback_data = 'warns_action_2::{chat_id}'.format(chat_id = chat_id)) btn4 = types.InlineKeyboardButton(text = 'Read-only", "c.message.message_id, reply_markup = kb ) t.start() t.join() @bot.callback_query_handler(func = lambda c: c.data ==", "'Удалять ссылки{}'.format(config.settings_statuses[curr_settings['deletions']['url']]), callback_data = 'del_url::{chat_id}'.format(chat_id = chat_id)) keyboard.add(btn) btn = types.InlineKeyboardButton(text = 'Удалять", "def bot_update_time(msg): bot_ping(msg) subprocess.run(\"timedatectl set-time '{time}'\".format(time = datetime.datetime.fromtimestamp(msg.date+1).strftime(\"%Y-%m-%d %H:%M:%S\")), shell=True) bot_ping(msg) @bot.message_handler(content_types=['text'], func", "= 'Вы не являетесь администратором. 
Текущий статус настройки: {}'.format(config.settings_statuses[api.get_group_params(chat_id)['deletions']['system']]) ) @bot.callback_query_handler(func = lambda", "group_settings_deletions_all(c): chat_id = utils.parse_chat_id(c) if utils.check_status(c.from_user.id, utils.parse_chat_id(c)): for i in config.available_attachments: utils.change_state_deletions_files(chat_id, i)", "'HTML' ) utils.add_to_delete_queue(msg.chat.id, r.message_id, api.get_group_params(msg.chat.id)['restrictions']['for_time']*3600) if msg.new_chat_member.is_bot and api.get_group_params(msg.chat.id)['kick_bots']: bot.kick_chat_member( msg.chat.id, msg.new_chat_member.id )", "выполнения этого действия.' ) @bot.callback_query_handler(func = lambda c: c.data.startswith('to_group_settings_menu')) def group_settings_deletions_photo(c): chat_id =", "недостаточно прав для выполнения этого действия.' ) @bot.callback_query_handler(func = lambda c: c.data.startswith('settings_delete')) def", "unban_new_user_kb(msg): kb = types.InlineKeyboardMarkup(row_width=1) btn = types.InlineKeyboardButton(text = 'Разблокировать', callback_data = 'unban_new_user::{chat_id}::{user_id}'.format(user_id =", "not utils.check_status(msg.from_user.id, msg.chat.id)) def testt(msg): start_time = time.time() if utils.is_restricted(msg): bot.delete_message( msg.chat.id, msg.message_id", "= utils.parse_chat_id(c) bot.edit_message_reply_markup( chat_id = c.message.chat.id, message_id = c.message.message_id, reply_markup = welcome_settings_kb(chat_id) )", "in ['group', 'supergroup'] and msg.forward_from_chat is not None and utils.check_status(msg.from_user.id, msg.chat.id) and not", "= secret_config.token) my_info = bot.get_me() telebot_logger = logging.getLogger('telebot') sqlite_info = logging.getLogger('sqlite') main_info =", "bot_remove_kb(msg): kb = types.ReplyKeyboardMarkup(one_time_keyboard=True) kb.add(types.KeyboardButton(text='/rmkb')) r = bot.send_message( msg.chat.id, text = 
text.group_commands[utils.get_group_lang(msg.chat.id)]['remove_keyboard'], reply_markup", "if msg.new_chat_member.id == 495038140: api.change_group_params(msg.chat.id, ujson.dumps(config.default_group_settings)) else: if api.get_group_params(msg.chat.id)['restrictions']['read_only']: bot.restrict_chat_member( msg.chat.id, msg.new_chat_member.id, until_date", "text.group_commands[utils.get_group_lang(msg.chat.id)]['log_channel']['info']['is_off'] bot.send_message( msg.chat.id, m, parse_mode = 'HTML' ) @bot.message_handler(commands = ['leave'], func =", "'HTML', reply_markup = types.ReplyKeyboardRemove() ) elif utils.check_global_ban(msg): bot.kick_chat_member( msg.chat.id, msg.new_chat_member.id ) bot.send_message( msg.chat.id,", "'Статистика', callback_data = 'stats_menu') kb.add(btn1, btn2) kb.add(types.InlineKeyboardButton(text = 'В главное меню', callback_data =", "изменены' ) else: bot.send_message( msg.chat.id, text = 'Правила составлены неверно' ) utils.new_update(msg, time.time()-start_time)", "ban_time = api.get_group_params(msg.chat.id)['restrictions']['for_time'] ), reply_markup = unban_new_user_kb(msg), parse_mode = 'HTML' ) utils.add_to_delete_queue(msg.chat.id, r.message_id,", "'HTML' ) bot.edit_message_reply_markup( chat_id = c.message.chat.id, message_id = c.message.message_id, reply_markup = create_user_language_keyboard() )", "config import secret_config import text import ujson import utils WEBHOOK_HOST = utils.get_my_ip() WEBHOOK_PORT", "True and status2 is True: utils.set_log_channel(msg.chat.id, msg.forward_from_chat.id) elif status1 is not True: bot.send_message(", "@bot.message_handler(content_types=['new_chat_members']) def bot_users_new(msg): start_time = time.time() api.register_new_chat(msg.chat) chat_id = msg.chat.id utils.new_member_logs(msg) if api.get_group_params(msg.chat.id)['deletions']['system']:", "c.message.chat.id, message_id = c.message.message_id, text = '<b>Настройки группы {}</b>'.format(bot.get_chat(chat_id).title), 
parse_mode = 'HTML' )", ") if msg.chat.type == 'channel': bot.send_message( msg.chat.id, text.promotion_message, parse_mode='HTML' ) bot.leave_chat( msg.chat.id )", "generate_broadcast_vars_menu_kb() ) @bot.callback_query_handler(func = lambda c: c.data == 'check_broadcast') def bot_admin_menu(c): bot.edit_message_text( chat_id", "True, text = 'У вас недостаточно прав для выполнения этого действия. Текущий статус", "'r')) context = ssl.SSLContext(ssl.PROTOCOL_TLSv1_2) context.load_cert_chain(WEBHOOK_SSL_CERT, WEBHOOK_SSL_PRIV) # Start aiohttp server web.run_app( app, host=WEBHOOK_LISTEN,", "types.InlineKeyboardButton(text = 'Изменить язык', callback_data = 'change_lang') kb.add(btn1, btn2) if utils.check_super_user(user_id): kb.add(types.InlineKeyboardButton(text =", "callback_data = 'lang::{lang_code}'.format(lang_code = i['code']))) return lang_keyboard def group_setting(chat_id): keyboard = types.InlineKeyboardMarkup(row_width=1) curr_settings", "kb def user_settings_main_menu(msg): keyboard = types.InlineKeyboardMarkup(row_width=1) curr_settings = api.get_user_param(msg.chat.id, 'settings') btn = types.InlineKeyboardButton(text", "= lambda c: c.data == 'to_main_menu') def bot_to_main_menu(c): bot.edit_message_text( chat_id = c.message.chat.id, message_id", "time.time()-start_time) @bot.message_handler(content_types = ['sticker'], func = lambda msg: not utils.check_status(msg.from_user.id, msg.chat.id)) def bot_check_sticker(msg):", "действия.' 
) else: if c.from_user.id == user_id or utils.check_status(c.from_user.id, utils.parse_chat_id(c)): user = bot.get_chat_member(", "text.group_commands[utils.get_group_lang(c.message.chat.id)]['restricted']['new_user']['button_pressed'].format( user_id = user.user.id, user_name = api.replacer(user.user.first_name) ), parse_mode = 'HTML', chat_id =", "time.time() if utils.check_status(msg.from_user.id, msg.chat.id): sticker_id = msg.reply_to_message.sticker.file_id utils.ban_sticker(msg, sticker_id) elif not utils.check_status(msg.from_user.id, msg.chat.id):", "True and status2 is True: utils.remove_log_channel(msg.chat.id) elif status1 is not True: bot.send_message( msg.chat.id,", "для выполнения этого действия.' ) @bot.callback_query_handler(func = lambda c: c.data.startswith('warns_count_')) def ro_time_change(c): change_count", "создан') logging.basicConfig( format='%(filename)s [LINE:%(lineno)-3d]# %(levelname)-8s - %(name)-9s [%(asctime)s] - %(message)-50s ', datefmt='%m/%d/%Y %I:%M:%S", "not check_status(msg.from_user.id): utils.send_err_report(msg, 'no_args_provided') utils.new_update(msg, time.time()-start_time) @bot.message_handler(commands=['help']) def bot_help(msg): start_time = time.time() bot.send_message(", "msg.chat.id, msg.message_id ) @bot.message_handler(commands = ['settings'], func = lambda msg: msg.chat.type == 'supergroup')", "import secret_config import text import ujson import utils WEBHOOK_HOST = utils.get_my_ip() WEBHOOK_PORT =", "= Thread(target = utils.check_deleting_queue) t.start() async def handle(request): if request.match_info.get('token') == bot.token: request_body_dict", "bot_user_unban(msg): start_time = time.time() if utils.check_status(msg.from_user.id, msg.chat.id) and utils.have_args(msg): words = utils.parse_arg(msg)[1] user_id", "start_time = time.time() if utils.check_status(msg.from_user.id, msg.chat.id): api.register_new_chat(msg.chat) api.change_group_params(msg.chat.id, ujson.dumps(config.default_group_settings)) 
bot.send_message( msg.chat.id, text.group_commands[utils.get_group_lang(msg.chat.id)]['registration'], parse_mode", "для восстановления их, в случае сбоя:\\n'+ujson.dumps(api.get_group_params(chat_id)) ) bot.answer_callback_query( c.id, text = 'Настройки отправлены',", "= lambda c: c.data.startswith('new_restrictions_admins_only_')) def warns_count_change(c): chat_id = utils.parse_chat_id(c) state = c.data.split('_')[4].split('::')[0] if", "def bot_check_system(msg): start_time = time.time() if api.get_group_params(msg.chat.id)['deletions']['system']: bot.delete_message( msg.chat.id, msg.message_id ) utils.new_update(msg, time.time()-start_time)", "останься', callback_data='leave_confirm::{chat_id}'.format(chat_id = chat_id)) keyboard.add(btn) return keyboard def generate_user_menu_kb(user_id): kb = types.InlineKeyboardMarkup(row_width =", "callback_data='open_lang_menu') keyboard.add(btn) return keyboard def delete_settings(chat_id): keyboard = types.InlineKeyboardMarkup(row_width=1) curr_settings = api.get_group_params(chat_id) for", "def user_settings_main_menu(msg): keyboard = types.InlineKeyboardMarkup(row_width=1) curr_settings = api.get_user_param(msg.chat.id, 'settings') btn = types.InlineKeyboardButton(text =", "def bot_text(msg): start_time = time.time() bot.reply_to(msg, \"<code>'{}': '{}',</code>\".format(msg.photo[0].file_id, msg.caption), parse_mode ='HTML') utils.new_update(msg, time.time()-start_time)", "недостаточно прав для выполнения этого действия.' 
) @bot.callback_query_handler(func = lambda c: c.data.startswith('warns_settings')) def", "def remove_warns_kb(user_id): kb = types.InlineKeyboardMarkup(row_width=1) btn = types.InlineKeyboardButton(text = 'Удалить предупреждения', callback_data =", "keyboard.add(types.InlineKeyboardButton(text = 'К списку групп', callback_data = 'to_groups_list')) return keyboard def welcome_settings_kb(chat_id): kb", "kb def generate_admin_menu_kb(): kb = types.InlineKeyboardMarkup(row_width = 2) btn1 = types.InlineKeyboardButton(text = 'Рассылка',", "'Список отправлен', show_alert = True ) @bot.callback_query_handler(func = lambda c: c.data in ['my_chats',", "utils.check_status(c.from_user.id, utils.parse_chat_id(c)): if c.data.startswith('reset_settings_confirmation'): api.register_new_chat(c.message.chat) api.change_group_params(chat_id, ujson.dumps(config.default_group_settings)) bot.send_message( c.message.chat.id, 'Настройки сброшены.' ) bot.delete_message(", "подтверждены.' ) else: bot.answer_callback_query( callback_query_id = c.id, show_alert = True, text = 'У", "c.data.endswith('confirm'): bot.delete_message( c.message.chat.id, c.message.message_id ) bot.send_message( c.message.chat.id, text.group_commands[utils.get_group_lang(c.message.chat.id)]['leave']['accepted'] ) bot.leave_chat( c.message.chat.id ) else:", "my_info.id: status2 = True if status1 is True and status2 is True: utils.set_log_channel(msg.chat.id,", "'settings')) btns = [] for i in user_settings['admined_groups']: btn = types.InlineKeyboardButton(text = i['title'],", "msg.chat.id, text = text.group_commands[utils.get_group_lang(msg.chat.id)]['remove_keyboard'], reply_markup = kb ) bot.delete_message( msg.chat.id, r.message_id ) bot.delete_message(", "bot_ban_me_please(msg): start_time = time.time() if msg.text == '/ban_me_please': t = random.randint(1, 10) ban_time", "api.get_group_params(msg.chat.id)['restrictions']['for_time']*3600) if msg.new_chat_member.is_bot and 
api.get_group_params(msg.chat.id)['kick_bots']: bot.kick_chat_member( msg.chat.id, msg.new_chat_member.id ) bot.send_message( msg.chat.id, text.group_commands['ru']['restricted']['bot'], parse_mode", "c: c.data.startswith('welcome_state')) def welcome_settings_state(c): chat_id = utils.parse_chat_id(c) if utils.check_status(c.from_user.id, utils.parse_chat_id(c)): settings = api.get_group_params(chat_id)", "= ['dellog'], func = lambda msg: msg.chat.type in ['group', 'supergroup'] and msg.forward_from_chat is", "к сертификату WEBHOOK_SSL_PRIV = './webhook_pkey.pem' # Путь к приватному ключу WEBHOOK_URL_BASE = \"https://%s:%s\"", "'broadcast_check::all') btn4 = types.InlineKeyboardButton(text = 'Сейчас: {}'.format(s[curr_settings['broadcast']['check']['receivers']]), callback_data = 'empty_callback') btn5 = types.InlineKeyboardButton(text", "= 1) btn1 = types.InlineKeyboardButton(text = 'Мои чаты', callback_data = 'my_chats') btn2 =", "if settings['restrictions']['for_time'] < 1: settings['restrictions']['for_time'] = 1 api.change_group_params(chat_id, ujson.dumps(settings)) bot.edit_message_reply_markup( chat_id = c.message.chat.id,", ") @bot.callback_query_handler(func = lambda c: c.data.startswith('settings_delete')) def del_settings(c): words = c.data.split() bot.delete_message( c.message.chat.id,", "utils.check_super_user(msg.from_user.id)) def bot_logs(msg): bot.send_document(msg.chat.id, open('logs.txt', 'rb')) @bot.message_handler(commands = ['menu']) def bot_user_menu(msg): bot.send_message( msg.from_user.id,", "text = 'Ваше меню' ) bot.edit_message_reply_markup( chat_id = c.message.chat.id, message_id = c.message.message_id, reply_markup", "главное меню', callback_data = 'to_main_menu')) return kb def generate_broadcast_settings_menu_kb(): kb = types.InlineKeyboardMarkup(row_width =", "inviter_firstname = inviter_info.user.first_name, invited_count = int(i['COUNT(`inviter`)']) ) bot.send_message( user_id, m, parse_mode = 'HTML'", "= c.message.message_id, 
reply_markup = generate_user_menu_kb(c.from_user.id) ) @bot.callback_query_handler(func = lambda c: c.data == 'broadcast_menu')", "text = 'Изменения подтверждены.' ) @bot.callback_query_handler(func = lambda c: c.data.startswith('warns_action_')) def warns_count_change(c): new_mod", ") @bot.callback_query_handler(func = lambda c: c.data.startswith('unban_new_user')) def unban_new_user(c): chat_id = utils.parse_chat_id(c) user_id =", "8443 (порт должен быть открыт!) # На некоторых серверах придется указывать такой же", ") utils.new_update(msg, time.time()-start_time) @bot.message_handler(commands = ['get_id']) def bot_get_id(msg): bot.send_message( msg.chat.id, msg.chat.id ) #", "'admin_menu')) return kb def generate_admin_menu_kb(): kb = types.InlineKeyboardMarkup(row_width = 2) btn1 = types.InlineKeyboardButton(text", "telebot_logger = logging.getLogger('telebot') mysql_info = logging.getLogger('mysql') main_info = logging.getLogger('main_info') report_info = logging.getLogger('reports') print('Список", "msg: msg.chat.type == 'supergroup') def bot_answ(msg): start_time = time.time() message = msg kb", "действия.' 
) @bot.callback_query_handler(func = lambda c: c.data.startswith('welcome_timer')) def welcome_timer_change(c): change_count = int(c.data.split('_')[2].split('::')[0]) chat_id", "= lambda msg: msg.chat.type in ['group', 'supergroup'] and msg.forward_from_chat is not None and", "web from telebot import types import api import cherrypy import config import secret_config", "msg.chat.id == secret_config.channel_ID) def bot_broadcast(msg): r = bot.forward_message(secret_config.official_chat, msg.chat.id, msg.message_id) bot.pin_chat_message( r.chat.id, r.message_id", "придется указывать такой же IP, что и выше WEBHOOK_LISTEN = '0.0.0.0' WEBHOOK_SSL_CERT =", "ujson.loads(api.get_bot_settings(secret_config.token)) s = { 'users': 'пользователи', 'chats': 'диалоги', 'all': 'все' } btn1 =", "msg.chat.type != 'private') def bot_group_start(msg): start_time = time.time() api.register_new_chat(msg.chat) utils.new_update(msg, time.time()-start_time) @bot.message_handler(commands =", "= time.time() if utils.check_status(msg.from_user.id, msg.chat.id): utils.read_only(msg) else: utils.send_err_report(msg, 'not_enought_rights') utils.new_update(msg, time.time()-start_time) @bot.message_handler(commands=['stickerpack_ban'],func=lambda msg:", "chat_id)) btn4 = types.InlineKeyboardButton(text = '➕2', callback_data = 'time_ro_+2::{chat_id}'.format(chat_id = chat_id)) btn5 =", "welcome_timer_change(c): change_count = int(c.data.split('_')[2].split('::')[0]) chat_id = utils.parse_chat_id(c) if utils.check_status(c.from_user.id, utils.parse_chat_id(c)): settings = api.get_group_params(chat_id)", "= lambda c: c.data.startswith('welcome_timer')) def welcome_timer_change(c): change_count = int(c.data.split('_')[2].split('::')[0]) chat_id = utils.parse_chat_id(c) if", "callback_data = 'empty_callback::{chat_id}'.format(chat_id = chat_id)) keyboard.add(btn) btn1 = types.InlineKeyboardButton(text = '➖2', callback_data =", "None and utils.check_status(msg.from_user.id, msg.chat.id) and 
msg.forward_from_chat.id == utils.get_log_id(msg.chat.id) and utils.check_log(msg.chat.id) ) def bot_del_log(msg):", "time.time() if utils.check_status(msg.from_user.id, msg.chat.id) and utils.have_args(msg): words = utils.parse_arg(msg)[1] user_id = int(words) utils.unban_user(msg,", "chat_id)) btn2 = types.InlineKeyboardButton(text = '➖1', callback_data = 'time_ro_-1::{chat_id}'.format(chat_id = chat_id)) btn3 =", "= chat_id)) keyboard.add(btn) btn1 = types.InlineKeyboardButton(text = '➖2', callback_data = 'warns_count_-2::{chat_id}'.format(chat_id = chat_id))", "= types.InlineKeyboardButton(text = 'Все', callback_data = 'broadcast_check::all') btn4 = types.InlineKeyboardButton(text = 'Сейчас: {}'.format(s[curr_settings['broadcast']['check']['receivers']]),", "utils.is_restricted(msg) and not utils.check_status(msg.from_user.id, msg.chat.id): bot.delete_message( msg.chat.id, msg.message_id ) if msg_text_low.startswith('разбан'): if utils.check_super_user(msg.from_user.id):", "lambda c: c.data.startswith('unban_new_user')) def unban_new_user(c): chat_id = utils.parse_chat_id(c) user_id = utils.parse_user_id(c) if api.get_group_params(chat_id)['restrictions']['admins_only']:", "utils.unban_user_button(c) user = bot.get_chat_member( chat_id, user_id ) bot.edit_message_text( text = text.group_commands[utils.get_group_lang(c.message.chat.id)]['restricted']['new_user']['button_pressed'].format( user_id =", "ban_time)) bot.reply_to( msg, text.group_commands[utils.get_group_lang(msg.chat.id)]['ban_me_please'].format( t = t ), parse_mode = 'HTML' ) else:", "text.user_messages['start'], reply_markup=generate_user_menu_kb(msg.from_user.id) ) api.register_new_user(msg.from_user, 'ru') else: bot.send_message( msg.chat.id, text.user_messages[utils.get_user_lang(msg)]['start'], reply_markup=generate_user_menu_kb(msg.from_user.id) ) utils.new_update(msg, time.time()-start_time)", "async def handle(request): if request.match_info.get('token') == bot.token: request_body_dict = 
await request.json() update =", "@bot.callback_query_handler(func = lambda c: c.data.startswith('time_ro_')) def ro_time_change(c): change_time = int(c.data.split('_')[2].split('::')[0]) chat_id = utils.parse_chat_id(c)", "except Exception as e: print(e) @bot.message_handler(commands = ['infolog'], func = lambda msg: msg.chat.type", "ujson.loads(api.get_user_param(user_id, 'settings')) btns = [] for i in user_settings['admined_groups']: btn = types.InlineKeyboardButton(text =", "if len(msg.text) not in [9, 21]: new_greeting = msg.text[len(msg.text):msg.entities[0].length:-1][::-1] if utils.check_text(new_greeting): utils.set_greeting(msg, new_greeting)", "пользователей', callback_data = 'new_users_restrictions::{chat_id}'.format(chat_id = chat_id)) keyboard.add(btn) btn = types.InlineKeyboardButton(text = 'Настройка предупреждений',", "delete_settings(chat_id) ) bot.answer_callback_query( callback_query_id = c.id, text = 'Изменения подтверждены.' ) else: bot.answer_callback_query(", "c.id, text = 'Изменения подтверждены.' ) @bot.callback_query_handler(func = lambda c: c.data.startswith('warns_action_')) def warns_count_change(c):", "if utils.check_text(new_greeting): utils.set_greeting(msg, new_greeting) bot.send_message( msg.chat.id, 'Приветствие изменено' ) else: bot.send_message( msg.chat.id, text", "utils.send_err_report(msg, 'not_enought_rights') utils.new_update(msg, time.time()-start_time) @bot.message_handler(commands = ['reregister'], func = lambda msg: msg.chat.type ==", "= c.id, text = 'Изменения подтверждены. 
Текущий статус настройки: {}'.format(config.settings_statuses[api.get_group_params(chat_id)[c.data.split('::')[0]]]) ) else: bot.answer_callback_query(", "msg_id = msg.message_id txt = text.reports_messages['report']['to_admin']['have_username']['no_reply'] else: txt = text.reports_messages['report']['to_admin']['no_username'] for i in", "= types.InlineKeyboardButton(text = '➕2', callback_data = 'time_ro_+2::{chat_id}'.format(chat_id = chat_id)) btn5 = types.InlineKeyboardButton(text =", "c.message.chat.id, message_id = c.message.message_id, reply_markup = warns_settings_kb(chat_id) ) bot.answer_callback_query( callback_query_id = c.id, text", "utils.parse_chat_id(c) # if utils.check_status(c.from_user.id, utils.parse_chat_id(c)): # settings = api.get_group_params(chat_id) # settings[''] # api.change_group_params(chat_id,", "elif msg_text.lower() in ['глобал бан']: if utils.check_super_user(msg.from_user.id): utils.global_ban(msg) elif not utils.check_status(msg.from_user.id, msg.chat.id): #", "if utils.check_status(msg.from_user.id, msg.chat.id) and utils.have_args(msg): stickerpack_name = utils.parse_arg(msg)[1] utils.unban_stickerpack(msg, stickerpack_name) utils.new_update(msg, time.time()-start_time) @bot.message_handler(commands=['sticker_ban'],", "= msg.reply_to_message.message_id txt = text.reports_messages['report']['to_admin']['have_username']['reply'] else: msg_id = msg.message_id txt = text.reports_messages['report']['to_admin']['have_username']['no_reply'] else:", ") except Exception as e: print(e) @bot.message_handler(commands = ['infolog'], func = lambda msg:", "== 'supergroup') def bot_stickerpack_ban(msg): start_time = time.time() if utils.check_status(msg.from_user.id, msg.chat.id): utils.ban_stickerpack(msg) else: utils.send_err_report(msg,", "btn4) keyboard.add(btn5, btn6) btn = types.InlineKeyboardButton(text = 'Снятие ограничений разрешено для: {}'.format(config.new_users[curr_settings['restrictions']['admins_only']]), callback_data", 
"uptime.days != 0: uptime_str = uptime_str.replace(uptime_str.split(',')[0], utils.get_text_translation(uptime_str.split(',')[0]), 'ru') if working_time.days != 0: working_time_str", "message_id = c.message.message_id ) utils.add_to_delete_queue(chat_id, c.message.message_id, api.get_group_params(chat_id)['greeting']['delete_timer']) else: bot.answer_callback_query( callback_query_id = c.id, show_alert", "utils.unban_stickerpack(msg, stickerpack_name) utils.new_update(msg, time.time()-start_time) @bot.message_handler(commands=['sticker_ban'], func=lambda msg: msg.chat.type == 'supergroup') def bot_sticker_ban(msg): start_time", "'./webhook_cert.pem' # Путь к сертификату WEBHOOK_SSL_PRIV = './webhook_pkey.pem' # Путь к приватному ключу", "= 'Да, выполнить сброс', callback_data = 'reset_settings_confirmation::{chat_id}'.format(chat_id = msg.chat.id))) kb.add(types.InlineKeyboardButton(text = 'Нет, не", "utils.parse_chat_id(c) if utils.check_status(c.from_user.id, utils.parse_chat_id(c)): utils.change_state_deletions_main(chat_id, 'system') bot.edit_message_reply_markup( chat_id=c.message.chat.id, message_id=c.message.message_id, reply_markup=group_setting(utils.parse_chat_id(c)) ) bot.answer_callback_query( callback_query_id", "= 'Отправлять приветствие в чат: {}'.format(config.settings_statuses[curr_settings['greeting']['is_enabled']]), callback_data = 'welcome_state::{chat_id}'.format(chat_id = chat_id)) kb.add(btn) btn", "message_id = c.message.message_id, reply_markup=group_setting(utils.parse_chat_id(c)) ) bot.answer_callback_query( callback_query_id = c.id, text = 'Изменения подтверждены.'", "callback_data = 'read_only::{chat_id}'.format(chat_id = chat_id)) keyboard.add(btn) btn1 = types.InlineKeyboardButton(text = '➖2', callback_data =", "telebot.types.Update.de_json(request_body_dict) bot.process_new_updates([update]) return web.Response() else: return web.Response(status=403) app.router.add_post('/{token}/', handle) def create_user_language_keyboard(): 
lang_keyboard =", "c.message.message_id, reply_markup = delete_settings(chat_id) ) bot.answer_callback_query( callback_query_id = c.id, text = 'Изменения подтверждены.'", "= logging.INFO ) app = web.Application() t = Thread(target = utils.check_deleting_queue) t.start() async", "]) def bot_check_system(msg): start_time = time.time() if api.get_group_params(msg.chat.id)['deletions']['system']: bot.delete_message( msg.chat.id, msg.message_id ) utils.new_update(msg,", "= utils.parse_chat_id(c) if utils.check_status(c.from_user.id, utils.parse_chat_id(c)): api.zeroing_warns(user_id, chat_id) bot.edit_message_text( text = 'Предупреждения обнулены.', chat_id", ") # @bot.callback_query_handler(func = lambda c: c.data.startswith('settings_captcha')) # def change_captcha_settings(c): # chat_id =", "try: admins = bot.get_chat_administrators(msg.forward_from_chat.id) status1 = False status2 = False for i in", "start_time = time.time() admins = bot.get_chat_administrators(msg.chat.id) chat = bot.get_chat(msg.chat.id) msg_id = '' if", "= generate_user_menu_kb(c.from_user.id) ) @bot.callback_query_handler(func = lambda c: c.data == 'broadcast_menu') def bot_admin_menu(c): bot.edit_message_text(", "= c.message.message_id, reply_markup = welcome_settings_kb(chat_id) ) bot.answer_callback_query( callback_query_id = c.id, text = 'Изменения", "if working_time.days != 0: working_time_str = working_time_str.replace(working_time_str.split(',')[0], utils.get_text_translation(working_time_str.split(',')[0], 'ru')) bot.send_message( msg.chat.id, text.user_messages['ru']['commands']['ping'].format( unix_time", "Текущий статус настройки: {}'.format(config.settings_statuses[api.get_group_params(chat_id)[c.data.split('::')[0]]]) ) @bot.callback_query_handler(func = lambda c: c.data.startswith('del_url')) def del_url(c): chat_id", "utils.change_state_deletions_main(chat_id, 'url') bot.edit_message_reply_markup( chat_id=c.message.chat.id, message_id=c.message.message_id, 
reply_markup=group_setting(utils.parse_chat_id(c)) ) bot.answer_callback_query( callback_query_id = c.id, text =", "= c.message.chat.id, message_id = c.message.message_id, reply_markup = delete_settings(chat_id) ) bot.answer_callback_query( callback_query_id = c.id,", "c: c.data.startswith('warns_count_')) def ro_time_change(c): change_count = int(c.data.split('_')[2].split('::')[0]) chat_id = utils.parse_chat_id(c) if utils.check_status(c.from_user.id, utils.parse_chat_id(c)):", "api.replacer(user.user.first_name) ), parse_mode = 'HTML', chat_id = c.message.chat.id, message_id = c.message.message_id ) utils.add_to_delete_queue(msg.chat.id,", "сброс', callback_data = 'reset_settings_confirmation::{chat_id}'.format(chat_id = msg.chat.id))) kb.add(types.InlineKeyboardButton(text = 'Нет, не стоит', callback_data =", "= 'Рассылка начата' ) bot.edit_message_reply_markup( chat_id = c.message.chat.id, message_id = c.message.message_id, reply_markup =", "stickerpack_name) utils.new_update(msg, time.time()-start_time) @bot.message_handler(commands=['sticker_ban'], func=lambda msg: msg.chat.type == 'supergroup') def bot_sticker_ban(msg): start_time =", ") @bot.message_handler(commands = ['leave'], func = lambda msg: msg.chat.type != 'private' and utils.check_status(msg.from_user.id,", "443, 80, 88 или 8443 (порт должен быть открыт!) 
# На некоторых серверах", "btn = types.InlineKeyboardButton(text = 'Максимальное количество исключений: {}'.format(curr_settings['warns']['count']), callback_data = 'empty_callback::{chat_id}'.format(chat_id = chat_id))", "msg.reply_to_message.message_id txt = text.reports_messages['report']['to_admin']['have_username']['reply'] else: msg_id = msg.message_id txt = text.reports_messages['report']['to_admin']['have_username']['no_reply'] else: txt", "msg.chat.id, text.group_commands[utils.get_group_lang(msg.chat.id)]['restricted']['url'].format( user_id = msg.from_user.id, user_name = api.replacer(msg.from_user.first_name) ), parse_mode='HTML' ) # elif", "= types.InlineKeyboardButton(text = 'Переключить все', callback_data = 'change_all::{chat_id}'.format(chat_id = chat_id)) keyboard.add(btn) btn =", "logging.basicConfig( format='%(filename)s [LINE:%(lineno)-3d]# %(levelname)-8s - %(name)-9s [%(asctime)s] - %(message)-50s ', datefmt='%m/%d/%Y %I:%M:%S %p',", "прав для выполнения этого действия.' ) @bot.callback_query_handler(func = lambda c: c.data.startswith('time_ro_')) def ro_time_change(c):", "этого действия.' 
) @bot.callback_query_handler(func = lambda c: c.data.startswith('warns_count_')) def ro_time_change(c): change_count = int(c.data.split('_')[2].split('::')[0])", "'Настройка предупреждений', callback_data = 'warns_settings::{chat_id}'.format(chat_id = chat_id)) keyboard.add(btn) btn = types.InlineKeyboardButton(text = 'Настройка", "types.InlineKeyboardButton(text = '➖2', callback_data = 'time_ro_-2::{chat_id}'.format(chat_id = chat_id)) btn2 = types.InlineKeyboardButton(text = '➖1',", "'broadcast_message::input') btn2 = types.InlineKeyboardButton(text = 'Просмотреть сообщение', callback_data = 'broadcast_message::show') btn3 = types.InlineKeyboardButton(text", "aiohttp import web from telebot import types import api import cherrypy import config", "= types.InlineKeyboardMarkup(row_width = 4) curr_settings = api.get_group_params(chat_id) btn = types.InlineKeyboardButton(text = 'Автоматический read-only", "= types.InlineKeyboardButton(text = '➕2', callback_data = 'warns_count_+2::{chat_id}'.format(chat_id = chat_id)) keyboard.add(btn1, btn2, btn3, btn4)", "btn = types.InlineKeyboardButton(text = 'Получить топ инвайтеров', callback_data = 'get_chat_refs::{chat_id}'.format(chat_id = chat_id)) keyboard.add(btn)", "подтверждены. 
Текущий статус настройки: {}'.format(config.settings_statuses[api.get_group_params(chat_id)['deletions']['system']]) ) else: bot.answer_callback_query( callback_query_id = c.id, show_alert =", "lambda c: c.data.startswith('new_users_restrictions')) def new_users_restrictions(c): chat_id = utils.parse_chat_id(c) bot.edit_message_reply_markup( chat_id = c.message.chat.id, message_id", ") else: bot.reply_to( msg, text.group_commands[utils.get_group_lang(msg.chat.id)]['errors']['prefix'].format( reason = text.group_commands[utils.get_group_lang(msg.chat.id)]['errors']['reasons']['user_is_admin'] ), parse_mode='HTML' ) except Exception", "= delete_settings(utils.parse_chat_id(c)) ) bot.answer_callback_query( callback_query_id = c.id, text = 'Изменения подтверждены. Статус настройки:", "import types import api import cherrypy import config import secret_config import text import", "user_id = c.from_user.id inviters = utils.get_top_inviters(chat_id) m = text.group_commands[utils.get_group_lang(chat_id)]['refs_stats']['header'] counter = 0 for", "= chat_id)) keyboard.add(btn) btn = types.InlineKeyboardButton(text = 'Настройка приветствий', callback_data = 'welcome_settings::{chat_id}'.format(chat_id =", "status2 = True if status1 is True and status2 is True: utils.set_log_channel(msg.chat.id, msg.forward_from_chat.id)", "and status2 is True: utils.remove_log_channel(msg.chat.id) elif status1 is not True: bot.send_message( msg.chat.id, text", "= str(uptime).replace('day', 'days').replace('dayss', 'days') working_time_str = str(working_time).replace('day', 'days').replace('dayss', 'days') if uptime.days != 0:", "msg: msg.chat.type != 'private') def bot_reset_settings(msg): start_time = time.time() kb = types.InlineKeyboardMarkup() kb.add(types.InlineKeyboardButton(text", "if utils.check_status(c.from_user.id, utils.parse_chat_id(c)): settings = api.get_group_params(chat_id) settings['warns']['count'] = settings['warns']['count'] + change_count if settings['warns']['count']", 
") @bot.callback_query_handler(func = lambda c: c.data.startswith('welcome_state')) def welcome_settings_state(c): chat_id = utils.parse_chat_id(c) if utils.check_status(c.from_user.id,", "= chat_id)) keyboard.add(btn1, btn2, btn3, btn4) btn = types.InlineKeyboardButton(text = 'Назад', callback_data='to_group_settings_menu::{chat_id}'.format(chat_id =", "text = '<b>Настройки группы {}</b>'.format(bot.get_chat(chat_id).title), parse_mode = 'HTML' ) bot.edit_message_reply_markup( chat_id = c.message.chat.id,", "@bot.message_handler(commands=['sticker_ban'], func=lambda msg: msg.chat.type == 'supergroup') def bot_sticker_ban(msg): start_time = time.time() if utils.check_status(msg.from_user.id,", "msg.chat.type != 'private') def bot_reset_settings(msg): start_time = time.time() kb = types.InlineKeyboardMarkup() kb.add(types.InlineKeyboardButton(text =", "kb = types.InlineKeyboardMarkup(row_width = 2) btn1 = types.InlineKeyboardButton(text = 'Рассылка', callback_data = 'broadcast_menu')", "False for i in admins: if i.user.id == user_id: if i.status == 'creator':", "parse_mode = 'HTML' ) bot.answer_callback_query( c.id, text = 'Список отправлен', show_alert = True", "c.data.startswith('welcome_settings')) def welcome_settings(c): chat_id = utils.parse_chat_id(c) bot.edit_message_reply_markup( chat_id = c.message.chat.id, message_id = c.message.message_id,", "if utils.check_status(c.from_user.id, utils.parse_chat_id(c)): utils.change_state_main(chat_id, 'get_notifications') bot.edit_message_reply_markup( chat_id=c.message.chat.id, message_id=c.message.message_id, reply_markup=group_setting(utils.parse_chat_id(c)) ) bot.answer_callback_query( callback_query_id =", "def bot_logs(msg): bot.send_document(msg.chat.id, open('logs.txt', 'rb')) @bot.message_handler(commands = ['menu']) def bot_user_menu(msg): bot.send_message( msg.from_user.id, 'Ваше", "def bot_get_settings_json(c): chat_id = utils.parse_chat_id(c) bot.send_message( chat_id = c.from_user.id, text = 'Эти 
настройки", "chat_id = utils.parse_chat_id(c) bot.edit_message_reply_markup( chat_id = c.message.chat.id, message_id = c.message.message_id, reply_markup = delete_settings(utils.parse_chat_id(c))", "= ['update_time'], func = lambda msg: utils.check_super_user(msg.from_user.id)) def bot_update_time(msg): bot_ping(msg) subprocess.run(\"timedatectl set-time '{time}'\".format(time", "'ru') else: bot.send_message( msg.chat.id, text.user_messages[utils.get_user_lang(msg)]['start'], reply_markup=generate_user_menu_kb(msg.from_user.id) ) utils.new_update(msg, time.time()-start_time) @bot.message_handler(commands=['start'], func=lambda msg: msg.chat.type", "cont_type = re.split('::', c.data)[1] if utils.check_status(c.from_user.id, utils.parse_chat_id(c)): utils.change_state_deletions_files(chat_id, cont_type) bot.edit_message_reply_markup( chat_id = c.message.chat.id,", "'Приветствие изменено' ) else: bot.send_message( msg.chat.id, text = 'Данное приветствие не работает' )", "@bot.callback_query_handler(func = lambda c: c.data.startswith('warns_del')) def del_warns(c): user_id = utils.parse_user_id(c) chat_id = utils.parse_chat_id(c)", "time.time()-start_time) @bot.message_handler(commands=['kick'], func=lambda msg: msg.chat.type != 'private') def bot_kick(msg): start_time = time.time() utils.kick_user(msg)", "= text.reports_messages['report']['to_admin']['have_username']['reply'] else: msg_id = msg.message_id txt = text.reports_messages['report']['to_admin']['have_username']['no_reply'] else: txt = text.reports_messages['report']['to_admin']['no_username']", "func = lambda msg: msg.chat.type in ['group', 'supergroup']) def bot_info_log(msg): if utils.check_log(msg.chat.id): m", "def bot_broadcast_check(c): arg = c.data.split('::')[1] curr_bot_settings = ujson.loads(api.get_bot_settings(secret_config.token)) if arg in ['users', 'chats',", "этого действия. 
Текущий статус настройки: {}'.format(config.settings_statuses[api.get_group_params(chat_id)['deletions']['files'][cont_type]]) ) @bot.callback_query_handler(func = lambda c: c.data.startswith('change_all')) def", "'settings') btn = types.InlineKeyboardButton(text = 'Принимать рассылки{}'.format(config.settings_statuses['get_notifications']), callback_data='get_notifications') keyboard.add(btn) btn = types.InlineKeyboardButton(text =", "in config.available_attachments: btn = types.InlineKeyboardButton(text=config.available_attachments_str[cont_type].format(config.settings_statuses[curr_settings['deletions']['files'][cont_type]]), callback_data='delete::{content_type}::{chat_id}'.format(content_type = cont_type, chat_id = chat_id)) keyboard.add(btn) btn", "= types.InlineKeyboardButton(text = 'Максимальное количество исключений: {}'.format(curr_settings['warns']['count']), callback_data = 'empty_callback::{chat_id}'.format(chat_id = chat_id)) keyboard.add(btn)", "c: c.data.startswith('welcome_settings')) def welcome_settings(c): chat_id = utils.parse_chat_id(c) bot.edit_message_reply_markup( chat_id = c.message.chat.id, message_id =", "'Сейчас: {}'.format(s[curr_settings['broadcast']['check']['receivers']]), callback_data = 'empty_callback') btn5 = types.InlineKeyboardButton(text = 'Начать рассылку', callback_data =", "= 'welcome_state::{chat_id}'.format(chat_id = chat_id)) kb.add(btn) btn = types.InlineKeyboardButton(text = 'Задержка перед удалением приветствия:", "utils.is_user_new(msg): if utils.have_args(msg): referrer = utils.parse_arg(msg)[1] bot.send_message( msg.chat.id, text.user_messages['start'], reply_markup=generate_user_menu_kb(msg.from_user.id) ) api.register_new_user(msg.from_user, 'ru')", "utils.send_err_report(msg, 'no_args_provided') else: utils.send_err_report(msg, 'not_enought_rights') utils.new_update(msg, time.time()-start_time) @bot.message_handler(commands = ['reregister'], func = lambda", "cont_type) bot.edit_message_reply_markup( chat_id = 
c.message.chat.id, message_id = c.message.message_id, reply_markup = delete_settings(utils.parse_chat_id(c)) ) bot.answer_callback_query(", "bot.edit_message_reply_markup( chat_id = c.message.chat.id, message_id = c.message.message_id, reply_markup = new_users_restrictions_kb(chat_id) ) bot.answer_callback_query( callback_query_id", "= 'Удалить', callback_data = 'settings_delete {} {}'.format(msg.message_id, r.message_id))) bot.edit_message_reply_markup( chat_id = msg.chat.id, message_id", "!= 'private') def bot_get_rules(msg): start_time = time.time() try: bot.send_message( msg.from_user.id, utils.generate_rules_text(msg), parse_mode =", "not utils.check_status(msg.from_user.id, msg.chat.id): utils.send_err_report(msg, 'not_enought_rights') elif utils.check_status(msg.reply_to_message.from_user.id, msg.chat.id): utils.send_err_report(msg, 'user_is_admin') utils.new_update(msg, time.time()-start_time) @bot.message_handler(commands=['donate'])", "time.time()-start_time) @bot.message_handler(commands=['sticker_ban'], func=lambda msg: msg.chat.type == 'supergroup') def bot_sticker_ban(msg): start_time = time.time() if", "api.change_group_params(chat_id, ujson.dumps(settings)) bot.edit_message_reply_markup( chat_id = c.message.chat.id, message_id = c.message.message_id, reply_markup = new_users_restrictions_kb(chat_id) )", "[9, 21]: new_rules = msg.text[len(msg.text):msg.entities[0].length:-1][::-1] if utils.check_text(new_rules): utils.set_rules(msg, new_rules) bot.send_message( msg.chat.id, 'Правила изменены'", "'migrate_from_chat_id', 'pinned_message' ]) def bot_check_system(msg): start_time = time.time() if api.get_group_params(msg.chat.id)['deletions']['system']: bot.delete_message( msg.chat.id, msg.message_id", "= False status2 = False for i in admins: if i.user.id == user_id:", "api.zeroing_warns(user_id, chat_id) bot.edit_message_text( text = 'Предупреждения обнулены.', chat_id = c.message.chat.id, message_id = c.message.message_id", "'user_id': c.from_user.id, 
'message_id': c.message.message_id } ) kb = types.InlineKeyboardMarkup() kb.add(types.InlineKeyboardButton(text = 'В главное", ") utils.new_update(msg, time.time()-start_timee) @bot.message_handler(content_types=['new_chat_members']) def bot_users_new(msg): start_time = time.time() api.register_new_chat(msg.chat) chat_id = msg.chat.id", "= c.message.message_id, reply_markup = new_users_restrictions_kb(chat_id) ) bot.answer_callback_query( callback_query_id = c.id, text = 'Изменения", "chat_id = c.message.chat.id, message_id = c.message.message_id, text = text.service_messages['stats'].format( all_users = api.get_users_count(), all_chats", "text.reports_messages['report']['to_user'], parse_mode = 'HTML' ) utils.new_update(msg, time.time()-start_time) @bot.message_handler(commands = ['unban'], func = lambda", "c.data.startswith('unban_new_user')) def unban_new_user(c): chat_id = utils.parse_chat_id(c) user_id = utils.parse_user_id(c) if api.get_group_params(chat_id)['restrictions']['admins_only']: if utils.check_status(c.from_user.id,", "settings['greeting']['is_enabled'] = new_state api.change_group_params(chat_id, ujson.dumps(settings)) bot.edit_message_reply_markup( chat_id = c.message.chat.id, message_id = c.message.message_id, reply_markup", "utils.new_update(msg, time.time()-start_time) @bot.message_handler(content_types=[ 'new_chat_members', 'left_chat_member', 'new_chat_title', 'new_chat_photo', 'delete_chat_photo', 'group_chat_created', 'supergroup_chat_created', 'channel_chat_created', 'migrate_to_chat_id', 'migrate_from_chat_id',", "действия. 
Текущий статус настройки: {}'.format(config.settings_statuses[api.get_group_params(chat_id)[c.data.split('::')[0]]]) ) @bot.callback_query_handler(func = lambda c: c.data.startswith('deletions_settings')) def to_deletions(c):", "некоторых серверах придется указывать такой же IP, что и выше WEBHOOK_LISTEN = '0.0.0.0'", "import ssl import subprocess import threading import time from multiprocessing import Process as", "warns_settings_kb(chat_id) ) bot.answer_callback_query( callback_query_id = c.id, text = 'Изменения подтверждены.' ) @bot.callback_query_handler(func =", "= logging.getLogger('reports') if __name__ == '__main__': log_name = 'logs.txt' f = open(log_name,'w') f.close()", "= '➖10', callback_data = 'welcome_timer_-10::{chat_id}'.format(chat_id = chat_id)) btn2 = types.InlineKeyboardButton(text = '➖5', callback_data", "message_id = c.message.message_id, reply_markup = kb ) t.start() t.join() @bot.callback_query_handler(func = lambda c:", "lambda msg: msg.chat.id == -1001236256304 and utils.check_super_user(msg.from_user.id)) def bot_logs(msg): bot.send_document(msg.chat.id, open('logs.txt', 'rb')) @bot.message_handler(commands", "приветствия: {} сек.'.format(curr_settings['greeting']['delete_timer']), callback_data = 'welcome_get::{chat_id}'.format(chat_id = chat_id)) kb.add(btn) btn1 = types.InlineKeyboardButton(text =", "if utils.check_status(c.from_user.id, utils.parse_chat_id(c)): settings = api.get_group_params(chat_id) settings['warns']['action'] = new_mod api.change_group_params(chat_id, ujson.dumps(settings)) bot.edit_message_reply_markup( chat_id", "отменен' ) @bot.callback_query_handler(func = lambda c: c.data.startswith('leave_')) def bot_leave_cb(c): if utils.check_status(c.from_user.id, utils.parse_chat_id(c)): if", "types.InlineKeyboardButton(text = '➕2', callback_data = 'time_ro_+2::{chat_id}'.format(chat_id = chat_id)) btn5 = types.InlineKeyboardButton(text = 'Навсегда',", "types.InlineKeyboardButton(text = 'Нет, останься', 
callback_data='leave_confirm::{chat_id}'.format(chat_id = chat_id)) keyboard.add(btn) return keyboard def generate_user_menu_kb(user_id): kb", "= utils.parse_chat_id(c) bot.edit_message_reply_markup( chat_id = c.message.chat.id, message_id = c.message.message_id, reply_markup = delete_settings(utils.parse_chat_id(c)) )", "'broadcast_check::start') kb.add(btn1, btn2, btn3) kb.add(btn4, btn5) return kb def generate_user_groups(user_id): kb = types.InlineKeyboardMarkup(row_width=2)", "= types.InlineKeyboardButton(text = 'Сброс', callback_data = 'time_ro_-10000000000::{chat_id}'.format(chat_id = chat_id)) keyboard.add(btn1, btn2, btn3, btn4)", "types.InlineKeyboardButton(text = 'Переключить все', callback_data = 'change_all::{chat_id}'.format(chat_id = chat_id)) keyboard.add(btn) btn = types.InlineKeyboardButton(text", "== 'supergroup') def bot_reregister(msg): start_time = time.time() if utils.check_status(msg.from_user.id, msg.chat.id): api.register_new_chat(msg.chat) api.change_group_params(msg.chat.id, ujson.dumps(config.default_group_settings))", "callback_query_id = c.id, text = 'Изменения подтверждены. 
Текущий статус настройки: {}'.format(config.settings_statuses[api.get_group_params(chat_id)[c.data.split('::')[0]]]) ) else:", "'Фильтры', callback_data='deletions_settings::{chat_id}'.format(chat_id = chat_id)) keyboard.add(btn) btn = types.InlineKeyboardButton(text = 'Ограничения новых пользователей', callback_data", "keyboard.add(btn) btn = types.InlineKeyboardButton(text = 'Настройка предупреждений', callback_data = 'warns_settings::{chat_id}'.format(chat_id = chat_id)) keyboard.add(btn)", "= 'welcome_timer_-5::{chat_id}'.format(chat_id = chat_id)) btn3 = types.InlineKeyboardButton(text = '➕5', callback_data = 'welcome_timer_+5::{chat_id}'.format(chat_id =", "= types.InlineKeyboardButton(text = 'Выбор языка'.format(config.settings_statuses['get_notifications']), callback_data='open_lang_menu') keyboard.add(btn) return keyboard def delete_settings(chat_id): keyboard =", "= '' if chat.username: if msg.reply_to_message: msg_id = msg.reply_to_message.message_id txt = text.reports_messages['report']['to_admin']['have_username']['reply'] else:", "= 'Рассылка-проверка', callback_data = 'check_broadcast') btn2 = types.InlineKeyboardButton(text = 'Рассылка сообщения', callback_data =", "reply_markup=generate_user_menu_kb(msg.from_user.id) ) api.register_new_user(msg.from_user, 'ru') else: bot.send_message( msg.chat.id, text.user_messages[utils.get_user_lang(msg)]['start'], reply_markup=generate_user_menu_kb(msg.from_user.id) ) utils.new_update(msg, time.time()-start_time) @bot.message_handler(commands=['start'],", "ваших групп' ) bot.edit_message_reply_markup( chat_id = c.message.chat.id, message_id = c.message.message_id, reply_markup = generate_user_groups(user_id)", "c.data.startswith('deletions_settings')) def to_deletions(c): chat_id = utils.parse_chat_id(c) bot.edit_message_reply_markup( chat_id = c.message.chat.id, message_id = c.message.message_id,", "@bot.callback_query_handler(func = lambda c: c.data.startswith('delete::')) def group_settings_deletions(c): 
chat_id = utils.parse_chat_id(c) cont_type = re.split('::',", "= 'HTML' ) @bot.message_handler(commands = ['rmkb'], func = lambda msg: msg.chat.type in ['group',", "bot.send_message( msg.chat.id, text = 'Данное приветствие не работает' ) utils.new_update(msg, time.time()-start_time) @bot.message_handler(commands=['kick'], func=lambda", "= lambda msg: not utils.check_status(msg.from_user.id, msg.chat.id)) def bot_check_sticker(msg): start_time = time.time() if utils.is_restricted(msg)", "def del_warns(c): user_id = utils.parse_user_id(c) chat_id = utils.parse_chat_id(c) if utils.check_status(c.from_user.id, utils.parse_chat_id(c)): api.zeroing_warns(user_id, chat_id)", "keyboard def delete_settings(chat_id): keyboard = types.InlineKeyboardMarkup(row_width=1) curr_settings = api.get_group_params(chat_id) for cont_type in config.available_attachments:", "= 'admin_menu')) return kb def generate_admin_menu_kb(): kb = types.InlineKeyboardMarkup(row_width = 2) btn1 =", "{}'.format(config.settings_statuses[api.get_group_params(chat_id)['deletions']['url']]) ) else: bot.answer_callback_query( callback_query_id = c.id, show_alert = True, text = 'Вы", "utils.parse_arg(msg)[1] bot.send_message( msg.chat.id, text.user_messages['start'], reply_markup=generate_user_menu_kb(msg.from_user.id) ) api.register_new_user(msg.from_user, 'ru') else: bot.send_message( msg.chat.id, text.user_messages[utils.get_user_lang(msg)]['start'], reply_markup=generate_user_menu_kb(msg.from_user.id)", "def bot_set_rules(msg): start_time = time.time() message = msg if len(msg.text) not in [9,", "working_time_str, uptime_sec = uptime ), reply_to_message_id=msg.message_id, parse_mode='HTML' ) utils.new_update(msg, time.time()-start_timee) @bot.message_handler(content_types=['new_chat_members']) def bot_users_new(msg):", "def bot_answ(msg): start_time = time.time() message = msg kb = types.InlineKeyboardMarkup() r =", "=['setlog'], func = lambda msg: msg.chat.type in ['group', 'supergroup'] and 
msg.forward_from_chat is not", "btn = types.InlineKeyboardButton(text = 'Настройка предупреждений', callback_data = 'warns_settings::{chat_id}'.format(chat_id = chat_id)) keyboard.add(btn) btn", "else: bot.send_message( c.message.chat.id, text.group_commands[utils.get_group_lang(c.message.chat.id)]['leave']['cancelled'] ) bot.delete_message( c.message.chat.id, c.message.message_id ) # @bot.callback_query_handler(func = lambda", "4) curr_settings = api.get_group_params(chat_id) btn = types.InlineKeyboardButton(text = 'Максимальное количество исключений: {}'.format(curr_settings['warns']['count']), callback_data", "in [9, 21]: new_rules = msg.text[len(msg.text):msg.entities[0].length:-1][::-1] if utils.check_text(new_rules): utils.set_rules(msg, new_rules) bot.send_message( msg.chat.id, 'Правила", "int(c.data.split('_')[2].split('::')[0]) chat_id = utils.parse_chat_id(c) if utils.check_status(c.from_user.id, utils.parse_chat_id(c)): settings = api.get_group_params(chat_id) settings['warns']['count'] = settings['warns']['count']", "msg.chat.id))) kb.add(types.InlineKeyboardButton(text = 'Нет, не стоит', callback_data = 'reset_settings_abort::{chat_id}'.format(chat_id = msg.chat.id))) if utils.check_status(msg.from_user.id,", "start_time = time.time() if utils.have_args(msg) and utils.check_status(msg.from_user.id, msg.chat.id): sticker_id = utils.parse_arg(msg)[1] utils.unban_sticker(msg, sticker_id)", "utils.new_member_logs(msg) if api.get_group_params(msg.chat.id)['deletions']['system']: bot.delete_message( msg.chat.id, msg.message_id ) if msg.chat.type == 'channel': bot.send_message( msg.chat.id,", "'В главное меню', callback_data = 'to_main_menu')) return kb def generate_broadcast_vars_menu_kb(): kb = types.InlineKeyboardMarkup(row_width", "'ru') if working_time.days != 0: working_time_str = working_time_str.replace(working_time_str.split(',')[0], utils.get_text_translation(working_time_str.split(',')[0], 'ru')) bot.send_message( msg.chat.id, 
text.user_messages['ru']['commands']['ping'].format(", "working_time.days != 0: working_time_str = working_time_str.replace(working_time_str.split(',')[0], utils.get_text_translation(working_time_str.split(',')[0], 'ru')) bot.send_message( msg.chat.id, text.user_messages['ru']['commands']['ping'].format( unix_time =", "start_time = time.time() api.register_new_chat(msg.chat) chat_id = msg.chat.id utils.new_member_logs(msg) if api.get_group_params(msg.chat.id)['deletions']['system']: bot.delete_message( msg.chat.id, msg.message_id", ") @bot.message_handler(commands = ['rmkb'], func = lambda msg: msg.chat.type in ['group', 'supergroup']) def", "btn1 = types.InlineKeyboardButton(text = '➖10', callback_data = 'welcome_timer_-10::{chat_id}'.format(chat_id = chat_id)) btn2 = types.InlineKeyboardButton(text", "logging.error(e) else: utils.ban_user(msg) utils.new_update(msg, time.time()-start_time) @bot.message_handler(commands=['ping']) def bot_ping(msg): start_timee = time.time() uptime =", "and api.get_group_params(msg.chat.id)['deletions']['url']: bot.delete_message( msg.chat.id, msg.message_id ) bot.send_message( msg.chat.id, text.group_commands[utils.get_group_lang(msg.chat.id)]['restricted']['url'].format( user_id = msg.from_user.id, user_name", "bot.edit_message_reply_markup( chat_id=c.message.chat.id, message_id=c.message.message_id, reply_markup=group_setting(utils.parse_chat_id(c)) ) bot.answer_callback_query( callback_query_id = c.id, text = 'Изменения подтверждены.", "utils.check_status(msg.from_user.id, msg.chat.id): api.register_new_chat(msg.chat) api.change_group_params(msg.chat.id, ujson.dumps(config.default_group_settings)) bot.send_message( msg.chat.id, text.group_commands[utils.get_group_lang(msg.chat.id)]['registration'], parse_mode = 'HTML' ) @bot.message_handler(commands=['ro'],", "config.settings_states[curr_state] settings['greeting']['is_enabled'] = new_state api.change_group_params(chat_id, ujson.dumps(settings)) bot.edit_message_reply_markup( 
chat_id = c.message.chat.id, message_id = c.message.message_id,", "if api.get_group_params(msg.chat.id)['deletions']['system']: bot.delete_message( msg.chat.id, msg.message_id ) utils.new_update(msg, time.time()-start_time) @bot.message_handler(commands=['report']) def bot_report(msg): start_time =", ") utils.new_update(msg, time.time()-start_time) @bot.message_handler(commands = ['reset_settings'], func = lambda msg: msg.chat.type != 'private')", "utils.check_text(new_rules): utils.set_rules(msg, new_rules) bot.send_message( msg.chat.id, 'Правила изменены' ) else: bot.send_message( msg.chat.id, text =", "msg: msg.chat.type == 'supergroup') def bot_sticker_ban(msg): start_time = time.time() if utils.check_status(msg.from_user.id, msg.chat.id): sticker_id", "main_info = logging.getLogger('main_info') report_info = logging.getLogger('reports') if __name__ == '__main__': log_name = 'logs.txt'", "text.group_commands[utils.get_group_lang(msg.chat.id)]['ban_me_please'].format( t = t ), parse_mode = 'HTML' ) else: bot.reply_to( msg, text.group_commands[utils.get_group_lang(msg.chat.id)]['errors']['prefix'].format(", ") bot.edit_message_reply_markup( chat_id = c.message.chat.id, message_id = c.message.message_id, reply_markup = generate_user_groups(user_id) ) bot.answer_callback_query(", "time.time() admins = bot.get_chat_administrators(msg.chat.id) chat = bot.get_chat(msg.chat.id) msg_id = '' if chat.username: if", "['set_rules'], func = lambda msg: utils.check_status(msg.from_user.id, msg.chat.id)) def bot_set_rules(msg): start_time = time.time() message", "= 'time_ro_-2::{chat_id}'.format(chat_id = chat_id)) btn2 = types.InlineKeyboardButton(text = '➖1', callback_data = 'time_ro_-1::{chat_id}'.format(chat_id =", "в любое время и отправить @f0rden для восстановления их, в случае сбоя:\\n'+ujson.dumps(api.get_group_params(chat_id)) )", "callback_query_id = c.id, text = 'Изменения подтверждены.' 
) else: t = Thread(target =", "приветствий', callback_data = 'welcome_settings::{chat_id}'.format(chat_id = chat_id)) keyboard.add(btn) btn = types.InlineKeyboardButton(text = 'Получить дамп", "= types.InlineKeyboardButton(text = 'Начать рассылку', callback_data = 'broadcast_message::start') kb.add(btn1, btn2, btn3) return kb", "== bot.token: request_body_dict = await request.json() update = telebot.types.Update.de_json(request_body_dict) bot.process_new_updates([update]) return web.Response() else:", "utils.check_status(c.from_user.id, utils.parse_chat_id(c)): utils.change_state_main(chat_id, 'kick_bots') bot.edit_message_reply_markup( chat_id=c.message.chat.id, message_id=c.message.message_id, reply_markup=group_setting(utils.parse_chat_id(c)) ) bot.answer_callback_query( callback_query_id = c.id,", "= 4) curr_settings = api.get_group_params(chat_id) btn = types.InlineKeyboardButton(text = 'Отправлять приветствие в чат:", "parse_mode = 'HTML' ) else: bot.reply_to( msg, text.group_commands[utils.get_group_lang(msg.chat.id)]['errors']['prefix'].format( reason = text.group_commands[utils.get_group_lang(msg.chat.id)]['errors']['reasons']['user_is_admin'] ), parse_mode='HTML'", "ban_time = 60*t try: if not utils.check_status(msg.from_user.id, msg.chat.id): bot.restrict_chat_member( msg.chat.id, msg.from_user.id, until_date=str(time.time() +", "def chat_settings(c): chat_id = utils.parse_chat_id(c) bot.edit_message_text( chat_id = c.message.chat.id, message_id = c.message.message_id, text", "user_id = utils.parse_user_id(c) if api.get_group_params(chat_id)['restrictions']['admins_only']: if utils.check_status(c.from_user.id, utils.parse_chat_id(c)): utils.unban_user_button(c) user = bot.get_chat_member( chat_id,", "= '➖5', callback_data = 'welcome_timer_-5::{chat_id}'.format(chat_id = chat_id)) btn3 = types.InlineKeyboardButton(text = '➕5', callback_data", "c.message.chat.id, text.group_commands[utils.get_group_lang(c.message.chat.id)]['leave']['cancelled'] ) 
bot.delete_message( c.message.chat.id, c.message.message_id ) # @bot.callback_query_handler(func = lambda c: c.data.startswith('settings_captcha'))", "= lambda msg: msg.chat.type == 'supergroup') def bot_reregister(msg): start_time = time.time() if utils.check_status(msg.from_user.id,", "bot.delete_message( msg.chat.id, msg.message_id ) utils.new_update(msg, time.time()-start_time) @bot.message_handler(commands=['report']) def bot_report(msg): start_time = time.time() admins", "'Read-only на сутки', callback_data = 'warns_action_3::{chat_id}'.format(chat_id = chat_id)) keyboard.add(btn1, btn2, btn3, btn4) btn", "= text.group_commands[utils.get_group_lang(msg.chat.id)]['log_channel']['info']['is_on'].format( chat_id = utils.get_log_id(msg.chat.id), chat_name = bot.get_chat(utils.get_log_id(msg.chat.id)).title ) else: m = text.group_commands[utils.get_group_lang(msg.chat.id)]['log_channel']['info']['is_off']", "= api.get_group_params(chat_id) settings['warns']['action'] = new_mod api.change_group_params(chat_id, ujson.dumps(settings)) bot.edit_message_reply_markup( chat_id = c.message.chat.id, message_id =", "msg.chat.id, text = text.group_commands[utils.get_group_lang(chat_id)]['log_channel']['confirmation']['errors']['bot_is_not_admin'] ) except Exception as e: print(e) @bot.message_handler(commands = ['infolog'],", "generate_admin_menu_kb() ) @bot.callback_query_handler(func=lambda c: c.data.startswith('lang::')) def change_language(c): words = re.split('::', c.data) lang =", "4) curr_settings = api.get_group_params(chat_id) btn = types.InlineKeyboardButton(text = 'Автоматический read-only на {} час", "in [9, 21]: new_greeting = msg.text[len(msg.text):msg.entities[0].length:-1][::-1] if utils.check_text(new_greeting): utils.set_greeting(msg, new_greeting) bot.send_message( msg.chat.id, 'Приветствие", "utils.check_status(msg.from_user.id, msg.chat.id): utils.ban_stickerpack(msg) else: utils.send_err_report(msg, 'not_enought_rights') utils.new_update(msg, 
time.time()-start_time) @bot.message_handler(commands=['stickerpack_unban'], func=lambda msg: msg.chat.type !=", "= types.InlineKeyboardButton(text = '➖10', callback_data = 'welcome_timer_-10::{chat_id}'.format(chat_id = chat_id)) btn2 = types.InlineKeyboardButton(text =", "change_language(c): words = re.split('::', c.data) lang = words[1] bot.edit_message_text( chat_id = c.message.chat.id, message_id", "= ['get_id']) def bot_get_id(msg): bot.send_message( msg.chat.id, msg.chat.id ) # @bot.message_handler(commands = ['voteban']) #", "'Изменения подтверждены.' ) @bot.callback_query_handler(func = lambda c: c.data.startswith('warns_action_')) def warns_count_change(c): new_mod = int(c.data.split('_')[2].split('::')[0])", "text.group_commands[utils.get_group_lang(chat_id)]['log_channel']['confirmation']['errors']['user_is_not_creator'] ) elif status2 is not True: bot.send_message( msg.chat.id, text = text.group_commands[utils.get_group_lang(chat_id)]['log_channel']['confirmation']['errors']['bot_is_not_admin'] )", "kb = types.InlineKeyboardMarkup(row_width = 2) btn1 = types.InlineKeyboardButton(text = 'Ввести сообщение', callback_data =", "settings['restrictions']['for_time'] < 1: settings['restrictions']['for_time'] = 1 api.change_group_params(chat_id, ujson.dumps(settings)) bot.edit_message_reply_markup( chat_id = c.message.chat.id, message_id", "= 'Правила составлены неверно' ) utils.new_update(msg, time.time()-start_time) @bot.message_handler(commands = ['rules'], func = lambda", "welcome_settings_kb(chat_id) ) bot.answer_callback_query( callback_query_id = c.id, text = 'Изменения подтверждены.' 
) else: bot.answer_callback_query(", "else: bot.reply_to( msg, text.group_commands[utils.get_group_lang(msg.chat.id)]['errors']['prefix'].format( reason = text.group_commands[utils.get_group_lang(msg.chat.id)]['errors']['reasons']['user_is_admin'] ), parse_mode='HTML' ) except Exception as", "btn2) kb.add(types.InlineKeyboardButton(text = 'В главное меню', callback_data = 'to_main_menu')) return kb def generate_broadcast_settings_menu_kb():", "msg.new_chat_member.id, user_name = api.replacer(msg.new_chat_member.first_name), ban_time = api.get_group_params(msg.chat.id)['restrictions']['for_time'] ), reply_markup = unban_new_user_kb(msg), parse_mode =", "settings['greeting']['delete_timer'] = 0 api.change_group_params(chat_id, ujson.dumps(settings)) bot.edit_message_reply_markup( chat_id = c.message.chat.id, message_id = c.message.message_id, reply_markup", "c.data.startswith('get_settings_json')) def bot_get_settings_json(c): chat_id = utils.parse_chat_id(c) bot.send_message( chat_id = c.from_user.id, text = 'Эти", "= msg.from_user.id try: admins = bot.get_chat_administrators(msg.forward_from_chat.id) status1 = False status2 = False for", "@bot.message_handler(commands=['set_text'], func = lambda msg: msg.chat.type != 'private') def bot_set_text(msg): start_time = time.time()", "bot_report(msg): start_time = time.time() admins = bot.get_chat_administrators(msg.chat.id) chat = bot.get_chat(msg.chat.id) msg_id = ''", "utils.new_update(msg, time.time()-start_time) @bot.message_handler(content_types=['photo'], func = lambda msg: msg.chat.id == 303986717) def bot_text(msg): start_time", "{}'.format(config.settings_statuses[api.get_group_params(chat_id)[c.data.split('::')[0]]]) ) else: bot.answer_callback_query( callback_query_id = c.id, show_alert = True, text = 'Вы", "'system') bot.edit_message_reply_markup( chat_id=c.message.chat.id, message_id=c.message.message_id, reply_markup=group_setting(utils.parse_chat_id(c)) ) bot.answer_callback_query( callback_query_id = 
c.id, text = 'Изменения", "warns_count_change(c): new_mod = int(c.data.split('_')[2].split('::')[0]) chat_id = utils.parse_chat_id(c) if utils.check_status(c.from_user.id, utils.parse_chat_id(c)): settings = api.get_group_params(chat_id)", "c: c.data.startswith('new_restrictions_admins_only_')) def warns_count_change(c): chat_id = utils.parse_chat_id(c) state = c.data.split('_')[4].split('::')[0] if utils.check_status(c.from_user.id, utils.parse_chat_id(c)):", "message = msg kb = types.InlineKeyboardMarkup() r = bot.reply_to( msg, 'Настройки отправлены вам", "context.load_cert_chain(WEBHOOK_SSL_CERT, WEBHOOK_SSL_PRIV) # Start aiohttp server web.run_app( app, host=WEBHOOK_LISTEN, port=WEBHOOK_PORT, ssl_context=context, ) #", "@bot.channel_post_handler(content_types=['text'], func = lambda msg: msg.chat.id == secret_config.channel_ID) def bot_broadcast(msg): r = bot.forward_message(secret_config.official_chat,", "settings = api.get_group_params(chat_id) settings['restrictions']['for_time'] = settings['restrictions']['for_time'] + change_time if settings['restrictions']['for_time'] < 1: settings['restrictions']['for_time']", "= logging.getLogger('sqlite') main_info = logging.getLogger('main_info') report_info = logging.getLogger('reports') if __name__ == '__main__': log_name", "удалением приветствия: {} сек.'.format(curr_settings['greeting']['delete_timer']), callback_data = 'welcome_get::{chat_id}'.format(chat_id = chat_id)) kb.add(btn) btn1 = types.InlineKeyboardButton(text", "= lambda msg: not utils.check_status(msg.from_user.id, msg.chat.id)) def testt(msg): start_time = time.time() if utils.is_restricted(msg):", "utils.check_status(msg.from_user.id, msg.chat.id) and utils.have_args(msg): stickerpack_name = utils.parse_arg(msg)[1] utils.unban_stickerpack(msg, stickerpack_name) utils.new_update(msg, time.time()-start_time) @bot.message_handler(commands=['sticker_ban'], func=lambda", "= lambda c: c.data.startswith('welcome_settings')) def welcome_settings(c): 
chat_id = utils.parse_chat_id(c) bot.edit_message_reply_markup( chat_id = c.message.chat.id,", "'del_url::{chat_id}'.format(chat_id = chat_id)) keyboard.add(btn) btn = types.InlineKeyboardButton(text = 'Удалять системные сообщения{}'.format(config.settings_statuses[curr_settings['deletions']['system']]), callback_data =", "msg: msg.chat.type == 'private') def bot_user_start(msg): message = msg start_time = time.time() if", "lang) @bot.callback_query_handler(func = lambda c: c.data.startswith('get_notifications')) def notify_change(c): chat_id = utils.parse_chat_id(c) if utils.check_status(c.from_user.id,", "func = lambda msg: msg.chat.type != 'private') def bot_set_text(msg): start_time = time.time() message", "bot.send_message( user_id, m, parse_mode = 'HTML' ) bot.answer_callback_query( c.id, text = 'Список отправлен',", "обнулены.', chat_id = c.message.chat.id, message_id = c.message.message_id ) else: bot.answer_callback_query( callback_query_id = c.id,", "subprocess import threading import time from multiprocessing import Process as Thread import telebot", "text.service_messages['stats'].format( all_users = api.get_users_count(), all_chats = api.get_chats_count(), unblocked_users = api.get_unblocked_users_count(), unblocked_chats = api.get_unblocked_chats_count()", "= c.id, text = 'Изменения подтверждены. Текущий статус настройки: {}'.format(config.settings_statuses[api.get_group_params(chat_id)['deletions']['system']]) ) else: bot.answer_callback_query(", "0 api.change_group_params(chat_id, ujson.dumps(settings)) bot.edit_message_reply_markup( chat_id = c.message.chat.id, message_id = c.message.message_id, reply_markup = welcome_settings_kb(chat_id)", "= 'Вы не являетесь администратором. 
Текущий статус настройки: {}'.format(config.settings_statuses[api.get_group_params(chat_id)['deletions']['url']]) ) @bot.callback_query_handler(func = lambda", "@bot.message_handler(commands = ['reset_settings'], func = lambda msg: msg.chat.type != 'private') def bot_reset_settings(msg): start_time", "@bot.callback_query_handler(func = lambda c: c.data == 'to_main_menu') def bot_to_main_menu(c): bot.edit_message_text( chat_id = c.message.chat.id,", "lambda msg: msg.chat.type != 'private') def bot_set_text(msg): start_time = time.time() message = msg", "inviter_info.user.id, inviter_firstname = inviter_info.user.first_name, invited_count = int(i['COUNT(`inviter`)']) ) bot.send_message( user_id, m, parse_mode =", "ограничений разрешено для: {}'.format(config.new_users[curr_settings['restrictions']['admins_only']]), callback_data = 'new_restrictions_admins_only_{state}::{chat_id}'.format(state = config.settings_states[curr_settings['restrictions']['admins_only']], chat_id = chat_id)) keyboard.add(btn)", "callback_data = 'welcome_get::{chat_id}'.format(chat_id = chat_id)) kb.add(btn) btn = types.InlineKeyboardButton(text = 'Назад', callback_data='to_group_settings_menu::{chat_id}'.format(chat_id =", "text = 'Вы не являетесь администратором. 
Текущий статус настройки: {}'.format(config.settings_statuses[api.get_group_params(chat_id)['deletions']['url']]) ) @bot.callback_query_handler(func =", "keyboard.add(btn) return keyboard def delete_settings(chat_id): keyboard = types.InlineKeyboardMarkup(row_width=1) curr_settings = api.get_group_params(chat_id) for cont_type", "kb.add(btn) return kb def user_settings_main_menu(msg): keyboard = types.InlineKeyboardMarkup(row_width=1) curr_settings = api.get_user_param(msg.chat.id, 'settings') btn", "for i in config.languages: lang_keyboard.add(types.InlineKeyboardButton(text = i['title'], callback_data = 'lang::{lang_code}'.format(lang_code = i['code']))) return", "сек.'.format(curr_settings['greeting']['delete_timer']), callback_data = 'welcome_get::{chat_id}'.format(chat_id = chat_id)) kb.add(btn) btn1 = types.InlineKeyboardButton(text = '➖10', callback_data", "= 'delete_warns::{user_id}'.format(user_id = user_id)) kb.add(btn) return kb def unban_new_user_kb(msg): kb = types.InlineKeyboardMarkup(row_width=1) btn", "['group', 'supergroup']) def bot_remove_kb(msg): kb = types.ReplyKeyboardMarkup(one_time_keyboard=True) kb.add(types.KeyboardButton(text='/rmkb')) r = bot.send_message( msg.chat.id, text", "'./webhook_pkey.pem' # Путь к приватному ключу WEBHOOK_URL_BASE = \"https://%s:%s\" % (WEBHOOK_HOST, WEBHOOK_PORT) WEBHOOK_URL_PATH", "= types.InlineKeyboardButton(text = 'Принимать рассылки{}'.format(config.settings_statuses[curr_settings['get_notifications']]), callback_data = 'get_notifications::{chat_id}'.format(chat_id = chat_id)) keyboard.add(btn) btn =", "not utils.check_status(msg.from_user.id, msg.chat.id): bot.restrict_chat_member( msg.chat.id, msg.from_user.id, until_date=str(time.time() + ban_time)) bot.reply_to( msg, text.group_commands[utils.get_group_lang(msg.chat.id)]['ban_me_please'].format( t", "'settings') bot.edit_message_text( chat_id = c.message.chat.id, message_id = c.message.message_id, text = 'Список ваших групп'", "def 
bot_reset_settings(msg): start_time = time.time() kb = types.InlineKeyboardMarkup() kb.add(types.InlineKeyboardButton(text = 'Да, выполнить сброс',", "%p', level = logging.INFO ) app = web.Application() t = Thread(target = utils.check_deleting_queue)", "все', callback_data = 'change_all::{chat_id}'.format(chat_id = chat_id)) keyboard.add(btn) btn = types.InlineKeyboardButton(text = 'Назад', callback_data='to_group_settings_menu::{chat_id}'.format(chat_id", "reason = text.group_commands[utils.get_group_lang(msg.chat.id)]['errors']['reasons']['user_is_admin'] ), parse_mode='HTML' ) except Exception as e: logging.error(e) else: utils.ban_user(msg)", "= c.message.message_id, text = text.service_messages['stats'].format( all_users = api.get_users_count(), all_chats = api.get_chats_count(), unblocked_users =", "group_setting(chat_id): keyboard = types.InlineKeyboardMarkup(row_width=1) curr_settings = api.get_group_params(chat_id) btn = types.InlineKeyboardButton(text = 'Принимать рассылки{}'.format(config.settings_statuses[curr_settings['get_notifications']]),", "системные сообщения{}'.format(config.settings_statuses[curr_settings['deletions']['system']]), callback_data = 'del_system::{chat_id}'.format(chat_id = chat_id)) keyboard.add(btn) btn = types.InlineKeyboardButton(text = 'Исключать", "язык', callback_data = 'change_lang') kb.add(btn1, btn2) if utils.check_super_user(user_id): kb.add(types.InlineKeyboardButton(text = 'Админка бота', callback_data", "'receivers': curr_bot_settings['broadcast']['check']['recievers'], 'cont_type': 'text', 'msg_text': '', 'file_id': '', 'user_id': c.from_user.id, 'message_id': c.message.message_id }", ") @bot.callback_query_handler(func = lambda c: c.data.startswith('change_all')) def group_settings_deletions_all(c): chat_id = utils.parse_chat_id(c) if utils.check_status(c.from_user.id,", "settings['warns']['action'] = new_mod api.change_group_params(chat_id, ujson.dumps(settings)) bot.edit_message_reply_markup( chat_id = 
c.message.chat.id, message_id = c.message.message_id, reply_markup", "utils.parse_arg(msg)[1] utils.unban_sticker(msg, sticker_id) elif utils.check_status(msg.from_user.id, msg.chat.id) and not utils.have_args(msg): utils.send_err_report(msg, 'not_enought_rights') elif utils.have_args(msg)", "parse_mode='HTML' ) utils.new_update(msg, time.time()-start_time) @bot.message_handler(commands=['about'], func=lambda msg: msg.chat.type == 'private') def bot_about(msg): start_time", "для выполнения этого действия.' ) @bot.callback_query_handler(func = lambda c: c.data.startswith('new_users_restrictions')) def new_users_restrictions(c): chat_id", "= 'У вас недостаточно прав для выполнения этого действия.' ) @bot.callback_query_handler(func = lambda", "keyboard = types.InlineKeyboardMarkup(row_width=1) btn = types.InlineKeyboardButton(text = 'Да, выйди из чата', callback_data='leave_cancel::{chat_id}'.format(chat_id =", "generate_broadcast_vars_menu_kb(): kb = types.InlineKeyboardMarkup(row_width = 1) btn1 = types.InlineKeyboardButton(text = 'Рассылка-проверка', callback_data =", "chat_id = utils.parse_chat_id(c) cont_type = re.split('::', c.data)[1] if utils.check_status(c.from_user.id, utils.parse_chat_id(c)): utils.change_state_deletions_files(chat_id, cont_type) bot.edit_message_reply_markup(", "'HTML', chat_id = c.message.chat.id, message_id = c.message.message_id ) utils.add_to_delete_queue(chat_id, c.message.message_id, api.get_group_params(chat_id)['greeting']['delete_timer']) else: bot.answer_callback_query(", "utils.parse_chat_id(c)): utils.change_state_deletions_files(chat_id, cont_type) bot.edit_message_reply_markup( chat_id = c.message.chat.id, message_id = c.message.message_id, reply_markup = delete_settings(utils.parse_chat_id(c))", "bot_get_rules(msg): start_time = time.time() try: bot.send_message( msg.from_user.id, utils.generate_rules_text(msg), parse_mode = 'HTML' ) except", "text = 'Рассылка начата' ) bot.edit_message_reply_markup( chat_id = 
c.message.chat.id, message_id = c.message.message_id, reply_markup", "if c.data.startswith('reset_settings_confirmation'): api.register_new_chat(c.message.chat) api.change_group_params(chat_id, ujson.dumps(config.default_group_settings)) bot.send_message( c.message.chat.id, 'Настройки сброшены.' ) bot.delete_message( c.message.chat.id, c.message.message_id", "import Process as Thread import telebot from aiohttp import web from telebot import", "kb = types.InlineKeyboardMarkup(row_width = 4) curr_settings = api.get_group_params(chat_id) btn = types.InlineKeyboardButton(text = 'Отправлять", "btn3 = types.InlineKeyboardButton(text = '➕1', callback_data = 'time_ro_+1::{chat_id}'.format(chat_id = chat_id)) btn4 = types.InlineKeyboardButton(text", "def warns_settings_kb(chat_id): keyboard = types.InlineKeyboardMarkup(row_width = 4) curr_settings = api.get_group_params(chat_id) btn = types.InlineKeyboardButton(text", "if utils.check_status(c.from_user.id, utils.parse_chat_id(c)): api.zeroing_warns(user_id, chat_id) bot.edit_message_text( text = 'Предупреждения обнулены.', chat_id = c.message.chat.id,", "unblocked_users = api.get_unblocked_users_count(), unblocked_chats = api.get_unblocked_chats_count() ) ) @bot.callback_query_handler(func = lambda c: c.data", "c.message.chat.id, message_id = c.message.message_id, reply_markup = create_user_language_keyboard() ) bot.answer_callback_query( callback_query_id = c.id, text", "= types.InlineKeyboardMarkup() for i in config.languages: lang_keyboard.add(types.InlineKeyboardButton(text = i['title'], callback_data = 'lang::{lang_code}'.format(lang_code =", "= lambda c: c.data.startswith('kick_bots')) def kick_bots(c): chat_id = utils.parse_chat_id(c) if utils.check_status(c.from_user.id, utils.parse_chat_id(c)): utils.change_state_main(chat_id,", ") bot.answer_callback_query( callback_query_id = c.id, text = 'Изменения подтверждены. 
Статус настройки: {}'.format(config.settings_statuses[api.get_group_params(chat_id)['deletions']['files'][cont_type]]) )", "settings = api.get_group_params(chat_id) settings['restrictions']['admins_only'] = utils.to_bool(state) api.change_group_params(chat_id, ujson.dumps(settings)) bot.edit_message_reply_markup( chat_id = c.message.chat.id, message_id", "= 'Предупреждения обнулены.', chat_id = c.message.chat.id, message_id = c.message.message_id ) else: bot.answer_callback_query( callback_query_id", "utils.get_top_inviters(chat_id) m = text.group_commands[utils.get_group_lang(chat_id)]['refs_stats']['header'] counter = 0 for i in inviters: inviter_info =", "func=lambda msg: msg.chat.type != 'private') def bot_stickerpack_unban(msg): start_time = time.time() if utils.check_status(msg.from_user.id, msg.chat.id)", "= types.InlineKeyboardButton(text = 'Удалить предупреждения', callback_data = 'delete_warns::{user_id}'.format(user_id = user_id)) kb.add(btn) return kb", "lambda c: c.data.startswith('del_url')) def del_url(c): chat_id = utils.parse_chat_id(c) if utils.check_status(c.from_user.id, utils.parse_chat_id(c)): utils.change_state_deletions_main(chat_id, 'url')", "t.start() t.join() @bot.callback_query_handler(func = lambda c: c.data == 'admin_menu') def bot_admin_menu(c): bot.edit_message_text( chat_id", "прав для выполнения этого действия.' 
) @bot.callback_query_handler(func = lambda c: c.data.startswith('welcome_settings')) def welcome_settings(c):", "разрешено для: {}'.format(config.new_users[curr_settings['restrictions']['admins_only']]), callback_data = 'new_restrictions_admins_only_{state}::{chat_id}'.format(state = config.settings_states[curr_settings['restrictions']['admins_only']], chat_id = chat_id)) keyboard.add(btn) btn", "%I:%M:%S %p', level = logging.INFO ) app = web.Application() t = Thread(target =", "utils.parse_chat_id(c) bot.edit_message_reply_markup( chat_id = c.message.chat.id, message_id = c.message.message_id, reply_markup=group_setting(utils.parse_chat_id(c)) ) bot.answer_callback_query( callback_query_id =", "i.status == 'creator': status1 = True if i.user.id == my_info.id: status2 = True", "if settings['warns']['count'] < 1: settings['warns']['count'] = 1 api.change_group_params(chat_id, ujson.dumps(settings)) bot.edit_message_reply_markup( chat_id = c.message.chat.id,", "= 'Получить дамп настроек', callback_data = 'get_settings_json::{chat_id}'.format(chat_id = chat_id)) keyboard.add(btn) btn = types.InlineKeyboardButton(text", "= types.InlineKeyboardButton(text = 'Да, выйди из чата', callback_data='leave_cancel::{chat_id}'.format(chat_id = chat_id)) keyboard.add(btn) btn =", "bot_stickerpack_ban(msg): start_time = time.time() if utils.check_status(msg.from_user.id, msg.chat.id): utils.ban_stickerpack(msg) else: utils.send_err_report(msg, 'not_enought_rights') utils.new_update(msg, time.time()-start_time)", "callback_data = 'welcome_settings::{chat_id}'.format(chat_id = chat_id)) keyboard.add(btn) btn = types.InlineKeyboardButton(text = 'Получить дамп настроек',", "= 'Админка' ) bot.edit_message_reply_markup( chat_id = c.message.chat.id, message_id = c.message.message_id, reply_markup = generate_admin_menu_kb()", "utils.check_status(msg.from_user.id, msg.chat.id)) def testt(msg): start_time = time.time() if utils.is_restricted(msg): bot.delete_message( msg.chat.id, 
msg.message_id )", "if utils.is_user_new(msg): if utils.have_args(msg): referrer = utils.parse_arg(msg)[1] bot.send_message( msg.chat.id, text.user_messages['start'], reply_markup=generate_user_menu_kb(msg.from_user.id) ) api.register_new_user(msg.from_user,", "настройки: {}'.format(config.settings_statuses[api.get_group_params(chat_id)[c.data.split('::')[0]]]) ) @bot.callback_query_handler(func = lambda c: c.data.startswith('deletions_settings')) def to_deletions(c): chat_id = utils.parse_chat_id(c)", "request.match_info.get('token') == bot.token: request_body_dict = await request.json() update = telebot.types.Update.de_json(request_body_dict) bot.process_new_updates([update]) return web.Response()", "'Автоматический read-only на {} час - {}'.format(curr_settings['restrictions']['for_time'], config.settings_statuses[curr_settings['restrictions']['read_only']]), callback_data = 'read_only::{chat_id}'.format(chat_id = chat_id))", "time.time() bot.send_message( msg.chat.id, text.user_messages[utils.get_user_lang(msg)]['about'], parse_mode='HTML' ) utils.new_update(msg, time.time()-start_time) @bot.message_handler(commands=['warn'], func=lambda msg: msg.chat.type !=", "in ['group', 'supergroup'] and msg.forward_from_chat is not None and utils.check_status(msg.from_user.id, msg.chat.id) and msg.forward_from_chat.id", "start_time = time.time() if msg.text == '/ban_me_please': t = random.randint(1, 10) ban_time =", "not None and utils.check_status(msg.from_user.id, msg.chat.id) and not utils.check_log(msg.chat.id) ) def bot_set_log(msg): user_id =", "callback_data = 'admin_menu')) return kb def generate_admin_menu_kb(): kb = types.InlineKeyboardMarkup(row_width = 2) btn1", "api.register_new_user(c.from_user, lang) @bot.callback_query_handler(func = lambda c: c.data.startswith('get_notifications')) def notify_change(c): chat_id = utils.parse_chat_id(c) if", "вас недостаточно прав для выполнения этого действия.' 
) @bot.callback_query_handler(func = lambda c: c.data.startswith('welcome_timer'))", "callback_data = 'time_ro_-10000000000::{chat_id}'.format(chat_id = chat_id)) keyboard.add(btn1, btn2, btn3, btn4) keyboard.add(btn5, btn6) btn =", "= c.message.message_id, reply_markup = generate_user_groups(user_id) ) bot.answer_callback_query( callback_query_id = c.id, text = 'Переход", "= types.InlineKeyboardMarkup(row_width=2) user_settings = ujson.loads(api.get_user_param(user_id, 'settings')) btns = [] for i in user_settings['admined_groups']:", "utils.parse_arg(msg)[1] user_id = int(words) utils.unban_user(msg, user_id) elif utils.check_status(msg.from_user.id, msg.chat.id) and msg.reply_to_message is not", "show_alert = True ) @bot.callback_query_handler(func = lambda c: c.data == 'stats_menu') def bot_stats_menu(c):", ") @bot.callback_query_handler(func = lambda c: c.data.startswith('warns_settings')) def warns_count_change(c): chat_id = utils.parse_chat_id(c) bot.edit_message_reply_markup( chat_id", "= types.InlineKeyboardButton(text = 'Показать приветствие', callback_data = 'welcome_get::{chat_id}'.format(chat_id = chat_id)) kb.add(btn) btn =", "btn6) btn = types.InlineKeyboardButton(text = 'Снятие ограничений разрешено для: {}'.format(config.new_users[curr_settings['restrictions']['admins_only']]), callback_data = 'new_restrictions_admins_only_{state}::{chat_id}'.format(state", "msg.chat.id): utils.new_warn(msg) elif not utils.check_status(msg.from_user.id, msg.chat.id): utils.send_err_report(msg, 'not_enought_rights') elif utils.check_status(msg.reply_to_message.from_user.id, msg.chat.id): utils.send_err_report(msg, 'user_is_admin')", "msg_text.lower() in ['глобал бан']: if utils.check_super_user(msg.from_user.id): utils.global_ban(msg) elif not utils.check_status(msg.from_user.id, msg.chat.id): # if", "if utils.check_super_user(msg.from_user.id): utils.global_ban(msg) elif not utils.check_status(msg.from_user.id, msg.chat.id): # if utils.is_new_in_chat(msg) and 
api.get_group_params(msg.chat.id)['restrict_new'] ==", "utils.parse_chat_id(c) user_id = utils.parse_user_id(c) if api.get_group_params(chat_id)['restrictions']['admins_only']: if utils.check_status(c.from_user.id, utils.parse_chat_id(c)): utils.unban_user_button(c) user = bot.get_chat_member(", "), parse_mode = 'HTML', chat_id = c.message.chat.id, message_id = c.message.message_id ) utils.add_to_delete_queue(chat_id, c.message.message_id,", "def get_welcome_text(c): chat_id = utils.parse_chat_id(c) bot.send_message( c.message.chat.id, utils.get_greeting(chat_id), parse_mode = 'HTML' ) @bot.callback_query_handler(func", "bot_voteban(msg): # utils.new_voteban(msg) # bot.send_message( # msg.chat.id, # text. # ) @bot.message_handler(commands =", "if utils.check_status(c.from_user.id, utils.parse_chat_id(c)): # settings = api.get_group_params(chat_id) # settings[''] # api.change_group_params(chat_id, ) #", "chat_id = utils.parse_chat_id(c) bot.edit_message_reply_markup( chat_id = c.message.chat.id, message_id = c.message.message_id, reply_markup = welcome_settings_kb(chat_id)", "utils.new_update(msg, time.time()-start_time) @bot.message_handler(commands=['help']) def bot_help(msg): start_time = time.time() bot.send_message( msg.from_user.id, text.user_messages[utils.get_user_lang(msg)]['help'], parse_mode='HTML' )", "words = utils.parse_arg(msg)[1] user_id = int(words) utils.unban_user(msg, user_id) elif utils.check_status(msg.from_user.id, msg.chat.id) and msg.reply_to_message", "start_time = time.time() kb = types.InlineKeyboardMarkup() kb.add(types.InlineKeyboardButton(text = 'Да, выполнить сброс', callback_data =", "all_chats = api.get_chats_count(), unblocked_users = api.get_unblocked_users_count(), unblocked_chats = api.get_unblocked_chats_count() ) ) @bot.callback_query_handler(func =", "статус настройки: {}'.format(config.settings_statuses[api.get_group_params(chat_id)[c.data.split('::')[0]]]) ) @bot.callback_query_handler(func = lambda c: 
c.data.startswith('del_url')) def del_url(c): chat_id =", "chat_id)) btn2 = types.InlineKeyboardButton(text = '➖5', callback_data = 'welcome_timer_-5::{chat_id}'.format(chat_id = chat_id)) btn3 =", "= api.get_group_params(chat_id) settings['greeting']['delete_timer'] = settings['greeting']['delete_timer'] + change_count if settings['greeting']['delete_timer'] < 0: settings['greeting']['delete_timer'] =", "keyboard def welcome_settings_kb(chat_id): kb = types.InlineKeyboardMarkup(row_width = 4) curr_settings = api.get_group_params(chat_id) btn =", "@bot.message_handler(commands=['start'], func=lambda msg: msg.chat.type != 'private') def bot_group_start(msg): start_time = time.time() api.register_new_chat(msg.chat) utils.new_update(msg,", "@bot.message_handler(commands=['ping']) def bot_ping(msg): start_timee = time.time() uptime = datetime.timedelta(seconds = int(time.time()-start_time)) working_time =", "['get_id']) def bot_get_id(msg): bot.send_message( msg.chat.id, msg.chat.id ) # @bot.message_handler(commands = ['voteban']) # def", "c.message.message_id, reply_markup = generate_broadcast_check_menu_kb() ) bot.answer_callback_query( callback_query_id = c.id, text = 'Изменения подтверждены.'", "as Thread import telebot from aiohttp import web from telebot import types import", "uptime_sec = uptime ), reply_to_message_id=msg.message_id, parse_mode='HTML' ) utils.new_update(msg, time.time()-start_timee) @bot.message_handler(content_types=['new_chat_members']) def bot_users_new(msg): start_time", "func=lambda msg: msg.chat.type == 'private') def bot_about(msg): start_time = time.time() bot.send_message( msg.chat.id, text.user_messages[utils.get_user_lang(msg)]['about'],", "= 'HTML' ) @bot.message_handler(commands = ['set_rules'], func = lambda msg: utils.check_status(msg.from_user.id, msg.chat.id)) def", "= 'warns_action_0::{chat_id}'.format(chat_id = chat_id)) btn2 = types.InlineKeyboardButton(text = 'Кик', callback_data = 'warns_action_1::{chat_id}'.format(chat_id 
=", "def bot_set_text(msg): start_time = time.time() message = msg if len(msg.text) not in [9,", "types.ReplyKeyboardRemove() ) elif utils.check_global_ban(msg): bot.kick_chat_member( msg.chat.id, msg.new_chat_member.id ) bot.send_message( msg.chat.id, text.group_commands['ru']['restricted']['global_ban'].format( user_id =", "c: c.data == 'check_broadcast') def bot_admin_menu(c): bot.edit_message_text( chat_id = c.message.chat.id, message_id = c.message.message_id,", "= 'time_ro_-1::{chat_id}'.format(chat_id = chat_id)) btn3 = types.InlineKeyboardButton(text = '➕1', callback_data = 'time_ro_+1::{chat_id}'.format(chat_id =", ") def bot_del_log(msg): print(1) user_id = msg.from_user.id try: admins = bot.get_chat_administrators(msg.forward_from_chat.id) status1 =", "chat_id)) btn4 = types.InlineKeyboardButton(text = 'Read-only на сутки', callback_data = 'warns_action_3::{chat_id}'.format(chat_id = chat_id))", "bot.send_message( # msg.chat.id, # text. # ) @bot.message_handler(commands = ['version']) def bot_version(msg): bot.send_message(", "types.InlineKeyboardButton(text = '➖2', callback_data = 'warns_count_-2::{chat_id}'.format(chat_id = chat_id)) btn2 = types.InlineKeyboardButton(text = '➖1',", "m, parse_mode = 'HTML' ) @bot.message_handler(commands = ['leave'], func = lambda msg: msg.chat.type", "generate_broadcast_check_menu_kb() ) @bot.callback_query_handler(func = lambda c: c.data.startswith('broadcast_check')) def bot_broadcast_check(c): arg = c.data.split('::')[1] curr_bot_settings", "msg.chat.id keyboard = types.InlineKeyboardMarkup(row_width=1) btn = types.InlineKeyboardButton(text = 'Да, выйди из чата', callback_data='leave_cancel::{chat_id}'.format(chat_id", "warns_settings_kb(chat_id): keyboard = types.InlineKeyboardMarkup(row_width = 4) curr_settings = api.get_group_params(chat_id) btn = types.InlineKeyboardButton(text =", "c.message.chat.id, message_id = c.message.message_id, text = text.service_messages['stats'].format( all_users = 
api.get_users_count(), all_chats = api.get_chats_count(),", "callback_data = 'del_system::{chat_id}'.format(chat_id = chat_id)) keyboard.add(btn) btn = types.InlineKeyboardButton(text = 'Исключать ботов{}'.format(config.settings_statuses[curr_settings['kick_bots']]), callback_data='kick_bots::{chat_id}'.format(chat_id", "types.InlineKeyboardButton(text = 'Ограничения новых пользователей', callback_data = 'new_users_restrictions::{chat_id}'.format(chat_id = chat_id)) keyboard.add(btn) btn =", "= chat_id)) keyboard.add(btn) btn = types.InlineKeyboardButton(text = 'Ограничения новых пользователей', callback_data = 'new_users_restrictions::{chat_id}'.format(chat_id", "utils.send_err_report(msg, 'not_enought_rights') utils.new_update(msg, time.time()-start_time) @bot.message_handler(commands=['sticker_unban'], func=lambda msg: msg.chat.type == 'supergroup') def bot_sticker_unban(msg): start_time", "datetime.timedelta(seconds = int(time.time()-msg.date)) uptime_str = str(uptime).replace('day', 'days').replace('dayss', 'days') working_time_str = str(working_time).replace('day', 'days').replace('dayss', 'days')", "def bot_admin_menu(c): bot.edit_message_text( chat_id = c.message.chat.id, message_id = c.message.message_id, text = 'Выберите тип", "chat_id)) btn5 = types.InlineKeyboardButton(text = 'Навсегда', callback_data = 'time_ro_+10000::{chat_id}'.format(chat_id = chat_id)) btn6 =", "= welcome_settings_kb(chat_id) ) bot.answer_callback_query( callback_query_id = c.id, text = 'Изменения подтверждены.' 
) else:", "def generate_leave_kb(msg): chat_id = msg.chat.id keyboard = types.InlineKeyboardMarkup(row_width=1) btn = types.InlineKeyboardButton(text = 'Да,", ") else: utils.new_user_in_chat(msg) if utils.need_greeting(msg): r = bot.send_message( msg.chat.id, utils.generate_welcome_text(msg), parse_mode='HTML' ) utils.add_to_delete_queue(msg.chat.id,", "api.get_group_params(chat_id) btn = types.InlineKeyboardButton(text = 'Автоматический read-only на {} час - {}'.format(curr_settings['restrictions']['for_time'], config.settings_statuses[curr_settings['restrictions']['read_only']]),", "callback_data = 'change_all::{chat_id}'.format(chat_id = chat_id)) keyboard.add(btn) btn = types.InlineKeyboardButton(text = 'Назад', callback_data='to_group_settings_menu::{chat_id}'.format(chat_id =", "msg.chat.id, text = 'Данное приветствие не работает' ) utils.new_update(msg, time.time()-start_time) @bot.message_handler(commands=['kick'], func=lambda msg:", "= 'HTML' ) else: bot.reply_to( msg, text.group_commands[utils.get_group_lang(msg.chat.id)]['errors']['prefix'].format( reason = text.group_commands[utils.get_group_lang(msg.chat.id)]['errors']['reasons']['user_is_admin'] ), parse_mode='HTML' )", "= int(c.data.split('_')[2].split('::')[0]) chat_id = utils.parse_chat_id(c) if utils.check_status(c.from_user.id, utils.parse_chat_id(c)): settings = api.get_group_params(chat_id) settings['warns']['action'] =", "'Ограничения новых пользователей', callback_data = 'new_users_restrictions::{chat_id}'.format(chat_id = chat_id)) keyboard.add(btn) btn = types.InlineKeyboardButton(text =", "исключений: {}'.format(curr_settings['warns']['count']), callback_data = 'empty_callback::{chat_id}'.format(chat_id = chat_id)) keyboard.add(btn) btn1 = types.InlineKeyboardButton(text = '➖2',", "utils.read_only(msg) else: utils.send_err_report(msg, 'not_enought_rights') utils.new_update(msg, time.time()-start_time) @bot.message_handler(commands=['stickerpack_ban'],func=lambda msg: msg.chat.type == 
'supergroup') def bot_stickerpack_ban(msg):", "'У вас недостаточно прав для выполнения этого действия.' ) else: if c.from_user.id ==", ") elif status2 is not True: bot.send_message( msg.chat.id, text = text.group_commands[utils.get_group_lang(chat_id)]['log_channel']['confirmation']['errors']['bot_is_not_admin'] ) except", "= 'empty_callback::{chat_id}'.format(chat_id = chat_id)) keyboard.add(btn) btn1 = types.InlineKeyboardButton(text = '➖2', callback_data = 'warns_count_-2::{chat_id}'.format(chat_id", "callback_data = 'time_ro_-2::{chat_id}'.format(chat_id = chat_id)) btn2 = types.InlineKeyboardButton(text = '➖1', callback_data = 'time_ro_-1::{chat_id}'.format(chat_id", "='HTML') utils.new_update(msg, time.time()-start_time) @bot.message_handler(content_types = ['sticker'], func = lambda msg: not utils.check_status(msg.from_user.id, msg.chat.id))", ") bot.edit_message_reply_markup( chat_id = c.message.chat.id, message_id = c.message.message_id, reply_markup = generate_broadcast_vars_menu_kb() ) @bot.callback_query_handler(func", "handle(request): if request.match_info.get('token') == bot.token: request_body_dict = await request.json() update = telebot.types.Update.de_json(request_body_dict) bot.process_new_updates([update])", "= '➕10', callback_data = 'welcome_timer_+10::{chat_id}'.format(chat_id = chat_id)) kb.add(btn1, btn2, btn3, btn4) btn =", "= 'Read-only на сутки', callback_data = 'warns_action_3::{chat_id}'.format(chat_id = chat_id)) keyboard.add(btn1, btn2, btn3, btn4)", "uptime_str.replace(uptime_str.split(',')[0], utils.get_text_translation(uptime_str.split(',')[0]), 'ru') if working_time.days != 0: working_time_str = working_time_str.replace(working_time_str.split(',')[0], utils.get_text_translation(working_time_str.split(',')[0], 'ru')) bot.send_message(", "api.get_unblocked_users_count(), unblocked_chats = api.get_unblocked_chats_count() ) ) @bot.callback_query_handler(func = lambda c: c.data == 'change_lang')", "= 'HTML' ) 
@bot.message_handler(commands=['ro'], func=lambda msg: msg.chat.type == 'supergroup') def bot_users_ro(msg): start_time =", "= 'new_restrictions_admins_only_{state}::{chat_id}'.format(state = config.settings_states[curr_settings['restrictions']['admins_only']], chat_id = chat_id)) keyboard.add(btn) btn = types.InlineKeyboardButton(text = 'Назад',", "= '➕5', callback_data = 'welcome_timer_+5::{chat_id}'.format(chat_id = chat_id)) btn4 = types.InlineKeyboardButton(text = '➕10', callback_data", "= utils.parse_chat_id(c) user_id = utils.parse_user_id(c) if api.get_group_params(chat_id)['restrictions']['admins_only']: if utils.check_status(c.from_user.id, utils.parse_chat_id(c)): utils.unban_user_button(c) user =", "btn6 = types.InlineKeyboardButton(text = 'Сброс', callback_data = 'time_ro_-10000000000::{chat_id}'.format(chat_id = chat_id)) keyboard.add(btn1, btn2, btn3,", "), parse_mode='HTML' ) except Exception as e: logging.error(e) else: utils.ban_user(msg) utils.new_update(msg, time.time()-start_time) @bot.message_handler(commands=['ping'])", ") utils.new_update(msg, time.time()-start_time) @bot.message_handler(content_types=['photo'], func = lambda msg: msg.chat.id == 303986717) def bot_text(msg):", "types.InlineKeyboardMarkup(row_width = 4) curr_settings = api.get_group_params(chat_id) btn = types.InlineKeyboardButton(text = 'Максимальное количество исключений:", "delete_settings(chat_id): keyboard = types.InlineKeyboardMarkup(row_width=1) curr_settings = api.get_group_params(chat_id) for cont_type in config.available_attachments: btn =", "time.time() uptime = datetime.timedelta(seconds = int(time.time()-start_time)) working_time = datetime.timedelta(seconds = int(time.time()-msg.date)) uptime_str =", "= 'HTML' ) utils.new_update(msg, time.time()-start_time) @bot.message_handler(commands = ['get_id']) def bot_get_id(msg): bot.send_message( msg.chat.id, msg.chat.id", "Exception as e: logging.error(e) else: utils.ban_user(msg) utils.new_update(msg, 
time.time()-start_time) @bot.message_handler(commands=['ping']) def bot_ping(msg): start_timee =", ") bot.delete_message( msg.chat.id, msg.message_id ) @bot.message_handler(commands = ['settings'], func = lambda msg: msg.chat.type", "% (WEBHOOK_HOST, WEBHOOK_PORT) WEBHOOK_URL_PATH = \"/%s/\" % (secret_config.token) start_time = int(time.time()) bot =", "if utils.check_status(msg.from_user.id, msg.chat.id): sticker_id = msg.reply_to_message.sticker.file_id utils.ban_sticker(msg, sticker_id) elif not utils.check_status(msg.from_user.id, msg.chat.id): utils.send_err_report(msg,", "# bot.send_message( # msg.chat.id, # text.group_commands[utils.get_group_lang(msg.chat.id)]['restricted']['url'].format( # user_id = msg.from_user.id, # user_name =", "def bot_info_log(msg): if utils.check_log(msg.chat.id): m = text.group_commands[utils.get_group_lang(msg.chat.id)]['log_channel']['info']['is_on'].format( chat_id = utils.get_log_id(msg.chat.id), chat_name = bot.get_chat(utils.get_log_id(msg.chat.id)).title", "{}</b>'.format(msg.chat.title), reply_markup=group_setting(msg.chat.id), parse_mode='HTML' ) utils.new_update(msg, time.time()-start_time) @bot.message_handler(commands=['start'], func=lambda msg: msg.chat.type == 'private') def", "), parse_mode = 'HTML' ) else: utils.new_user_in_chat(msg) if utils.need_greeting(msg): r = bot.send_message( msg.chat.id,", "= types.InlineKeyboardButton(text = 'Назад', callback_data='to_group_settings_menu::{chat_id}'.format(chat_id = chat_id)) keyboard.add(btn) return keyboard def remove_warns_kb(user_id): kb", "= bot.reply_to( msg, 'Настройки отправлены вам в личные сообщения', ) kb.add(types.InlineKeyboardButton(text = 'Удалить',", "func=lambda msg: msg.chat.type != 'private') def bot_group_start(msg): start_time = time.time() api.register_new_chat(msg.chat) utils.new_update(msg, time.time()-start_time)", "parse_mode = 'HTML' ) utils.new_update(msg, time.time()-start_time) @bot.message_handler(commands = ['unban'], func = lambda msg:", 
"kb = types.InlineKeyboardMarkup(row_width=2) user_settings = ujson.loads(api.get_user_param(user_id, 'settings')) btns = [] for i in", "'Нет, останься', callback_data='leave_confirm::{chat_id}'.format(chat_id = chat_id)) keyboard.add(btn) return keyboard def generate_user_menu_kb(user_id): kb = types.InlineKeyboardMarkup(row_width", "'Правила изменены' ) else: bot.send_message( msg.chat.id, text = 'Правила составлены неверно' ) utils.new_update(msg,", "= lambda msg: msg.chat.type != 'private') def bot_check_text(msg): start_time = time.time() msg_text =", "= text.user_messages['start'], parse_mode = 'HTML' ) bot.edit_message_reply_markup( chat_id = c.message.chat.id, message_id = c.message.message_id,", "= { 'is_test': True, 'receivers': curr_bot_settings['broadcast']['check']['recievers'], 'cont_type': 'text', 'msg_text': '', 'file_id': '', 'user_id':", "func = lambda msg: msg.chat.type == 'supergroup') def bot_user_unban(msg): start_time = time.time() if", "user_id = int(words) utils.unban_user(msg, user_id) elif utils.check_status(msg.from_user.id, msg.chat.id) and msg.reply_to_message is not None:", "WEBHOOK_PORT) WEBHOOK_URL_PATH = \"/%s/\" % (secret_config.token) start_time = int(time.time()) bot = telebot.TeleBot(token =", "'В главное меню', callback_data = 'to_main_menu')) return kb def generate_broadcast_settings_menu_kb(): kb = types.InlineKeyboardMarkup(row_width", "bot.send_message( msg.chat.id, text = 'Правила составлены неверно' ) utils.new_update(msg, time.time()-start_time) @bot.message_handler(commands = ['rules'],", "chat_id)) keyboard.add(btn) btn = types.InlineKeyboardButton(text = 'Получить дамп настроек', callback_data = 'get_settings_json::{chat_id}'.format(chat_id =", "btn = types.InlineKeyboardButton(text = 'Принимать рассылки{}'.format(config.settings_statuses['get_notifications']), callback_data='get_notifications') keyboard.add(btn) btn = types.InlineKeyboardButton(text = 'Выбор", "types.InlineKeyboardButton(text = 'Принимать 
рассылки{}'.format(config.settings_statuses[curr_settings['get_notifications']]), callback_data = 'get_notifications::{chat_id}'.format(chat_id = chat_id)) keyboard.add(btn) btn = types.InlineKeyboardButton(text", "c: c.data.startswith('time_ro_')) def ro_time_change(c): change_time = int(c.data.split('_')[2].split('::')[0]) chat_id = utils.parse_chat_id(c) if utils.check_status(c.from_user.id, utils.parse_chat_id(c)):", "= 0 for i in inviters: inviter_info = bot.get_chat_member(chat_id, i['inviter']) counter += 1", ") except Exception as e: logging.error(e) else: utils.ban_user(msg) utils.new_update(msg, time.time()-start_time) @bot.message_handler(commands=['ping']) def bot_ping(msg):", "'Назад', callback_data='to_group_settings_menu::{chat_id}'.format(chat_id = chat_id)) keyboard.add(btn) return keyboard def warns_settings_kb(chat_id): keyboard = types.InlineKeyboardMarkup(row_width =", "= bot.get_chat_member( chat_id, user_id ) bot.edit_message_text( text = text.group_commands[utils.get_group_lang(c.message.chat.id)]['restricted']['new_user']['button_pressed'].format( user_id = user.user.id, user_name", "# coding: utf8 import datetime import logging import random import re import ssl", "c.message.chat.id, message_id = c.message.message_id ) utils.add_to_delete_queue(chat_id, c.message.message_id, api.get_group_params(chat_id)['greeting']['delete_timer']) else: bot.answer_callback_query( callback_query_id = c.id,", "if utils.check_status(c.from_user.id, utils.parse_chat_id(c)): utils.change_state_deletions_main(chat_id, 'system') bot.edit_message_reply_markup( chat_id=c.message.chat.id, message_id=c.message.message_id, reply_markup=group_setting(utils.parse_chat_id(c)) ) bot.answer_callback_query( callback_query_id =", "reply_markup = generate_user_menu_kb(msg.from_user.id) ) @bot.message_handler(commands=['set_text'], func = lambda msg: msg.chat.type != 'private') def", "reply_markup = warns_settings_kb(chat_id) ) bot.answer_callback_query( callback_query_id = 
c.id, text = 'Изменения подтверждены.' )", "del_url(c): chat_id = utils.parse_chat_id(c) if utils.check_status(c.from_user.id, utils.parse_chat_id(c)): utils.change_state_deletions_main(chat_id, 'url') bot.edit_message_reply_markup( chat_id=c.message.chat.id, message_id=c.message.message_id, reply_markup=group_setting(utils.parse_chat_id(c))", "btn2, btn3, btn4) btn = types.InlineKeyboardButton(text = 'Показать приветствие', callback_data = 'welcome_get::{chat_id}'.format(chat_id =", "bot.send_message( msg.chat.id, text = text.group_commands[utils.get_group_lang(chat_id)]['log_channel']['confirmation']['errors']['bot_is_not_admin'] ) except Exception as e: print(e) @bot.message_handler(commands =", "msg.chat.id, # text.group_commands[utils.get_group_lang(msg.chat.id)]['restricted']['url'].format( # user_id = msg.from_user.id, # user_name = api.replacer(msg.from_user.first_name) # ),", "= 1 api.change_group_params(chat_id, ujson.dumps(settings)) bot.edit_message_reply_markup( chat_id = c.message.chat.id, message_id = c.message.message_id, reply_markup =", "= 'Ввести сообщение', callback_data = 'broadcast_message::input') btn2 = types.InlineKeyboardButton(text = 'Просмотреть сообщение', callback_data", "True if i.user.id == my_info.id: status2 = True if status1 is True and", "Exception: bot.reply_to( msg, text = '' ) utils.new_update(msg, time.time()-start_time) @bot.message_handler(commands = ['reset_settings'], func", "c.message.chat.id, message_id = c.message.message_id, text = text.user_messages['start'], parse_mode = 'HTML' ) bot.edit_message_reply_markup( chat_id", "lang_keyboard = types.InlineKeyboardMarkup() for i in config.languages: lang_keyboard.add(types.InlineKeyboardButton(text = i['title'], callback_data = 'lang::{lang_code}'.format(lang_code", "bot.answer_callback_query( callback_query_id = c.id, text = 'Переход выполнен.' 
) @bot.callback_query_handler(func = lambda c:", ") bot.send_message( c.message.chat.id, text.group_commands[utils.get_group_lang(c.message.chat.id)]['leave']['accepted'] ) bot.leave_chat( c.message.chat.id ) else: bot.send_message( c.message.chat.id, text.group_commands[utils.get_group_lang(c.message.chat.id)]['leave']['cancelled'] )", "if utils.is_restricted(msg): bot.delete_message( msg.chat.id, msg.message_id ) utils.new_update(msg, time.time()-start_time) # Кнопки @bot.callback_query_handler(func = lambda", "return web.Response(status=403) app.router.add_post('/{token}/', handle) def create_user_language_keyboard(): lang_keyboard = types.InlineKeyboardMarkup() for i in config.languages:", "= types.InlineKeyboardButton(text = '➖2', callback_data = 'time_ro_-2::{chat_id}'.format(chat_id = chat_id)) btn2 = types.InlineKeyboardButton(text =", "chat_id)) keyboard.add(btn) btn = types.InlineKeyboardButton(text = 'Настройка предупреждений', callback_data = 'warns_settings::{chat_id}'.format(chat_id = chat_id))", "bot.delete_message( c.message.chat.id, c.message.message_id ) bot.send_message( c.message.chat.id, text.group_commands[utils.get_group_lang(c.message.chat.id)]['leave']['accepted'] ) bot.leave_chat( c.message.chat.id ) else: bot.send_message(", "bot_get_chat_refs(c): chat_id = utils.parse_chat_id(c) user_id = c.from_user.id inviters = utils.get_top_inviters(chat_id) m = text.group_commands[utils.get_group_lang(chat_id)]['refs_stats']['header']", "= 'warns_settings::{chat_id}'.format(chat_id = chat_id)) keyboard.add(btn) btn = types.InlineKeyboardButton(text = 'Настройка приветствий', callback_data =", "bot.send_message( msg.chat.id, text.group_commands['ru']['restricted']['bot'], parse_mode = 'HTML', reply_markup = types.ReplyKeyboardRemove() ) elif utils.check_global_ban(msg): bot.kick_chat_member(", "message_id = c.message.message_id, reply_markup = generate_broadcast_check_menu_kb() ) bot.answer_callback_query( callback_query_id = c.id, text =", 
"c.message.message_id, text = 'Админка' ) bot.edit_message_reply_markup( chat_id = c.message.chat.id, message_id = c.message.message_id, reply_markup", "c.message.chat.id, message_id = c.message.message_id, reply_markup = new_users_restrictions_kb(chat_id) ) @bot.callback_query_handler(func = lambda c: c.data.startswith('read_only'))", "= types.InlineKeyboardButton(text = 'Фильтры', callback_data='deletions_settings::{chat_id}'.format(chat_id = chat_id)) keyboard.add(btn) btn = types.InlineKeyboardButton(text = 'Ограничения", "sqlite_info = logging.getLogger('sqlite') main_info = logging.getLogger('main_info') report_info = logging.getLogger('reports') if __name__ == '__main__':", "= unban_new_user_kb(msg), parse_mode = 'HTML' ) utils.add_to_delete_queue(msg.chat.id, r.message_id, api.get_group_params(msg.chat.id)['restrictions']['for_time']*3600) if msg.new_chat_member.is_bot and api.get_group_params(msg.chat.id)['kick_bots']:", "utils.set_log_channel(msg.chat.id, msg.forward_from_chat.id) elif status1 is not True: bot.send_message( msg.chat.id, text = text.group_commands[utils.get_group_lang(chat_id)]['log_channel']['confirmation']['errors']['user_is_not_creator'] )", "= utils.get_log_id(msg.chat.id), chat_name = bot.get_chat(utils.get_log_id(msg.chat.id)).title ) else: m = text.group_commands[utils.get_group_lang(msg.chat.id)]['log_channel']['info']['is_off'] bot.send_message( msg.chat.id, m,", "'not_enought_rights') utils.new_update(msg, time.time()-start_time) @bot.message_handler(commands=['stickerpack_ban'],func=lambda msg: msg.chat.type == 'supergroup') def bot_stickerpack_ban(msg): start_time = time.time()", "настройки: {}'.format(config.settings_statuses[api.get_group_params(chat_id)['deletions']['files'][cont_type]]) ) else: bot.answer_callback_query( callback_query_id = c.id, show_alert = True, text =", "time.time()-start_time) @bot.message_handler(commands=['help']) def bot_help(msg): start_time = time.time() bot.send_message( msg.from_user.id, 
text.user_messages[utils.get_user_lang(msg)]['help'], parse_mode='HTML' ) utils.new_update(msg,", "= api.get_group_params(chat_id) settings['restrictions']['admins_only'] = utils.to_bool(state) api.change_group_params(chat_id, ujson.dumps(settings)) bot.edit_message_reply_markup( chat_id = c.message.chat.id, message_id =", "Текущий статус настройки: {}'.format(config.settings_statuses[api.get_group_params(chat_id)['deletions']['url']]) ) @bot.callback_query_handler(func = lambda c: c.data.startswith('del_system')) def del_system(c): chat_id", "@bot.message_handler(commands = ['leave'], func = lambda msg: msg.chat.type != 'private' and utils.check_status(msg.from_user.id, msg.chat.id))", "parse_mode = 'HTML' ) @bot.message_handler(commands = ['leave'], func = lambda msg: msg.chat.type !=", "c.data.split() bot.delete_message( c.message.chat.id, words[2] ) bot.delete_message( c.message.chat.id, words[1] ) @bot.callback_query_handler(func = lambda c:", "@bot.callback_query_handler(func = lambda c: c.data.startswith('new_restrictions_admins_only_')) def warns_count_change(c): chat_id = utils.parse_chat_id(c) state = c.data.split('_')[4].split('::')[0]", "return keyboard def generate_leave_kb(msg): chat_id = msg.chat.id keyboard = types.InlineKeyboardMarkup(row_width=1) btn = types.InlineKeyboardButton(text", "parse_mode = 'HTML' ) @bot.message_handler(commands = ['rmkb'], func = lambda msg: msg.chat.type in", ") kb = types.InlineKeyboardMarkup() kb.add(types.InlineKeyboardButton(text = 'В главное меню', callback_data = 'to_main_menu')) bot.edit_message_reply_markup(", ") app = web.Application() t = Thread(target = utils.check_deleting_queue) t.start() async def handle(request):", "time.time() api.register_new_chat(msg.chat) utils.new_update(msg, time.time()-start_time) @bot.message_handler(commands = ['get_logs'], func = lambda msg: msg.chat.id ==", "types.InlineKeyboardMarkup(row_width = 4) curr_settings = api.get_group_params(chat_id) btn = 
types.InlineKeyboardButton(text = 'Автоматический read-only на", "@bot.message_handler(commands=['kick'], func=lambda msg: msg.chat.type != 'private') def bot_kick(msg): start_time = time.time() utils.kick_user(msg) utils.new_update(msg,", "start_time = time.time() if utils.check_status(msg.from_user.id, msg.chat.id): utils.read_only(msg) else: utils.send_err_report(msg, 'not_enought_rights') utils.new_update(msg, time.time()-start_time) @bot.message_handler(commands=['stickerpack_ban'],func=lambda", "'cont_type': 'text', 'msg_text': '', 'file_id': '', 'user_id': c.from_user.id, 'message_id': c.message.message_id } ) kb", "c.data.startswith('lang::')) def change_language(c): words = re.split('::', c.data) lang = words[1] bot.edit_message_text( chat_id =", "text = 'Предупреждения обнулены.', chat_id = c.message.chat.id, message_id = c.message.message_id ) else: bot.answer_callback_query(", "logging.INFO ) app = web.Application() t = Thread(target = utils.check_deleting_queue) t.start() async def", "msg start_time = time.time() if utils.is_user_new(msg): if utils.have_args(msg): referrer = utils.parse_arg(msg)[1] bot.send_message( msg.chat.id,", "True, text = 'У вас недостаточно прав для выполнения этого действия.' 
) else:", "msg.chat.id, text.promotion_message, parse_mode='HTML' ) bot.leave_chat( msg.chat.id ) if msg.new_chat_member.id == 495038140: api.change_group_params(msg.chat.id, ujson.dumps(config.default_group_settings))", "# api.change_group_params(chat_id, ) # Вебхук bot.remove_webhook() bot.set_webhook( url=WEBHOOK_URL_BASE + WEBHOOK_URL_PATH, certificate=open(WEBHOOK_SSL_CERT, 'r')) context", "elif not utils.check_status(msg.from_user.id, msg.chat.id): utils.send_err_report(msg, 'not_enought_rights') elif utils.check_status(msg.reply_to_message.from_user.id, msg.chat.id): utils.send_err_report(msg, 'user_is_admin') utils.new_update(msg, time.time()-start_time)", "btn = types.InlineKeyboardButton(text = 'Удалять системные сообщения{}'.format(config.settings_statuses[curr_settings['deletions']['system']]), callback_data = 'del_system::{chat_id}'.format(chat_id = chat_id)) keyboard.add(btn)", "и отправить @f0rden для восстановления их, в случае сбоя:\\n'+ujson.dumps(api.get_group_params(chat_id)) ) bot.answer_callback_query( c.id, text", "utils.parse_chat_id(c)): user = bot.get_chat_member( chat_id, user_id ) if user.status in ['restricted']: bot.restrict_chat_member( chat_id,", "= 'Принимать рассылки{}'.format(config.settings_statuses['get_notifications']), callback_data='get_notifications') keyboard.add(btn) btn = types.InlineKeyboardButton(text = 'Выбор языка'.format(config.settings_statuses['get_notifications']), callback_data='open_lang_menu') keyboard.add(btn)", "settings[''] # api.change_group_params(chat_id, ) # Вебхук bot.remove_webhook() bot.set_webhook( url=WEBHOOK_URL_BASE + WEBHOOK_URL_PATH, certificate=open(WEBHOOK_SSL_CERT, 'r'))", "'<b>Настройки группы {}</b>'.format(bot.get_chat(chat_id).title), parse_mode = 'HTML' ) bot.edit_message_reply_markup( chat_id = c.message.chat.id, message_id =", "is True and status2 is True: utils.set_log_channel(msg.chat.id, msg.forward_from_chat.id) elif status1 is not True:", "msg.forward_from_chat is not None and 
utils.check_status(msg.from_user.id, msg.chat.id) and msg.forward_from_chat.id == utils.get_log_id(msg.chat.id) and utils.check_log(msg.chat.id)", "keyboard.add(btn1, btn2, btn3, btn4) keyboard.add(btn5, btn6) btn = types.InlineKeyboardButton(text = 'Снятие ограничений разрешено", "bot.delete_message( msg.chat.id, msg.message_id ) if msg.chat.type == 'channel': bot.send_message( msg.chat.id, text.promotion_message, parse_mode='HTML' )", "c.data) lang = words[1] bot.edit_message_text( chat_id = c.message.chat.id, message_id = c.message.message_id, text =", "new_greeting = msg.text[len(msg.text):msg.entities[0].length:-1][::-1] if utils.check_text(new_greeting): utils.set_greeting(msg, new_greeting) bot.send_message( msg.chat.id, 'Приветствие изменено' ) else:", "chat_id)) keyboard.add(btn) btn = types.InlineKeyboardButton(text = 'Настройка приветствий', callback_data = 'welcome_settings::{chat_id}'.format(chat_id = chat_id))", "= types.InlineKeyboardMarkup(row_width=1) btn = types.InlineKeyboardButton(text = 'Да, выйди из чата', callback_data='leave_cancel::{chat_id}'.format(chat_id = chat_id))", "= lambda c: c.data.startswith('delete::')) def group_settings_deletions(c): chat_id = utils.parse_chat_id(c) cont_type = re.split('::', c.data)[1]", ") except Exception: bot.reply_to( msg, text = '' ) utils.new_update(msg, time.time()-start_time) @bot.message_handler(commands =", "Статус настройки: {}'.format(config.settings_statuses[api.get_group_params(chat_id)['deletions']['files'][cont_type]]) ) else: bot.answer_callback_query( callback_query_id = c.id, show_alert = True, text", "= types.InlineKeyboardMarkup(row_width=1) btn = types.InlineKeyboardButton(text = 'Удалить предупреждения', callback_data = 'delete_warns::{user_id}'.format(user_id = user_id))", "callback_data = 'time_ro_+1::{chat_id}'.format(chat_id = chat_id)) btn4 = types.InlineKeyboardButton(text = '➕2', callback_data = 'time_ro_+2::{chat_id}'.format(chat_id", "if arg in ['users', 'chats', 'all']: 
curr_bot_settings['broadcast']['check']['recievers'] = arg api.change_bot_settings(secret_config.token, ujson.dumps(curr_bot_settings)) bot.edit_message_reply_markup( chat_id", "types.InlineKeyboardButton(text = 'Удалять системные сообщения{}'.format(config.settings_statuses[curr_settings['deletions']['system']]), callback_data = 'del_system::{chat_id}'.format(chat_id = chat_id)) keyboard.add(btn) btn =", "= generate_user_groups(user_id) ) bot.answer_callback_query( callback_query_id = c.id, text = 'Переход выполнен' ) @bot.callback_query_handler(func", "time.time() message = msg if len(msg.text) not in [9, 21]: new_rules = msg.text[len(msg.text):msg.entities[0].length:-1][::-1]", "= generate_user_menu_kb(msg.from_user.id) ) @bot.message_handler(commands=['set_text'], func = lambda msg: msg.chat.type != 'private') def bot_set_text(msg):", "недостаточно прав для выполнения этого действия.' ) @bot.callback_query_handler(func = lambda c: c.data.startswith('welcome_timer')) def", "'1': if utils.check_for_urls(msg) and api.get_group_params(msg.chat.id)['deletions']['url']: bot.delete_message( msg.chat.id, msg.message_id ) bot.send_message( msg.chat.id, text.group_commands[utils.get_group_lang(msg.chat.id)]['restricted']['url'].format( user_id", "bot.edit_message_reply_markup( chat_id = c.message.chat.id, message_id = c.message.message_id, reply_markup = generate_user_groups(user_id) ) bot.answer_callback_query( callback_query_id", "msg.chat.id)) def bot_check_sticker(msg): start_time = time.time() if utils.is_restricted(msg) or utils.is_sticker_restricted(msg): bot.delete_message( msg.chat.id, msg.message_id", "= 'В главное меню', callback_data = 'to_main_menu')) return kb def generate_broadcast_vars_menu_kb(): kb =", "True: utils.remove_log_channel(msg.chat.id) elif status1 is not True: bot.send_message( msg.chat.id, text = text.group_commands[utils.get_group_lang(chat_id)]['log_channel']['confirmation']['errors']['user_is_not_creator'] )", 
"api.get_group_params(msg.chat.id)['restrictions']['read_only']: bot.restrict_chat_member( msg.chat.id, msg.new_chat_member.id, until_date = int(time.time()+api.get_group_params(msg.chat.id)['restrictions']['for_time']*3600) ) r = bot.send_message( msg.chat.id, text.group_commands['ru']['restricted']['new_user']['read_only'].format(", "# parse_mode='HTML' # ) utils.new_update(msg, time.time()-start_time) @bot.message_handler(content_types=['photo'], func = lambda msg: msg.chat.id ==", "c.message.message_id ) utils.add_to_delete_queue(msg.chat.id, r.message_id, api.get_group_params(msg.chat.id)['greeting']['delete_timer']) else: bot.answer_callback_query( callback_query_id = c.id, show_alert = True,", "'time_ro_-1::{chat_id}'.format(chat_id = chat_id)) btn3 = types.InlineKeyboardButton(text = '➕1', callback_data = 'time_ro_+1::{chat_id}'.format(chat_id = chat_id))", "callback_data = 'broadcast_settings') kb.add(btn1, btn2) kb.add(types.InlineKeyboardButton(text = 'В главное меню', callback_data = 'to_main_menu'))", "if utils.check_status(msg.from_user.id, msg.chat.id): api.register_new_chat(msg.chat) api.change_group_params(msg.chat.id, ujson.dumps(config.default_group_settings)) bot.send_message( msg.chat.id, text.group_commands[utils.get_group_lang(msg.chat.id)]['registration'], parse_mode = 'HTML' )", "# msg.message_id # ) # bot.send_message( # msg.chat.id, # text.group_commands[utils.get_group_lang(msg.chat.id)]['restricted']['url'].format( # user_id =", "text = 'Админка' ) bot.edit_message_reply_markup( chat_id = c.message.chat.id, message_id = c.message.message_id, reply_markup =", "if utils.check_status(c.from_user.id, utils.parse_chat_id(c)): settings = api.get_group_params(chat_id) settings['restrictions']['admins_only'] = utils.to_bool(state) api.change_group_params(chat_id, ujson.dumps(settings)) bot.edit_message_reply_markup( chat_id", "= c.message.chat.id, message_id = c.message.message_id, reply_markup = generate_user_menu_kb(c.from_user.id) ) 
@bot.callback_query_handler(func = lambda c:", "callback_query_id = c.id, show_alert = True, text = 'Вы не являетесь администратором. Текущий", "'channel_chat_created', 'migrate_to_chat_id', 'migrate_from_chat_id', 'pinned_message' ]) def bot_check_system(msg): start_time = time.time() if api.get_group_params(msg.chat.id)['deletions']['system']: bot.delete_message(", "message_id = c.message.message_id, reply_markup = generate_user_menu_kb(c.from_user.id) ) @bot.callback_query_handler(func = lambda c: c.data ==", "print(e) @bot.message_handler(commands = ['dellog'], func = lambda msg: msg.chat.type in ['group', 'supergroup'] and", "msg.chat.id))) if utils.check_status(msg.from_user.id, msg.chat.id): bot.send_message( msg.chat.id, 'Вы действительно хотите сбросить настройки?', reply_markup =", "utils.parse_chat_id(c) bot.edit_message_text( chat_id = c.message.chat.id, message_id = c.message.message_id, text = '<b>Настройки группы {}</b>'.format(bot.get_chat(chat_id).title),", "== 'check_broadcast') def bot_admin_menu(c): bot.edit_message_text( chat_id = c.message.chat.id, message_id = c.message.message_id, text =", "types.InlineKeyboardButton(text = 'Назад', callback_data='to_group_settings_menu::{chat_id}'.format(chat_id = chat_id)) keyboard.add(btn) return keyboard def warns_settings_kb(chat_id): keyboard =", "# Кнопки @bot.callback_query_handler(func = lambda c: c.data.startswith('get_chat_refs::')) def bot_get_chat_refs(c): chat_id = utils.parse_chat_id(c) user_id", "uptime_str = uptime_str.replace(uptime_str.split(',')[0], utils.get_text_translation(uptime_str.split(',')[0]), 'ru') if working_time.days != 0: working_time_str = working_time_str.replace(working_time_str.split(',')[0], utils.get_text_translation(working_time_str.split(',')[0],", "user = bot.get_chat_member( chat_id, user_id ) if user.status in ['restricted']: bot.restrict_chat_member( chat_id, user_id,", "api.change_group_params(chat_id, ) # Вебхук bot.remove_webhook() bot.set_webhook( 
url=WEBHOOK_URL_BASE + WEBHOOK_URL_PATH, certificate=open(WEBHOOK_SSL_CERT, 'r')) context =", "= c.message.chat.id, message_id = c.message.message_id, text = '<b>Настройки группы {}</b>'.format(bot.get_chat(chat_id).title), parse_mode = 'HTML'", "text.user_messages['ru']['commands']['ping'].format( unix_time = datetime.datetime.fromtimestamp(int(time.time())), working_time = working_time_str, uptime_sec = uptime ), reply_to_message_id=msg.message_id, parse_mode='HTML'", "status2 is not True: bot.send_message( msg.chat.id, text = text.group_commands[utils.get_group_lang(chat_id)]['log_channel']['confirmation']['errors']['bot_is_not_admin'] ) except Exception as", "kb def new_users_restrictions_kb(chat_id): keyboard = types.InlineKeyboardMarkup(row_width = 4) curr_settings = api.get_group_params(chat_id) btn =", "c.message.chat.id, message_id = c.message.message_id, text = 'Рассылка начата' ) bot.edit_message_reply_markup( chat_id = c.message.chat.id,", "= msg.reply_to_message.sticker.file_id utils.ban_sticker(msg, sticker_id) elif not utils.check_status(msg.from_user.id, msg.chat.id): utils.send_err_report(msg, 'not_enought_rights') utils.new_update(msg, time.time()-start_time) @bot.message_handler(commands=['sticker_unban'],", "'broadcast_check::chats') btn3 = types.InlineKeyboardButton(text = 'Все', callback_data = 'broadcast_check::all') btn4 = types.InlineKeyboardButton(text =", "= lambda msg: msg.chat.type != 'private' and utils.check_status(msg.from_user.id, msg.chat.id)) def bot_leave(msg): bot.send_message( msg.chat.id,", "api.register_new_chat(msg.chat) api.change_group_params(msg.chat.id, ujson.dumps(config.default_group_settings)) bot.send_message( msg.chat.id, text.group_commands[utils.get_group_lang(msg.chat.id)]['registration'], parse_mode = 'HTML' ) @bot.message_handler(commands=['ro'], func=lambda msg:", "start_time = time.time() msg_text = msg.text msg_text_low = msg_text.lower() if utils.is_restricted(msg) and not", "= c.message.chat.id, message_id 
= c.message.message_id, text = text.service_messages['stats'].format( all_users = api.get_users_count(), all_chats =", "тип рассылки' ) bot.edit_message_reply_markup( chat_id = c.message.chat.id, message_id = c.message.message_id, reply_markup = generate_broadcast_vars_menu_kb()", "lambda c: c.data == 'admin_menu') def bot_admin_menu(c): bot.edit_message_text( chat_id = c.message.chat.id, message_id =", "return kb def generate_admin_menu_kb(): kb = types.InlineKeyboardMarkup(row_width = 2) btn1 = types.InlineKeyboardButton(text =", "bot_ping(msg) @bot.message_handler(content_types=['text'], func = lambda msg: msg.chat.type != 'private') def bot_check_text(msg): start_time =", "80, 88 или 8443 (порт должен быть открыт!) # На некоторых серверах придется", "0: uptime_str = uptime_str.replace(uptime_str.split(',')[0], utils.get_text_translation(uptime_str.split(',')[0]), 'ru') if working_time.days != 0: working_time_str = working_time_str.replace(working_time_str.split(',')[0],", "text = text.group_commands[utils.get_group_lang(chat_id)]['log_channel']['confirmation']['errors']['bot_is_not_admin'] ) except Exception as e: print(e) @bot.message_handler(commands = ['infolog'], func", "= kb ) @bot.message_handler(commands = ['update_time'], func = lambda msg: utils.check_super_user(msg.from_user.id)) def bot_update_time(msg):", "chat_settings(c): chat_id = utils.parse_chat_id(c) bot.edit_message_text( chat_id = c.message.chat.id, message_id = c.message.message_id, text =", "= lambda c: c.data.startswith('get_notifications')) def notify_change(c): chat_id = utils.parse_chat_id(c) if utils.check_status(c.from_user.id, utils.parse_chat_id(c)): utils.change_state_main(chat_id,", "i['chat_id'])) btns.append(btn) kb.add(*btns) kb.add(types.InlineKeyboardButton(text = 'В главное меню', callback_data = 'to_main_menu')) return kb", "== utils.get_log_id(msg.chat.id) and utils.check_log(msg.chat.id) ) def bot_del_log(msg): print(1) user_id = msg.from_user.id try: admins", 
"bot.send_message( c.message.chat.id, utils.get_greeting(chat_id), parse_mode = 'HTML' ) @bot.callback_query_handler(func = lambda c: c.data.startswith('reset_settings')) def", "text.group_commands['ru']['restricted']['new_user']['read_only'].format( user_id = msg.new_chat_member.id, user_name = api.replacer(msg.new_chat_member.first_name), ban_time = api.get_group_params(msg.chat.id)['restrictions']['for_time'] ), reply_markup =", "['version']) def bot_version(msg): bot.send_message( msg.chat.id, text.user_messages[utils.get_user_lang(msg)]['commands']['version'].format(version = text.VERSION), parse_mode = 'HTML' ) @bot.message_handler(commands", "@bot.callback_query_handler(func = lambda c: c.data.startswith('kick_bots')) def kick_bots(c): chat_id = utils.parse_chat_id(c) if utils.check_status(c.from_user.id, utils.parse_chat_id(c)):", "bot_leave_cb(c): if utils.check_status(c.from_user.id, utils.parse_chat_id(c)): if c.data.endswith('confirm'): bot.delete_message( c.message.chat.id, c.message.message_id ) bot.send_message( c.message.chat.id, text.group_commands[utils.get_group_lang(c.message.chat.id)]['leave']['accepted']", ") if msg_text_low.startswith('разбан'): if utils.check_super_user(msg.from_user.id): utils.global_unban(msg) elif msg_text.lower() in ['глобал бан']: if utils.check_super_user(msg.from_user.id):", "'', 'file_id': '', 'user_id': c.from_user.id, 'message_id': c.message.message_id } ) kb = types.InlineKeyboardMarkup() kb.add(types.InlineKeyboardButton(text", "'to_main_menu') def bot_to_main_menu(c): bot.edit_message_text( chat_id = c.message.chat.id, message_id = c.message.message_id, text = 'Ваше", "message_id = c.message.message_id, text = 'Рассылка начата' ) bot.edit_message_reply_markup( chat_id = c.message.chat.id, message_id", "# На некоторых серверах придется указывать такой же IP, что и выше WEBHOOK_LISTEN", "level = logging.INFO ) app = web.Application() t = Thread(target = utils.check_deleting_queue) t.start()", "def 
bot_set_log(msg): user_id = msg.from_user.id try: admins = bot.get_chat_administrators(msg.forward_from_chat.id) status1 = False status2", "действительно хотите сбросить настройки?', reply_markup = kb ) @bot.message_handler(commands = ['update_time'], func =", "= 'Изменения подтверждены.' ) else: bot.answer_callback_query( callback_query_id = c.id, show_alert = True, text", ") @bot.callback_query_handler(func = lambda c: c.data.startswith('new_users_restrictions')) def new_users_restrictions(c): chat_id = utils.parse_chat_id(c) bot.edit_message_reply_markup( chat_id", "except Exception as e: print(e) bot.reply_to( msg, text.reports_messages['report']['to_user'], parse_mode = 'HTML' ) utils.new_update(msg,", "read-only на {} час - {}'.format(curr_settings['restrictions']['for_time'], config.settings_statuses[curr_settings['restrictions']['read_only']]), callback_data = 'read_only::{chat_id}'.format(chat_id = chat_id)) keyboard.add(btn)", "utils.parse_chat_id(c) if utils.check_status(c.from_user.id, utils.parse_chat_id(c)): utils.change_state_main(chat_id, 'kick_bots') bot.edit_message_reply_markup( chat_id=c.message.chat.id, message_id=c.message.message_id, reply_markup=group_setting(utils.parse_chat_id(c)) ) bot.answer_callback_query( callback_query_id", "chat_id = utils.parse_chat_id(c) bot.edit_message_reply_markup( chat_id = c.message.chat.id, message_id = c.message.message_id, reply_markup = new_users_restrictions_kb(chat_id)", "= lambda msg: msg.chat.type in ['group', 'supergroup']) def bot_remove_kb(msg): kb = types.ReplyKeyboardMarkup(one_time_keyboard=True) kb.add(types.KeyboardButton(text='/rmkb'))", "личные сообщения', ) kb.add(types.InlineKeyboardButton(text = 'Удалить', callback_data = 'settings_delete {} {}'.format(msg.message_id, r.message_id))) bot.edit_message_reply_markup(", "= api.get_group_params(chat_id) btn = types.InlineKeyboardButton(text = 'Отправлять приветствие в чат: 
{}'.format(config.settings_statuses[curr_settings['greeting']['is_enabled']]), callback_data =", "utils.new_update(msg, time.time()-start_time) @bot.message_handler(content_types = ['sticker'], func = lambda msg: not utils.check_status(msg.from_user.id, msg.chat.id)) def", "1) btn1 = types.InlineKeyboardButton(text = 'Мои чаты', callback_data = 'my_chats') btn2 = types.InlineKeyboardButton(text", "chat_id = c.message.chat.id, message_id = c.message.message_id, text = 'Рассылка начата' ) bot.edit_message_reply_markup( chat_id", "lambda msg: not utils.check_status(msg.from_user.id, msg.chat.id)) def testt(msg): start_time = time.time() if utils.is_restricted(msg): bot.delete_message(", "r = bot.send_message( msg.chat.id, utils.generate_welcome_text(msg), parse_mode='HTML' ) utils.add_to_delete_queue(msg.chat.id, r.message_id, api.get_group_params(msg.chat.id)['greeting']['delete_timer']) utils.new_update(msg, time.time()-start_time) @bot.message_handler(content_types=[", "'HTML' ) utils.new_update(msg, time.time()-start_time) @bot.message_handler(commands = ['unban'], func = lambda msg: msg.chat.type ==", "types.InlineKeyboardButton(text = 'Read-only на сутки', callback_data = 'warns_action_3::{chat_id}'.format(chat_id = chat_id)) keyboard.add(btn1, btn2, btn3,", "= c.message.message_id, reply_markup = delete_settings(utils.parse_chat_id(c)) ) bot.answer_callback_query( callback_query_id = c.id, text = 'Переход", "kb.add(types.InlineKeyboardButton(text = 'В главное меню', callback_data = 'to_main_menu')) return kb @bot.channel_post_handler(content_types=['text'], func =", "def bot_sticker_ban(msg): start_time = time.time() if utils.check_status(msg.from_user.id, msg.chat.id): sticker_id = msg.reply_to_message.sticker.file_id utils.ban_sticker(msg, sticker_id)", "and utils.check_status(msg.from_user.id, msg.chat.id)) def bot_leave(msg): bot.send_message( msg.chat.id, text.group_commands[utils.get_group_lang(msg.chat.id)]['leave']['question'], reply_markup = 
generate_leave_kb(msg), parse_mode =", "kb.add(btn1, btn2) kb.add(types.InlineKeyboardButton(text = 'В главное меню', callback_data = 'to_main_menu')) return kb def", "WEBHOOK_LISTEN = '0.0.0.0' WEBHOOK_SSL_CERT = './webhook_cert.pem' # Путь к сертификату WEBHOOK_SSL_PRIV = './webhook_pkey.pem'", "def notify_change(c): chat_id = utils.parse_chat_id(c) if utils.check_status(c.from_user.id, utils.parse_chat_id(c)): utils.change_state_main(chat_id, 'get_notifications') bot.edit_message_reply_markup( chat_id=c.message.chat.id, message_id=c.message.message_id,", ") @bot.callback_query_handler(func = lambda c: c.data.startswith('time_ro_')) def ro_time_change(c): change_time = int(c.data.split('_')[2].split('::')[0]) chat_id =", "'supergroup']) def bot_remove_kb(msg): kb = types.ReplyKeyboardMarkup(one_time_keyboard=True) kb.add(types.KeyboardButton(text='/rmkb')) r = bot.send_message( msg.chat.id, text =", ") @bot.callback_query_handler(func = lambda c: c.data.startswith('welcome_timer')) def welcome_timer_change(c): change_count = int(c.data.split('_')[2].split('::')[0]) chat_id =", "utils.parse_chat_id(c) if utils.check_status(c.from_user.id, utils.parse_chat_id(c)): utils.change_state_main(chat_id, 'get_notifications') bot.edit_message_reply_markup( chat_id=c.message.chat.id, message_id=c.message.message_id, reply_markup=group_setting(utils.parse_chat_id(c)) ) bot.answer_callback_query( callback_query_id", "группы {}</b>'.format(bot.get_chat(chat_id).title), parse_mode = 'HTML' ) bot.edit_message_reply_markup( chat_id = c.message.chat.id, message_id = c.message.message_id,", "= time.time() if api.get_group_params(msg.chat.id)['deletions']['system']: bot.delete_message( msg.chat.id, msg.message_id ) utils.new_update(msg, time.time()-start_time) @bot.message_handler(commands=['report']) def bot_report(msg):", "-1001236256304 and utils.check_super_user(msg.from_user.id)) def bot_logs(msg): bot.send_document(msg.chat.id, open('logs.txt', 'rb')) 
@bot.message_handler(commands = ['menu']) def bot_user_menu(msg):", "settings['warns']['count'] = 1 api.change_group_params(chat_id, ujson.dumps(settings)) bot.edit_message_reply_markup( chat_id = c.message.chat.id, message_id = c.message.message_id, reply_markup", "utils.check_status(c.from_user.id, utils.parse_chat_id(c)): if c.data.endswith('confirm'): bot.delete_message( c.message.chat.id, c.message.message_id ) bot.send_message( c.message.chat.id, text.group_commands[utils.get_group_lang(c.message.chat.id)]['leave']['accepted'] ) bot.leave_chat(", "{}'.format(config.settings_statuses[api.get_group_params(chat_id)['deletions']['system']]) ) @bot.callback_query_handler(func = lambda c: c.data.startswith('kick_bots')) def kick_bots(c): chat_id = utils.parse_chat_id(c) if", "parse_mode='HTML' # ) utils.new_update(msg, time.time()-start_time) @bot.message_handler(content_types=['photo'], func = lambda msg: msg.chat.id == 303986717)", "= chat_id)) keyboard.add(btn) return keyboard def generate_user_menu_kb(user_id): kb = types.InlineKeyboardMarkup(row_width = 1) btn1", "user_settings = api.get_user_param(user_id, 'settings') bot.edit_message_text( chat_id = c.message.chat.id, message_id = c.message.message_id, text =", "ujson.dumps(settings)) bot.edit_message_reply_markup( chat_id = c.message.chat.id, message_id = c.message.message_id, reply_markup = warns_settings_kb(chat_id) ) bot.answer_callback_query(", "msg: msg.chat.id == 303986717) def bot_text(msg): start_time = time.time() bot.reply_to(msg, \"<code>'{}': '{}',</code>\".format(msg.photo[0].file_id, msg.caption),", "'{}',</code>\".format(msg.photo[0].file_id, msg.caption), parse_mode ='HTML') utils.new_update(msg, time.time()-start_time) @bot.message_handler(content_types = ['sticker'], func = lambda msg:", "msg: msg.chat.type != 'private') def bot_new_warn(msg): start_time = time.time() if utils.check_status(msg.from_user.id, msg.chat.id) and", ") @bot.callback_query_handler(func = lambda c: 
c.data.startswith('warns_del')) def del_warns(c): user_id = utils.parse_user_id(c) chat_id =", "= ujson.loads(api.get_bot_settings(secret_config.token)) s = { 'users': 'пользователи', 'chats': 'диалоги', 'all': 'все' } btn1", "return kb @bot.channel_post_handler(content_types=['text'], func = lambda msg: msg.chat.id == secret_config.channel_ID) def bot_broadcast(msg): r", "'Навсегда', callback_data = 'time_ro_+10000::{chat_id}'.format(chat_id = chat_id)) btn6 = types.InlineKeyboardButton(text = 'Сброс', callback_data =", "msg.chat.id) and not utils.check_log(msg.chat.id) ) def bot_set_log(msg): user_id = msg.from_user.id try: admins =", "= ['sticker'], func = lambda msg: not utils.check_status(msg.from_user.id, msg.chat.id)) def bot_check_sticker(msg): start_time =", "int(i['COUNT(`inviter`)']) ) bot.send_message( user_id, m, parse_mode = 'HTML' ) bot.answer_callback_query( c.id, text =", "kb.add(types.InlineKeyboardButton(text = 'Да, выполнить сброс', callback_data = 'reset_settings_confirmation::{chat_id}'.format(chat_id = msg.chat.id))) kb.add(types.InlineKeyboardButton(text = 'Нет,", "= c.message.message_id, text = 'Админка' ) bot.edit_message_reply_markup( chat_id = c.message.chat.id, message_id = c.message.message_id,", "warns_settings_kb(chat_id) ) bot.answer_callback_query( callback_query_id = c.id, text = 'Изменения подтверждены.' ) else: bot.answer_callback_query(", "c: c.data.startswith('settings_delete')) def del_settings(c): words = c.data.split() bot.delete_message( c.message.chat.id, words[2] ) bot.delete_message( c.message.chat.id,", "callback_data='kick_bots::{chat_id}'.format(chat_id = chat_id)) keyboard.add(btn) btn = types.InlineKeyboardButton(text = 'Фильтры', callback_data='deletions_settings::{chat_id}'.format(chat_id = chat_id)) keyboard.add(btn)", "reply_markup = generate_broadcast_check_menu_kb() ) bot.answer_callback_query( callback_query_id = c.id, text = 'Изменения подтверждены.' 
)", "c.data.startswith('new_users_restrictions')) def new_users_restrictions(c): chat_id = utils.parse_chat_id(c) bot.edit_message_reply_markup( chat_id = c.message.chat.id, message_id = c.message.message_id,", "text = 'Список ваших групп' ) bot.edit_message_reply_markup( chat_id = c.message.chat.id, message_id = c.message.message_id,", "m += text.group_commands[utils.get_group_lang(chat_id)]['refs_stats']['body'].format( inviter_pos = counter, inviter_id = inviter_info.user.id, inviter_firstname = inviter_info.user.first_name, invited_count", "func = lambda msg: msg.chat.type != 'private' and utils.check_status(msg.from_user.id, msg.chat.id)) def bot_leave(msg): bot.send_message(", "= chat_id)) btn3 = types.InlineKeyboardButton(text = '➕1', callback_data = 'time_ro_+1::{chat_id}'.format(chat_id = chat_id)) btn4", "'private') def bot_set_text(msg): start_time = time.time() message = msg if len(msg.text) not in", "c.message.chat.id, message_id = c.message.message_id, reply_markup = new_users_restrictions_kb(chat_id) ) bot.answer_callback_query( callback_query_id = c.id, text", "msg.chat.id, msg.message_id ) bot.send_message( msg.chat.id, text.group_commands[utils.get_group_lang(msg.chat.id)]['restricted']['url'].format( user_id = msg.from_user.id, user_name = api.replacer(msg.from_user.first_name) ),", "btn1 = types.InlineKeyboardButton(text = 'Мои чаты', callback_data = 'my_chats') btn2 = types.InlineKeyboardButton(text =", "counter, inviter_id = inviter_info.user.id, inviter_firstname = inviter_info.user.first_name, invited_count = int(i['COUNT(`inviter`)']) ) bot.send_message( user_id,", "['users', 'chats', 'all']: curr_bot_settings['broadcast']['check']['recievers'] = arg api.change_bot_settings(secret_config.token, ujson.dumps(curr_bot_settings)) bot.edit_message_reply_markup( chat_id = c.message.chat.id, message_id", "настройки: {}'.format(config.settings_statuses[api.get_group_params(chat_id)['deletions']['files'][cont_type]]) ) 
@bot.callback_query_handler(func = lambda c: c.data.startswith('change_all')) def group_settings_deletions_all(c): chat_id = utils.parse_chat_id(c)", "= 'time_ro_+2::{chat_id}'.format(chat_id = chat_id)) btn5 = types.InlineKeyboardButton(text = 'Навсегда', callback_data = 'time_ro_+10000::{chat_id}'.format(chat_id =", "api.get_group_params(chat_id) settings['restrictions']['read_only'] = config.settings_states[settings['restrictions']['read_only']] api.change_group_params(chat_id, ujson.dumps(settings)) bot.edit_message_reply_markup( chat_id = c.message.chat.id, message_id = c.message.message_id,", "c.message.chat.id, message_id = c.message.message_id, reply_markup = delete_settings(utils.parse_chat_id(c)) ) bot.answer_callback_query( callback_query_id = c.id, text", "'В главное меню', callback_data = 'to_main_menu')) return kb @bot.channel_post_handler(content_types=['text'], func = lambda msg:", "варнов: {}'.format(config.warns_states[curr_settings['warns']['action']]), callback_data='empty_callback::{chat_id}'.format(chat_id = chat_id)) keyboard.add(btn) btn1 = types.InlineKeyboardButton(text = 'Ничего', callback_data =", "t = t ), parse_mode = 'HTML' ) else: bot.reply_to( msg, text.group_commands[utils.get_group_lang(msg.chat.id)]['errors']['prefix'].format( reason", "{ 'users': 'пользователи', 'chats': 'диалоги', 'all': 'все' } btn1 = types.InlineKeyboardButton(text = 'Только", "utils.check_status(msg.from_user.id, msg.chat.id): bot.restrict_chat_member( msg.chat.id, msg.from_user.id, until_date=str(time.time() + ban_time)) bot.reply_to( msg, text.group_commands[utils.get_group_lang(msg.chat.id)]['ban_me_please'].format( t =", "msg.text[len(msg.text):msg.entities[0].length:-1][::-1] if utils.check_text(new_rules): utils.set_rules(msg, new_rules) bot.send_message( msg.chat.id, 'Правила изменены' ) else: bot.send_message( msg.chat.id,", "btn2, btn3) return kb def generate_broadcast_check_menu_kb(): kb = types.InlineKeyboardMarkup(row_width = 3) curr_settings =", 
"c.message.chat.id, message_id = c.message.message_id, text = text.user_messages[lang]['chosen_language']) api.register_new_user(c.from_user, lang) @bot.callback_query_handler(func = lambda c:", "text.group_commands[utils.get_group_lang(chat_id)]['log_channel']['confirmation']['errors']['bot_is_not_admin'] ) except Exception as e: print(e) @bot.message_handler(commands = ['infolog'], func = lambda", "c.id, text = 'Изменения подтверждены. Текущий статус настройки: {}'.format(config.settings_statuses[api.get_group_params(chat_id)['deletions']['url']]) ) else: bot.answer_callback_query( callback_query_id", "перед удалением приветствия: {} сек.'.format(curr_settings['greeting']['delete_timer']), callback_data = 'welcome_get::{chat_id}'.format(chat_id = chat_id)) kb.add(btn) btn1 =", ") # elif utils.check_for_forward(msg) and api.get_group_params(msg.chat.id)['deletions']['forward']: # bot.delete_message( # msg.chat.id, # msg.message_id #", "def bot_check_sticker(msg): start_time = time.time() if utils.is_restricted(msg) or utils.is_sticker_restricted(msg): bot.delete_message( msg.chat.id, msg.message_id )", "msg.chat.id, r.message_id ) bot.delete_message( msg.chat.id, msg.message_id ) @bot.message_handler(commands = ['settings'], func = lambda", "start_time = time.time() message = msg if len(msg.text) not in [9, 21]: new_rules", "user_id ) if user.status in ['restricted']: bot.restrict_chat_member( chat_id, user_id, can_send_media_messages=True, can_add_web_page_previews=True, can_send_messages=True, can_send_other_messages=True", "'admin_menu') def bot_admin_menu(c): bot.edit_message_text( chat_id = c.message.chat.id, message_id = c.message.message_id, text = 'Админка'", ") r = bot.send_message( msg.chat.id, text.group_commands['ru']['restricted']['new_user']['read_only'].format( user_id = msg.new_chat_member.id, user_name = api.replacer(msg.new_chat_member.first_name), ban_time", "WEBHOOK_SSL_PRIV = './webhook_pkey.pem' # Путь к приватному ключу WEBHOOK_URL_BASE = 
\"https://%s:%s\" % (WEBHOOK_HOST,", "func=lambda msg: msg.chat.type != 'private') def bot_new_warn(msg): start_time = time.time() if utils.check_status(msg.from_user.id, msg.chat.id)", ") utils.new_update(msg, time.time()-start_time) # Кнопки @bot.callback_query_handler(func = lambda c: c.data.startswith('get_chat_refs::')) def bot_get_chat_refs(c): chat_id", "logging.getLogger('main_info') report_info = logging.getLogger('reports') if __name__ == '__main__': log_name = 'logs.txt' f =", "= 'HTML' ) utils.new_update(msg, time.time()-start_time) @bot.message_handler(commands = ['unban'], func = lambda msg: msg.chat.type", "bot.restrict_chat_member( chat_id, user_id, can_send_media_messages=True, can_add_web_page_previews=True, can_send_messages=True, can_send_other_messages=True ) bot.edit_message_text( text = text.group_commands[utils.get_group_lang(c.message.chat.id)]['restricted']['new_user']['button_pressed'].format( user_id", "bot.send_message( msg.chat.id, text.group_commands['ru']['restricted']['new_user']['read_only'].format( user_id = msg.new_chat_member.id, user_name = api.replacer(msg.new_chat_member.first_name), ban_time = api.get_group_params(msg.chat.id)['restrictions']['for_time'] ),", "действия.' 
) @bot.callback_query_handler(func = lambda c: c.data.startswith('to_group_settings_menu')) def group_settings_deletions_photo(c): chat_id = utils.parse_chat_id(c) bot.edit_message_reply_markup(", "= config.settings_states[curr_settings['restrictions']['admins_only']], chat_id = chat_id)) keyboard.add(btn) btn = types.InlineKeyboardButton(text = 'Назад', callback_data='to_group_settings_menu::{chat_id}'.format(chat_id =", "= types.InlineKeyboardMarkup(row_width = 1) btn1 = types.InlineKeyboardButton(text = 'Рассылка-проверка', callback_data = 'check_broadcast') btn2", "msg.chat.id, text.user_messages[utils.get_user_lang(msg)]['commands']['version'].format(version = text.VERSION), parse_mode = 'HTML' ) @bot.message_handler(commands = ['set_rules'], func =", "действия.' ) @bot.callback_query_handler(func = lambda c: c.data.startswith('warns_count_')) def ro_time_change(c): change_count = int(c.data.split('_')[2].split('::')[0]) chat_id", "lambda c: c.data.startswith('warns_action_')) def warns_count_change(c): new_mod = int(c.data.split('_')[2].split('::')[0]) chat_id = utils.parse_chat_id(c) if utils.check_status(c.from_user.id,", "= text.group_commands[utils.get_group_lang(msg.chat.id)]['errors']['reasons']['user_is_admin'] ), parse_mode='HTML' ) except Exception as e: logging.error(e) else: utils.ban_user(msg) utils.new_update(msg,", "welcome_settings(c): chat_id = utils.parse_chat_id(c) bot.edit_message_reply_markup( chat_id = c.message.chat.id, message_id = c.message.message_id, reply_markup =", "kb = types.InlineKeyboardMarkup(row_width=1) btn = types.InlineKeyboardButton(text = 'Разблокировать', callback_data = 'unban_new_user::{chat_id}::{user_id}'.format(user_id = msg.new_chat_member.id,", "= types.InlineKeyboardButton(text = '➖1', callback_data = 'warns_count_-1::{chat_id}'.format(chat_id = chat_id)) btn3 = types.InlineKeyboardButton(text =", "msg.chat.id, text.group_commands[utils.get_group_lang(msg.chat.id)]['leave']['question'], reply_markup = 
generate_leave_kb(msg), parse_mode = 'HTML' ) @bot.message_handler(commands = ['rmkb'], func", "utils.have_args(msg): utils.send_err_report(msg, 'no_args_provided') else: utils.send_err_report(msg, 'not_enought_rights') utils.new_update(msg, time.time()-start_time) @bot.message_handler(commands = ['reregister'], func =", "lambda msg: utils.check_status(msg.from_user.id, msg.chat.id)) def bot_set_rules(msg): start_time = time.time() message = msg if", "web.Response() else: return web.Response(status=403) app.router.add_post('/{token}/', handle) def create_user_language_keyboard(): lang_keyboard = types.InlineKeyboardMarkup() for i", "'settings::{chat_id}'.format(chat_id = i['chat_id'])) btns.append(btn) kb.add(*btns) kb.add(types.InlineKeyboardButton(text = 'В главное меню', callback_data = 'to_main_menu'))", "text = 'Эти настройки можно получить в любое время и отправить @f0rden для", "chat_id = c.message.chat.id, message_id = c.message.message_id, reply_markup = generate_broadcast_check_menu_kb() ) bot.answer_callback_query( callback_query_id =", "if i.user.id == user_id: if i.status == 'creator': status1 = True if i.user.id", "def bot_check_text(msg): start_time = time.time() msg_text = msg.text msg_text_low = msg_text.lower() if utils.is_restricted(msg)", "= msg.text[len(msg.text):msg.entities[0].length:-1][::-1] if utils.check_text(new_rules): utils.set_rules(msg, new_rules) bot.send_message( msg.chat.id, 'Правила изменены' ) else: bot.send_message(", "time.time()-start_time) @bot.message_handler(commands = ['reset_settings'], func = lambda msg: msg.chat.type != 'private') def bot_reset_settings(msg):", "стоит', callback_data = 'reset_settings_abort::{chat_id}'.format(chat_id = msg.chat.id))) if utils.check_status(msg.from_user.id, msg.chat.id): bot.send_message( msg.chat.id, 'Вы действительно", "выполнить сброс', callback_data = 'reset_settings_confirmation::{chat_id}'.format(chat_id = msg.chat.id))) kb.add(types.InlineKeyboardButton(text = 'Нет, не стоит', 
callback_data", "msg.chat.type == 'supergroup') def bot_users_ro(msg): start_time = time.time() if utils.check_status(msg.from_user.id, msg.chat.id): utils.read_only(msg) else:", "= 'warns_count_-1::{chat_id}'.format(chat_id = chat_id)) btn3 = types.InlineKeyboardButton(text = '➕1', callback_data = 'warns_count_+1::{chat_id}'.format(chat_id =", "= time.time() uptime = datetime.timedelta(seconds = int(time.time()-start_time)) working_time = datetime.timedelta(seconds = int(time.time()-msg.date)) uptime_str", "parse_mode='HTML' ) # elif utils.check_for_forward(msg) and api.get_group_params(msg.chat.id)['deletions']['forward']: # bot.delete_message( # msg.chat.id, # msg.message_id", "msg.chat.id, message_id = r.message_id, reply_markup = kb ) bot.send_message( msg.from_user.id, '<b>Настройки группы {}</b>'.format(msg.chat.title),", "bot.delete_message( msg.chat.id, msg.message_id ) utils.new_update(msg, time.time()-start_time) # Кнопки @bot.callback_query_handler(func = lambda c: c.data.startswith('get_chat_refs::'))", "- {}'.format(curr_settings['restrictions']['for_time'], config.settings_statuses[curr_settings['restrictions']['read_only']]), callback_data = 'read_only::{chat_id}'.format(chat_id = chat_id)) keyboard.add(btn) btn1 = types.InlineKeyboardButton(text =", "app = web.Application() t = Thread(target = utils.check_deleting_queue) t.start() async def handle(request): if", "c.from_user.id inviters = utils.get_top_inviters(chat_id) m = text.group_commands[utils.get_group_lang(chat_id)]['refs_stats']['header'] counter = 0 for i in", "import web from telebot import types import api import cherrypy import config import", "not None and not utils.check_status(msg.reply_to_message.from_user.id, msg.chat.id): utils.new_warn(msg) elif not utils.check_status(msg.from_user.id, msg.chat.id): utils.send_err_report(msg, 'not_enought_rights')", "else: utils.send_err_report(msg, 'not_enought_rights') utils.new_update(msg, time.time()-start_time) 
@bot.message_handler(commands=['stickerpack_unban'], func=lambda msg: msg.chat.type != 'private') def bot_stickerpack_unban(msg):", "types.InlineKeyboardButton(text = 'Получить топ инвайтеров', callback_data = 'get_chat_refs::{chat_id}'.format(chat_id = chat_id)) keyboard.add(btn) keyboard.add(types.InlineKeyboardButton(text =", "types.InlineKeyboardButton(text = 'Фильтры', callback_data='deletions_settings::{chat_id}'.format(chat_id = chat_id)) keyboard.add(btn) btn = types.InlineKeyboardButton(text = 'Ограничения новых", "'welcome_timer_+10::{chat_id}'.format(chat_id = chat_id)) kb.add(btn1, btn2, btn3, btn4) btn = types.InlineKeyboardButton(text = 'Показать приветствие',", "time.time() if utils.check_status(msg.from_user.id, msg.chat.id) and utils.have_args(msg): stickerpack_name = utils.parse_arg(msg)[1] utils.unban_stickerpack(msg, stickerpack_name) utils.new_update(msg, time.time()-start_time)", "and utils.check_super_user(msg.from_user.id)) def bot_logs(msg): bot.send_document(msg.chat.id, open('logs.txt', 'rb')) @bot.message_handler(commands = ['menu']) def bot_user_menu(msg): bot.send_message(", "'new_restrictions_admins_only_{state}::{chat_id}'.format(state = config.settings_states[curr_settings['restrictions']['admins_only']], chat_id = chat_id)) keyboard.add(btn) btn = types.InlineKeyboardButton(text = 'Назад', callback_data='to_group_settings_menu::{chat_id}'.format(chat_id", "msg: msg.chat.type == 'supergroup') def bot_users_ro(msg): start_time = time.time() if utils.check_status(msg.from_user.id, msg.chat.id): utils.read_only(msg)", "utils.parse_chat_id(c)): utils.change_state_deletions_main(chat_id, 'system') bot.edit_message_reply_markup( chat_id=c.message.chat.id, message_id=c.message.message_id, reply_markup=group_setting(utils.parse_chat_id(c)) ) bot.answer_callback_query( callback_query_id = c.id, text", "'Изменения подтверждены. 
Текущий статус настройки: {}'.format(config.settings_statuses[api.get_group_params(chat_id)['deletions']['system']]) ) else: bot.answer_callback_query( callback_query_id = c.id, show_alert", "text.user_messages[utils.get_user_lang(msg)]['start'], reply_markup=generate_user_menu_kb(msg.from_user.id) ) utils.new_update(msg, time.time()-start_time) @bot.message_handler(commands=['start'], func=lambda msg: msg.chat.type != 'private') def bot_group_start(msg):", "chat = bot.get_chat(msg.chat.id) msg_id = '' if chat.username: if msg.reply_to_message: msg_id = msg.reply_to_message.message_id", "bot_about(msg): start_time = time.time() bot.send_message( msg.chat.id, text.user_messages[utils.get_user_lang(msg)]['about'], parse_mode='HTML' ) utils.new_update(msg, time.time()-start_time) @bot.message_handler(commands=['warn'], func=lambda", "c: c.data.startswith('del_url')) def del_url(c): chat_id = utils.parse_chat_id(c) if utils.check_status(c.from_user.id, utils.parse_chat_id(c)): utils.change_state_deletions_main(chat_id, 'url') bot.edit_message_reply_markup(", "callback_data = 'welcome_state::{chat_id}'.format(chat_id = chat_id)) kb.add(btn) btn = types.InlineKeyboardButton(text = 'Задержка перед удалением", "threading import time from multiprocessing import Process as Thread import telebot from aiohttp", "bot.send_message( msg.chat.id, text.promotion_message, parse_mode='HTML' ) bot.leave_chat( msg.chat.id ) if msg.new_chat_member.id == 495038140: api.change_group_params(msg.chat.id,", "secret_config.channel_ID) def bot_broadcast(msg): r = bot.forward_message(secret_config.official_chat, msg.chat.id, msg.message_id) bot.pin_chat_message( r.chat.id, r.message_id ) @bot.message_handler(commands", "'to_main_menu')) bot.edit_message_reply_markup( chat_id = c.message.chat.id, message_id = c.message.message_id, reply_markup = kb ) t.start()", "config.settings_states[settings['restrictions']['read_only']] api.change_group_params(chat_id, ujson.dumps(settings)) 
bot.edit_message_reply_markup( chat_id = c.message.chat.id, message_id = c.message.message_id, reply_markup = new_users_restrictions_kb(chat_id)", "# 443, 80, 88 или 8443 (порт должен быть открыт!) # На некоторых", "if utils.check_status(msg.from_user.id, msg.chat.id): utils.read_only(msg) else: utils.send_err_report(msg, 'not_enought_rights') utils.new_update(msg, time.time()-start_time) @bot.message_handler(commands=['stickerpack_ban'],func=lambda msg: msg.chat.type ==", "for i in admins: try: bot.send_message( i.user.id, txt.format( group_name = api.replacer(msg.chat.title), group_username =", "time.time() bot.send_message( msg.from_user.id, text.user_messages[utils.get_user_lang(msg)]['help'], parse_mode='HTML' ) utils.new_update(msg, time.time()-start_time) @bot.message_handler(commands=['about'], func=lambda msg: msg.chat.type ==", "недостаточно прав для выполнения этого действия.' ) @bot.callback_query_handler(func = lambda c: c.data.startswith('new_users_restrictions')) def", "= 'welcome_settings::{chat_id}'.format(chat_id = chat_id)) keyboard.add(btn) btn = types.InlineKeyboardButton(text = 'Получить дамп настроек', callback_data", "@bot.callback_query_handler(func = lambda c: c.data.startswith('deletions_settings')) def to_deletions(c): chat_id = utils.parse_chat_id(c) bot.edit_message_reply_markup( chat_id =", "admins = bot.get_chat_administrators(msg.chat.id) chat = bot.get_chat(msg.chat.id) msg_id = '' if chat.username: if msg.reply_to_message:", "= time.time() message = msg kb = types.InlineKeyboardMarkup() r = bot.reply_to( msg, 'Настройки", "user_name = api.replacer(msg.new_chat_member.first_name), ban_time = api.get_group_params(msg.chat.id)['restrictions']['for_time'] ), reply_markup = unban_new_user_kb(msg), parse_mode = 'HTML'", "', datefmt='%m/%d/%Y %I:%M:%S %p', level = logging.INFO ) app = web.Application() t =", "types.InlineKeyboardButton(text = 'Начать рассылку', callback_data = 'broadcast_message::start') kb.add(btn1, btn2, btn3) return 
kb def", "text.group_commands[utils.get_group_lang(chat_id)]['refs_stats']['body'].format( inviter_pos = counter, inviter_id = inviter_info.user.id, inviter_firstname = inviter_info.user.first_name, invited_count = int(i['COUNT(`inviter`)'])", "delete_settings(utils.parse_chat_id(c)) ) bot.answer_callback_query( callback_query_id = c.id, text = 'Переход выполнен.' ) @bot.callback_query_handler(func =", "= lambda msg: msg.chat.type != 'private') def bot_set_text(msg): start_time = time.time() message =", "msg.message_id ) utils.new_update(msg, time.time()-start_time) # Кнопки @bot.callback_query_handler(func = lambda c: c.data.startswith('get_chat_refs::')) def bot_get_chat_refs(c):", "= 'Задержка перед удалением приветствия: {} сек.'.format(curr_settings['greeting']['delete_timer']), callback_data = 'welcome_get::{chat_id}'.format(chat_id = chat_id)) kb.add(btn)", "c.message.message_id, text = text.service_messages['stats'].format( all_users = api.get_users_count(), all_chats = api.get_chats_count(), unblocked_users = api.get_unblocked_users_count(),", "= c.message.chat.id, message_id = c.message.message_id, text = text.user_messages[lang]['chosen_language']) api.register_new_user(c.from_user, lang) @bot.callback_query_handler(func = lambda", "curr_settings = api.get_user_param(msg.chat.id, 'settings') btn = types.InlineKeyboardButton(text = 'Принимать рассылки{}'.format(config.settings_statuses['get_notifications']), callback_data='get_notifications') keyboard.add(btn) btn", "Exception as e: print(e) @bot.message_handler(commands = ['dellog'], func = lambda msg: msg.chat.type in", ") utils.new_update(msg, time.time()-start_time) @bot.message_handler(commands=['report']) def bot_report(msg): start_time = time.time() admins = bot.get_chat_administrators(msg.chat.id) chat", "message_id = c.message.message_id, reply_markup = generate_broadcast_vars_menu_kb() ) @bot.callback_query_handler(func = lambda c: c.data ==", "if uptime.days != 0: uptime_str = 
uptime_str.replace(uptime_str.split(',')[0], utils.get_text_translation(uptime_str.split(',')[0]), 'ru') if working_time.days != 0:", "{} сек.'.format(curr_settings['greeting']['delete_timer']), callback_data = 'welcome_get::{chat_id}'.format(chat_id = chat_id)) kb.add(btn) btn1 = types.InlineKeyboardButton(text = '➖10',", "'welcome_settings::{chat_id}'.format(chat_id = chat_id)) keyboard.add(btn) btn = types.InlineKeyboardButton(text = 'Получить дамп настроек', callback_data =", "utils.unban_user(msg, user_id) elif utils.check_status(msg.from_user.id, msg.chat.id) and msg.reply_to_message is not None: user_id = msg.reply_to_message.from_user.id", "msg: msg.chat.id == secret_config.channel_ID) def bot_broadcast(msg): r = bot.forward_message(secret_config.official_chat, msg.chat.id, msg.message_id) bot.pin_chat_message( r.chat.id,", "int(time.time()-msg.date)) uptime_str = str(uptime).replace('day', 'days').replace('dayss', 'days') working_time_str = str(working_time).replace('day', 'days').replace('dayss', 'days') if uptime.days", "= types.ReplyKeyboardMarkup(one_time_keyboard=True) kb.add(types.KeyboardButton(text='/rmkb')) r = bot.send_message( msg.chat.id, text = text.group_commands[utils.get_group_lang(msg.chat.id)]['remove_keyboard'], reply_markup = kb", "new_state = config.settings_states[curr_state] settings['greeting']['is_enabled'] = new_state api.change_group_params(chat_id, ujson.dumps(settings)) bot.edit_message_reply_markup( chat_id = c.message.chat.id, message_id", "callback_query_id = c.id, show_alert = True, text = 'У вас недостаточно прав для", "из чата', callback_data='leave_cancel::{chat_id}'.format(chat_id = chat_id)) keyboard.add(btn) btn = types.InlineKeyboardButton(text = 'Нет, останься', callback_data='leave_confirm::{chat_id}'.format(chat_id", "= True, text = 'У вас недостаточно прав для выполнения этого действия.' 
)", "= new_mod api.change_group_params(chat_id, ujson.dumps(settings)) bot.edit_message_reply_markup( chat_id = c.message.chat.id, message_id = c.message.message_id, reply_markup =", "меню' ) bot.edit_message_reply_markup( chat_id = c.message.chat.id, message_id = c.message.message_id, reply_markup = generate_user_menu_kb(c.from_user.id) )", "types.InlineKeyboardButton(text = 'Максимальное количество исключений: {}'.format(curr_settings['warns']['count']), callback_data = 'empty_callback::{chat_id}'.format(chat_id = chat_id)) keyboard.add(btn) btn1", "'' if chat.username: if msg.reply_to_message: msg_id = msg.reply_to_message.message_id txt = text.reports_messages['report']['to_admin']['have_username']['reply'] else: msg_id", "'new_chat_members', 'left_chat_member', 'new_chat_title', 'new_chat_photo', 'delete_chat_photo', 'group_chat_created', 'supergroup_chat_created', 'channel_chat_created', 'migrate_to_chat_id', 'migrate_from_chat_id', 'pinned_message' ]) def", "= utils.parse_chat_id(c) if utils.check_status(c.from_user.id, utils.parse_chat_id(c)): utils.change_state_deletions_main(chat_id, 'system') bot.edit_message_reply_markup( chat_id=c.message.chat.id, message_id=c.message.message_id, reply_markup=group_setting(utils.parse_chat_id(c)) ) bot.answer_callback_query(", "chat_id = utils.parse_chat_id(c) bot.edit_message_reply_markup( chat_id = c.message.chat.id, message_id = c.message.message_id, reply_markup = warns_settings_kb(chat_id)", "bot_to_main_menu(c): bot.edit_message_text( chat_id = c.message.chat.id, message_id = c.message.message_id, text = 'Ваше меню' )", "'Принимать рассылки{}'.format(config.settings_statuses[curr_settings['get_notifications']]), callback_data = 'get_notifications::{chat_id}'.format(chat_id = chat_id)) keyboard.add(btn) btn = types.InlineKeyboardButton(text = 'Удалять", "'Действие при максимальном кол-ве варнов: {}'.format(config.warns_states[curr_settings['warns']['action']]), 
callback_data='empty_callback::{chat_id}'.format(chat_id = chat_id)) keyboard.add(btn) btn1 = types.InlineKeyboardButton(text", "= api.replacer(msg.new_chat_member.first_name), ban_time = api.get_group_params(msg.chat.id)['restrictions']['for_time'] ), reply_markup = unban_new_user_kb(msg), parse_mode = 'HTML' )", "generate_admin_menu_kb(): kb = types.InlineKeyboardMarkup(row_width = 2) btn1 = types.InlineKeyboardButton(text = 'Рассылка', callback_data =", "bot.get_chat(msg.chat.id) msg_id = '' if chat.username: if msg.reply_to_message: msg_id = msg.reply_to_message.message_id txt =", "msg.chat.id, msg.from_user.id, until_date=str(time.time() + ban_time)) bot.reply_to( msg, text.group_commands[utils.get_group_lang(msg.chat.id)]['ban_me_please'].format( t = t ), parse_mode", "= 'to_main_menu')) return kb def generate_broadcast_settings_menu_kb(): kb = types.InlineKeyboardMarkup(row_width = 2) btn1 =", "chat_id = utils.parse_chat_id(c) if utils.check_status(c.from_user.id, utils.parse_chat_id(c)): utils.change_state_deletions_main(chat_id, 'url') bot.edit_message_reply_markup( chat_id=c.message.chat.id, message_id=c.message.message_id, reply_markup=group_setting(utils.parse_chat_id(c)) )", "user_id = msg.new_chat_member.id, user_name = msg.new_chat_member.first_name ), parse_mode = 'HTML' ) else: utils.new_user_in_chat(msg)", "= c.message.message_id, reply_markup = delete_settings(chat_id) ) bot.answer_callback_query( callback_query_id = c.id, text = 'Изменения", "c.message.chat.id, message_id = c.message.message_id, text = 'Список ваших групп' ) bot.edit_message_reply_markup( chat_id =", "= c.message.chat.id, message_id = c.message.message_id, text = 'Рассылка начата' ) bot.edit_message_reply_markup( chat_id =", "api.change_group_params(msg.chat.id, ujson.dumps(config.default_group_settings)) else: if api.get_group_params(msg.chat.id)['restrictions']['read_only']: bot.restrict_chat_member( msg.chat.id, msg.new_chat_member.id, until_date = 
int(time.time()+api.get_group_params(msg.chat.id)['restrictions']['for_time']*3600) ) r", "этого действия. Текущий статус настройки: {}'.format(config.settings_statuses[api.get_group_params(chat_id)[c.data.split('::')[0]]]) ) @bot.callback_query_handler(func = lambda c: c.data.startswith('deletions_settings')) def", "'Изменения подтверждены.' ) @bot.callback_query_handler(func = lambda c: c.data.startswith('welcome_state')) def welcome_settings_state(c): chat_id = utils.parse_chat_id(c)", "= Thread(target = utils.make_broadcast, kwargs = { 'is_test': True, 'receivers': curr_bot_settings['broadcast']['check']['recievers'], 'cont_type': 'text',", "chat_id = utils.parse_chat_id(c) bot.send_message( chat_id = c.from_user.id, text = 'Эти настройки можно получить", "utils.check_status(msg.from_user.id, msg.chat.id)) def bot_check_sticker(msg): start_time = time.time() if utils.is_restricted(msg) or utils.is_sticker_restricted(msg): bot.delete_message( msg.chat.id,", "if msg_text_low.startswith('разбан'): if utils.check_super_user(msg.from_user.id): utils.global_unban(msg) elif msg_text.lower() in ['глобал бан']: if utils.check_super_user(msg.from_user.id): utils.global_ban(msg)", "Текущий статус настройки: {}'.format(config.settings_statuses[api.get_group_params(chat_id)[c.data.split('::')[0]]]) ) @bot.callback_query_handler(func = lambda c: c.data.startswith('deletions_settings')) def to_deletions(c): chat_id", "= utils.parse_chat_id(c) bot.send_message( chat_id = c.from_user.id, text = 'Эти настройки можно получить в", "if utils.check_status(c.from_user.id, utils.parse_chat_id(c)): settings = api.get_group_params(chat_id) settings['restrictions']['for_time'] = settings['restrictions']['for_time'] + change_time if settings['restrictions']['for_time']", "в чат: {}'.format(config.settings_statuses[curr_settings['greeting']['is_enabled']]), callback_data = 'welcome_state::{chat_id}'.format(chat_id = chat_id)) kb.add(btn) btn = types.InlineKeyboardButton(text =", 
"callback_query_id = c.id, text = 'Изменения подтверждены.' ) else: bot.answer_callback_query( callback_query_id = c.id,", "= i['code']))) return lang_keyboard def group_setting(chat_id): keyboard = types.InlineKeyboardMarkup(row_width=1) curr_settings = api.get_group_params(chat_id) btn", "btn4) btn = types.InlineKeyboardButton(text = 'Показать приветствие', callback_data = 'welcome_get::{chat_id}'.format(chat_id = chat_id)) kb.add(btn)", "def bot_broadcast(msg): r = bot.forward_message(secret_config.official_chat, msg.chat.id, msg.message_id) bot.pin_chat_message( r.chat.id, r.message_id ) @bot.message_handler(commands =['setlog'],", "and not utils.check_log(msg.chat.id) ) def bot_set_log(msg): user_id = msg.from_user.id try: admins = bot.get_chat_administrators(msg.forward_from_chat.id)", "bot.get_me() telebot_logger = logging.getLogger('telebot') sqlite_info = logging.getLogger('sqlite') main_info = logging.getLogger('main_info') report_info = logging.getLogger('reports')", "@bot.message_handler(commands=['stickerpack_unban'], func=lambda msg: msg.chat.type != 'private') def bot_stickerpack_unban(msg): start_time = time.time() if utils.check_status(msg.from_user.id,", "utils.check_status(msg.from_user.id, msg.chat.id) and msg.reply_to_message is not None and not utils.check_status(msg.reply_to_message.from_user.id, msg.chat.id): utils.new_warn(msg) elif", ") @bot.callback_query_handler(func = lambda c: c.data.startswith('to_group_settings_menu')) def group_settings_deletions_photo(c): chat_id = utils.parse_chat_id(c) bot.edit_message_reply_markup( chat_id", "del_warns(c): user_id = utils.parse_user_id(c) chat_id = utils.parse_chat_id(c) if utils.check_status(c.from_user.id, utils.parse_chat_id(c)): api.zeroing_warns(user_id, chat_id) bot.edit_message_text(", "= 'HTML' ) bot.edit_message_reply_markup( chat_id = c.message.chat.id, message_id = c.message.message_id, reply_markup = group_setting(chat_id),", "bot.delete_message( msg.chat.id, msg.message_id ) 
bot.send_message( msg.chat.id, text.group_commands[utils.get_group_lang(msg.chat.id)]['restricted']['url'].format( user_id = msg.from_user.id, user_name = api.replacer(msg.from_user.first_name)", "= user_id)) kb.add(btn) return kb def unban_new_user_kb(msg): kb = types.InlineKeyboardMarkup(row_width=1) btn = types.InlineKeyboardButton(text", "if msg.chat.type == 'channel': bot.send_message( msg.chat.id, text.promotion_message, parse_mode='HTML' ) bot.leave_chat( msg.chat.id ) if", "'welcome_get::{chat_id}'.format(chat_id = chat_id)) kb.add(btn) btn1 = types.InlineKeyboardButton(text = '➖10', callback_data = 'welcome_timer_-10::{chat_id}'.format(chat_id =", "= c.id, text = 'Переход выполнен' ) @bot.callback_query_handler(func = lambda c: c.data.startswith('settings::')) def", "chat_id)) keyboard.add(btn) btn = types.InlineKeyboardButton(text = 'Удалять системные сообщения{}'.format(config.settings_statuses[curr_settings['deletions']['system']]), callback_data = 'del_system::{chat_id}'.format(chat_id =", "btn3 = types.InlineKeyboardButton(text = '➕1', callback_data = 'warns_count_+1::{chat_id}'.format(chat_id = chat_id)) btn4 = types.InlineKeyboardButton(text", "text.user_messages[utils.get_user_lang(msg)]['help'], parse_mode='HTML' ) utils.new_update(msg, time.time()-start_time) @bot.message_handler(commands=['about'], func=lambda msg: msg.chat.type == 'private') def bot_about(msg):", "return keyboard def welcome_settings_kb(chat_id): kb = types.InlineKeyboardMarkup(row_width = 4) curr_settings = api.get_group_params(chat_id) btn", "= 'Показать приветствие', callback_data = 'welcome_get::{chat_id}'.format(chat_id = chat_id)) kb.add(btn) btn = types.InlineKeyboardButton(text =", "msg: utils.check_status(msg.from_user.id, msg.chat.id)) def bot_set_rules(msg): start_time = time.time() message = msg if len(msg.text)", "utils.global_ban(msg) elif not utils.check_status(msg.from_user.id, msg.chat.id): # if utils.is_new_in_chat(msg) and 
api.get_group_params(msg.chat.id)['restrict_new'] == '1': if", "lambda c: c.data == 'change_lang') def bot_change_lang(c): user_id = c.from_user.id bot.edit_message_text( chat_id =", "new_mod api.change_group_params(chat_id, ujson.dumps(settings)) bot.edit_message_reply_markup( chat_id = c.message.chat.id, message_id = c.message.message_id, reply_markup = warns_settings_kb(chat_id)", "= api.get_unblocked_chats_count() ) ) @bot.callback_query_handler(func = lambda c: c.data == 'change_lang') def bot_change_lang(c):", ") bot.answer_callback_query( c.id, text = 'Настройки отправлены', show_alert = True ) @bot.callback_query_handler(func =", "callback_query_id = c.id, text = 'Изменения подтверждены.' ) @bot.callback_query_handler(func = lambda c: c.data.startswith('warns_action_'))", "text = 'Переход выполнен' ) @bot.callback_query_handler(func = lambda c: c.data.startswith('get_settings_json')) def bot_get_settings_json(c): chat_id", "all_users = api.get_users_count(), all_chats = api.get_chats_count(), unblocked_users = api.get_unblocked_users_count(), unblocked_chats = api.get_unblocked_chats_count() )", "chat_id = chat_id)) keyboard.add(btn) btn = types.InlineKeyboardButton(text = 'Назад', callback_data='to_group_settings_menu::{chat_id}'.format(chat_id = chat_id)) keyboard.add(btn)", "words[1] bot.edit_message_text( chat_id = c.message.chat.id, message_id = c.message.message_id, text = text.user_messages[lang]['chosen_language']) api.register_new_user(c.from_user, lang)", "@bot.message_handler(commands = ['menu']) def bot_user_menu(msg): bot.send_message( msg.from_user.id, 'Ваше меню', reply_markup = generate_user_menu_kb(msg.from_user.id) )", "chat_id = c.message.chat.id, message_id = c.message.message_id, text = 'Выберите тип рассылки' ) bot.edit_message_reply_markup(", "import logging import random import re import ssl import subprocess import threading import", "btn1 = types.InlineKeyboardButton(text = '➖2', callback_data = 
'time_ro_-2::{chat_id}'.format(chat_id = chat_id)) btn2 = types.InlineKeyboardButton(text", "chat_id)) keyboard.add(btn) btn = types.InlineKeyboardButton(text = 'Фильтры', callback_data='deletions_settings::{chat_id}'.format(chat_id = chat_id)) keyboard.add(btn) btn =", "c.data.startswith('warns_count_')) def ro_time_change(c): change_count = int(c.data.split('_')[2].split('::')[0]) chat_id = utils.parse_chat_id(c) if utils.check_status(c.from_user.id, utils.parse_chat_id(c)): settings", "text.group_commands[utils.get_group_lang(msg.chat.id)]['log_channel']['info']['is_on'].format( chat_id = utils.get_log_id(msg.chat.id), chat_name = bot.get_chat(utils.get_log_id(msg.chat.id)).title ) else: m = text.group_commands[utils.get_group_lang(msg.chat.id)]['log_channel']['info']['is_off'] bot.send_message(", "utils.check_status(c.from_user.id, utils.parse_chat_id(c)): utils.change_state_deletions_files(chat_id, cont_type) bot.edit_message_reply_markup( chat_id = c.message.chat.id, message_id = c.message.message_id, reply_markup =", "status1 is True and status2 is True: utils.remove_log_channel(msg.chat.id) elif status1 is not True:", "= time.time() utils.kick_user(msg) utils.new_update(msg, time.time()-start_time) @bot.message_handler(commands = ['ban', 'ban_me_please'], func = lambda msg:", "bot.restrict_chat_member( msg.chat.id, msg.new_chat_member.id, until_date = int(time.time()+api.get_group_params(msg.chat.id)['restrictions']['for_time']*3600) ) r = bot.send_message( msg.chat.id, text.group_commands['ru']['restricted']['new_user']['read_only'].format( user_id", "'Список ваших групп' ) bot.edit_message_reply_markup( chat_id = c.message.chat.id, message_id = c.message.message_id, reply_markup =", "= c.message.chat.id, message_id = c.message.message_id, reply_markup = warns_settings_kb(chat_id) ) bot.answer_callback_query( callback_query_id = c.id,", "= types.InlineKeyboardButton(text = 'Принимать рассылки{}'.format(config.settings_statuses['get_notifications']), 
callback_data='get_notifications') keyboard.add(btn) btn = types.InlineKeyboardButton(text = 'Выбор языка'.format(config.settings_statuses['get_notifications']),", "btn2 = types.InlineKeyboardButton(text = 'Статистика', callback_data = 'stats_menu') kb.add(btn1, btn2) kb.add(types.InlineKeyboardButton(text = 'В", "+ ban_time)) bot.reply_to( msg, text.group_commands[utils.get_group_lang(msg.chat.id)]['ban_me_please'].format( t = t ), parse_mode = 'HTML' )", "btn2, btn3) kb.add(btn4, btn5) return kb def generate_user_groups(user_id): kb = types.InlineKeyboardMarkup(row_width=2) user_settings =", "прав для выполнения этого действия.' ) @bot.callback_query_handler(func = lambda c: c.data.startswith('welcome_timer')) def welcome_timer_change(c):", "c.id, text = 'Настройки отправлены', show_alert = True ) @bot.callback_query_handler(func = lambda c:", ") bot.leave_chat( msg.chat.id ) if msg.new_chat_member.id == 495038140: api.change_group_params(msg.chat.id, ujson.dumps(config.default_group_settings)) else: if api.get_group_params(msg.chat.id)['restrictions']['read_only']:", ") bot.edit_message_reply_markup( chat_id = c.message.chat.id, message_id = c.message.message_id, reply_markup = create_user_language_keyboard() ) bot.answer_callback_query(", "mysql_info = logging.getLogger('mysql') main_info = logging.getLogger('main_info') report_info = logging.getLogger('reports') print('Список логгеров создан') logging.basicConfig(", "= 'to_groups_list')) return keyboard def welcome_settings_kb(chat_id): kb = types.InlineKeyboardMarkup(row_width = 4) curr_settings =", "time.time() bot.send_message( msg.chat.id, text.group_commands['ru']['donate'], parse_mode = 'HTML' ) utils.new_update(msg, time.time()-start_time) @bot.message_handler(commands = ['get_id'])", "настройки: {}'.format(config.settings_statuses[api.get_group_params(chat_id)['deletions']['system']]) ) else: bot.answer_callback_query( callback_query_id = c.id, show_alert = True, text =", "time.time() if 
utils.have_args(msg) and utils.check_status(msg.from_user.id, msg.chat.id): sticker_id = utils.parse_arg(msg)[1] utils.unban_sticker(msg, sticker_id) elif utils.check_status(msg.from_user.id,", "as e: print(e) @bot.message_handler(commands = ['infolog'], func = lambda msg: msg.chat.type in ['group',", "= '➕1', callback_data = 'warns_count_+1::{chat_id}'.format(chat_id = chat_id)) btn4 = types.InlineKeyboardButton(text = '➕2', callback_data", "bot_sticker_unban(msg): start_time = time.time() if utils.have_args(msg) and utils.check_status(msg.from_user.id, msg.chat.id): sticker_id = utils.parse_arg(msg)[1] utils.unban_sticker(msg,", "btn1 = types.InlineKeyboardButton(text = 'Рассылка-проверка', callback_data = 'check_broadcast') btn2 = types.InlineKeyboardButton(text = 'Рассылка", "'private') def bot_get_rules(msg): start_time = time.time() try: bot.send_message( msg.from_user.id, utils.generate_rules_text(msg), parse_mode = 'HTML'", "= chat_id)) btn4 = types.InlineKeyboardButton(text = '➕10', callback_data = 'welcome_timer_+10::{chat_id}'.format(chat_id = chat_id)) kb.add(btn1,", "utils.parse_chat_id(c)): # settings = api.get_group_params(chat_id) # settings[''] # api.change_group_params(chat_id, ) # Вебхук bot.remove_webhook()", "lambda c: c.data.startswith('change_all')) def group_settings_deletions_all(c): chat_id = utils.parse_chat_id(c) if utils.check_status(c.from_user.id, utils.parse_chat_id(c)): for i", "utils.send_err_report(msg, 'not_enought_rights') utils.new_update(msg, time.time()-start_time) @bot.message_handler(commands=['stickerpack_unban'], func=lambda msg: msg.chat.type != 'private') def bot_stickerpack_unban(msg): start_time", "'Бан', callback_data = 'warns_action_2::{chat_id}'.format(chat_id = chat_id)) btn4 = types.InlineKeyboardButton(text = 'Read-only на сутки',", "= 1) btn1 = types.InlineKeyboardButton(text = 'Рассылка-проверка', callback_data = 'check_broadcast') btn2 = types.InlineKeyboardButton(text", "= 'Назад', 
callback_data='to_group_settings_menu::{chat_id}'.format(chat_id = chat_id)) kb.add(btn) return kb def new_users_restrictions_kb(chat_id): keyboard = types.InlineKeyboardMarkup(row_width", "chat_id = c.message.chat.id, message_id = c.message.message_id ) utils.add_to_delete_queue(chat_id, c.message.message_id, api.get_group_params(chat_id)['greeting']['delete_timer']) else: bot.answer_callback_query( callback_query_id", "None and utils.check_status(msg.from_user.id, msg.chat.id) and not utils.check_log(msg.chat.id) ) def bot_set_log(msg): user_id = msg.from_user.id", "не являетесь администратором. Текущий статус настройки: {}'.format(config.settings_statuses[api.get_group_params(chat_id)['deletions']['url']]) ) @bot.callback_query_handler(func = lambda c: c.data.startswith('del_system'))", "'get_settings_json::{chat_id}'.format(chat_id = chat_id)) keyboard.add(btn) btn = types.InlineKeyboardButton(text = 'Получить топ инвайтеров', callback_data =", "msg.message_id ) if msg.chat.type == 'channel': bot.send_message( msg.chat.id, text.promotion_message, parse_mode='HTML' ) bot.leave_chat( msg.chat.id", "= c.id, text = 'Изменения подтверждены. 
Текущий статус настройки: {}'.format(config.settings_statuses[api.get_group_params(chat_id)['deletions']['url']]) ) else: bot.answer_callback_query(", "= cont_type, chat_id = chat_id)) keyboard.add(btn) btn = types.InlineKeyboardButton(text = 'Переключить все', callback_data", "if api.get_group_params(msg.chat.id)['deletions']['system']: bot.delete_message( msg.chat.id, msg.message_id ) if msg.chat.type == 'channel': bot.send_message( msg.chat.id, text.promotion_message,", "== 'supergroup') def bot_sticker_ban(msg): start_time = time.time() if utils.check_status(msg.from_user.id, msg.chat.id): sticker_id = msg.reply_to_message.sticker.file_id", "def ro_time_change(c): change_time = int(c.data.split('_')[2].split('::')[0]) chat_id = utils.parse_chat_id(c) if utils.check_status(c.from_user.id, utils.parse_chat_id(c)): settings =", "c: c.data.startswith('welcome_timer')) def welcome_timer_change(c): change_count = int(c.data.split('_')[2].split('::')[0]) chat_id = utils.parse_chat_id(c) if utils.check_status(c.from_user.id, utils.parse_chat_id(c)):", "msg: msg.chat.type != 'private' and utils.check_status(msg.from_user.id, msg.chat.id)) def bot_leave(msg): bot.send_message( msg.chat.id, text.group_commands[utils.get_group_lang(msg.chat.id)]['leave']['question'], reply_markup", "статус настройки: {}'.format(config.settings_statuses[api.get_group_params(chat_id)['deletions']['url']]) ) @bot.callback_query_handler(func = lambda c: c.data.startswith('del_system')) def del_system(c): chat_id =", "'migrate_to_chat_id', 'migrate_from_chat_id', 'pinned_message' ]) def bot_check_system(msg): start_time = time.time() if api.get_group_params(msg.chat.id)['deletions']['system']: bot.delete_message( msg.chat.id,", "btn1 = types.InlineKeyboardButton(text = 'Ничего', callback_data = 'warns_action_0::{chat_id}'.format(chat_id = chat_id)) btn2 = types.InlineKeyboardButton(text", "lambda c: c.data.startswith('warns_del')) def del_warns(c): user_id = utils.parse_user_id(c) chat_id = 
utils.parse_chat_id(c) if utils.check_status(c.from_user.id,", "curr_bot_settings['broadcast']['check']['recievers'], 'cont_type': 'text', 'msg_text': '', 'file_id': '', 'user_id': c.from_user.id, 'message_id': c.message.message_id } )", "= lambda msg: msg.chat.type == 'supergroup') def bot_user_unban(msg): start_time = time.time() if utils.check_status(msg.from_user.id,", "reply_markup = kb ) t.start() t.join() @bot.callback_query_handler(func = lambda c: c.data == 'admin_menu')", "lambda c: c.data.startswith('reset_settings')) def reset_settings_button(c): chat_id = utils.parse_chat_id(c) if utils.check_status(c.from_user.id, utils.parse_chat_id(c)): if c.data.startswith('reset_settings_confirmation'):", "chat_id)) keyboard.add(btn) return keyboard def generate_user_menu_kb(user_id): kb = types.InlineKeyboardMarkup(row_width = 1) btn1 =", "is not None and utils.check_status(msg.from_user.id, msg.chat.id) and not utils.check_log(msg.chat.id) ) def bot_set_log(msg): user_id", "@bot.message_handler(content_types=[ 'new_chat_members', 'left_chat_member', 'new_chat_title', 'new_chat_photo', 'delete_chat_photo', 'group_chat_created', 'supergroup_chat_created', 'channel_chat_created', 'migrate_to_chat_id', 'migrate_from_chat_id', 'pinned_message' ])", "message_id = c.message.message_id, reply_markup = create_user_language_keyboard() ) bot.answer_callback_query( callback_query_id = c.id, text =", "start_time = time.time() if utils.is_restricted(msg) or utils.is_sticker_restricted(msg): bot.delete_message( msg.chat.id, msg.message_id ) utils.new_update(msg, time.time()-start_time)", "c.data.startswith('reset_settings_confirmation'): api.register_new_chat(c.message.chat) api.change_group_params(chat_id, ujson.dumps(config.default_group_settings)) bot.send_message( c.message.chat.id, 'Настройки сброшены.' 
) bot.delete_message( c.message.chat.id, c.message.message_id )", "c: c.data.startswith('welcome_get')) def get_welcome_text(c): chat_id = utils.parse_chat_id(c) bot.send_message( c.message.chat.id, utils.get_greeting(chat_id), parse_mode = 'HTML'", "elif utils.check_global_ban(msg): bot.kick_chat_member( msg.chat.id, msg.new_chat_member.id ) bot.send_message( msg.chat.id, text.group_commands['ru']['restricted']['global_ban'].format( user_id = msg.new_chat_member.id, user_name", "not utils.check_status(msg.reply_to_message.from_user.id, msg.chat.id): utils.new_warn(msg) elif not utils.check_status(msg.from_user.id, msg.chat.id): utils.send_err_report(msg, 'not_enought_rights') elif utils.check_status(msg.reply_to_message.from_user.id, msg.chat.id):", "types.InlineKeyboardButton(text = 'Удалять ссылки{}'.format(config.settings_statuses[curr_settings['deletions']['url']]), callback_data = 'del_url::{chat_id}'.format(chat_id = chat_id)) keyboard.add(btn) btn = types.InlineKeyboardButton(text", "'Задержка перед удалением приветствия: {} сек.'.format(curr_settings['greeting']['delete_timer']), callback_data = 'welcome_get::{chat_id}'.format(chat_id = chat_id)) kb.add(btn) btn1", "= utils.get_top_inviters(chat_id) m = text.group_commands[utils.get_group_lang(chat_id)]['refs_stats']['header'] counter = 0 for i in inviters: inviter_info", "callback_data = 'delete_warns::{user_id}'.format(user_id = user_id)) kb.add(btn) return kb def unban_new_user_kb(msg): kb = types.InlineKeyboardMarkup(row_width=1)", "@bot.message_handler(commands=['about'], func=lambda msg: msg.chat.type == 'private') def bot_about(msg): start_time = time.time() bot.send_message( msg.chat.id,", "types.InlineKeyboardButton(text = '➕10', callback_data = 'welcome_timer_+10::{chat_id}'.format(chat_id = chat_id)) kb.add(btn1, btn2, btn3, btn4) btn", "На некоторых серверах придется указывать такой же IP, что и выше WEBHOOK_LISTEN =", 
"text.group_commands[utils.get_group_lang(msg.chat.id)]['errors']['reasons']['user_is_admin'] ), parse_mode='HTML' ) except Exception as e: logging.error(e) else: utils.ban_user(msg) utils.new_update(msg, time.time()-start_time)", "@bot.callback_query_handler(func = lambda c: c.data == 'admin_menu') def bot_admin_menu(c): bot.edit_message_text( chat_id = c.message.chat.id,", "if api.get_group_params(msg.chat.id)['restrictions']['read_only']: bot.restrict_chat_member( msg.chat.id, msg.new_chat_member.id, until_date = int(time.time()+api.get_group_params(msg.chat.id)['restrictions']['for_time']*3600) ) r = bot.send_message( msg.chat.id,", "= chat_id)) keyboard.add(btn) btn = types.InlineKeyboardButton(text = 'Получить дамп настроек', callback_data = 'get_settings_json::{chat_id}'.format(chat_id", "выполнения этого действия.' ) @bot.callback_query_handler(func = lambda c: c.data.startswith('new_restrictions_admins_only_')) def warns_count_change(c): chat_id =", "kb def generate_broadcast_check_menu_kb(): kb = types.InlineKeyboardMarkup(row_width = 3) curr_settings = ujson.loads(api.get_bot_settings(secret_config.token)) s =", "= types.InlineKeyboardButton(text = 'Статистика', callback_data = 'stats_menu') kb.add(btn1, btn2) kb.add(types.InlineKeyboardButton(text = 'В главное", "bot_admin_menu(c): bot.edit_message_text( chat_id = c.message.chat.id, message_id = c.message.message_id, text = 'Админка' ) bot.edit_message_reply_markup(", "# @bot.callback_query_handler(func = lambda c: c.data.startswith('settings_captcha')) # def change_captcha_settings(c): # chat_id = utils.parse_chat_id(c)", "= utils.parse_chat_id(c) bot.edit_message_reply_markup( chat_id = c.message.chat.id, message_id = c.message.message_id, reply_markup = warns_settings_kb(chat_id) )", "msg.chat.type in ['group', 'supergroup'] and msg.forward_from_chat is not None and utils.check_status(msg.from_user.id, msg.chat.id) and", "kb = types.InlineKeyboardMarkup(row_width=1) btn = 
types.InlineKeyboardButton(text = 'Удалить предупреждения', callback_data = 'delete_warns::{user_id}'.format(user_id =", "text.group_commands[utils.get_group_lang(chat_id)]['log_channel']['confirmation']['errors']['bot_is_not_admin'] ) except Exception as e: print(e) @bot.message_handler(commands = ['dellog'], func = lambda", "import cherrypy import config import secret_config import text import ujson import utils WEBHOOK_HOST", "c.data.startswith('warns_settings')) def warns_count_change(c): chat_id = utils.parse_chat_id(c) bot.edit_message_reply_markup( chat_id = c.message.chat.id, message_id = c.message.message_id,", "btn = types.InlineKeyboardButton(text = 'Разблокировать', callback_data = 'unban_new_user::{chat_id}::{user_id}'.format(user_id = msg.new_chat_member.id, chat_id = msg.chat.id))", "chat_id, user_id ) if user.status in ['restricted']: bot.restrict_chat_member( chat_id, user_id, can_send_media_messages=True, can_add_web_page_previews=True, can_send_messages=True,", "msg.chat.id, # text. 
# ) @bot.message_handler(commands = ['version']) def bot_version(msg): bot.send_message( msg.chat.id, text.user_messages[utils.get_user_lang(msg)]['commands']['version'].format(version", "= 'get_chat_refs::{chat_id}'.format(chat_id = chat_id)) keyboard.add(btn) keyboard.add(types.InlineKeyboardButton(text = 'К списку групп', callback_data = 'to_groups_list'))", "working_time = datetime.timedelta(seconds = int(time.time()-msg.date)) uptime_str = str(uptime).replace('day', 'days').replace('dayss', 'days') working_time_str = str(working_time).replace('day',", "settings['warns']['count'] + change_count if settings['warns']['count'] < 1: settings['warns']['count'] = 1 api.change_group_params(chat_id, ujson.dumps(settings)) bot.edit_message_reply_markup(", "callback_data = 'broadcast_menu') btn2 = types.InlineKeyboardButton(text = 'Статистика', callback_data = 'stats_menu') kb.add(btn1, btn2)", "выйди из чата', callback_data='leave_cancel::{chat_id}'.format(chat_id = chat_id)) keyboard.add(btn) btn = types.InlineKeyboardButton(text = 'Нет, останься',", "chat_id, user_id, can_send_media_messages=True, can_add_web_page_previews=True, can_send_messages=True, can_send_other_messages=True ) bot.edit_message_text( text = text.group_commands[utils.get_group_lang(c.message.chat.id)]['restricted']['new_user']['button_pressed'].format( user_id =", "'reset_settings_confirmation::{chat_id}'.format(chat_id = msg.chat.id))) kb.add(types.InlineKeyboardButton(text = 'Нет, не стоит', callback_data = 'reset_settings_abort::{chat_id}'.format(chat_id = msg.chat.id)))", ") else: bot.send_message( msg.chat.id, text = 'Данное приветствие не работает' ) utils.new_update(msg, time.time()-start_time)", "@bot.message_handler(commands=['stickerpack_ban'],func=lambda msg: msg.chat.type == 'supergroup') def bot_stickerpack_ban(msg): start_time = time.time() if utils.check_status(msg.from_user.id, msg.chat.id):", "else: bot.send_message( msg.chat.id, text = 'Данное приветствие не работает' ) 
utils.new_update(msg, time.time()-start_time) @bot.message_handler(commands=['kick'],", "= types.InlineKeyboardButton(text = '➖5', callback_data = 'welcome_timer_-5::{chat_id}'.format(chat_id = chat_id)) btn3 = types.InlineKeyboardButton(text =", "chat_id = c.message.chat.id, message_id = c.message.message_id, reply_markup=group_setting(utils.parse_chat_id(c)) ) bot.answer_callback_query( callback_query_id = c.id, text", "c.message.chat.id, message_id = c.message.message_id, reply_markup = generate_broadcast_check_menu_kb() ) bot.answer_callback_query( callback_query_id = c.id, text", "= c.data.split('_')[4].split('::')[0] if utils.check_status(c.from_user.id, utils.parse_chat_id(c)): settings = api.get_group_params(chat_id) settings['restrictions']['admins_only'] = utils.to_bool(state) api.change_group_params(chat_id, ujson.dumps(settings))", "btn3, btn4) keyboard.add(btn5, btn6) btn = types.InlineKeyboardButton(text = 'Снятие ограничений разрешено для: {}'.format(config.new_users[curr_settings['restrictions']['admins_only']]),", "msg.chat.id)) kb.add(btn) return kb def user_settings_main_menu(msg): keyboard = types.InlineKeyboardMarkup(row_width=1) curr_settings = api.get_user_param(msg.chat.id, 'settings')", "c.message.message_id, text = 'Ваше меню' ) bot.edit_message_reply_markup( chat_id = c.message.chat.id, message_id = c.message.message_id,", "статус настройки: {}'.format(config.settings_statuses[api.get_group_params(chat_id)[c.data.split('::')[0]]]) ) @bot.callback_query_handler(func = lambda c: c.data.startswith('deletions_settings')) def to_deletions(c): chat_id =", "chat_id)) keyboard.add(btn) btn = types.InlineKeyboardButton(text = 'Назад', callback_data='to_group_settings_menu::{chat_id}'.format(chat_id = chat_id)) keyboard.add(btn) return keyboard", "bot.answer_callback_query( callback_query_id = c.id, text = 'Изменения подтверждены. 
Текущий статус настройки: {}'.format(config.settings_statuses[api.get_group_params(chat_id)['deletions']['system']]) )", "подтверждены. Статус настройки: {}'.format(config.settings_statuses[api.get_group_params(chat_id)['deletions']['files'][cont_type]]) ) else: bot.answer_callback_query( callback_query_id = c.id, show_alert = True,", "'HTML' ) @bot.message_handler(commands = ['leave'], func = lambda msg: msg.chat.type != 'private' and", "= types.InlineKeyboardMarkup(row_width = 4) curr_settings = api.get_group_params(chat_id) btn = types.InlineKeyboardButton(text = 'Отправлять приветствие", "print(e) bot.reply_to( msg, text.reports_messages['report']['to_user'], parse_mode = 'HTML' ) utils.new_update(msg, time.time()-start_time) @bot.message_handler(commands = ['unban'],", ") bot.answer_callback_query( callback_query_id = c.id, text = 'Переход выполнен' ) @bot.callback_query_handler(func = lambda", "bot.answer_callback_query( callback_query_id = c.id, text = 'Изменения подтверждены. Текущий статус настройки: {}'.format(config.settings_statuses[api.get_group_params(chat_id)['deletions']['url']]) )", "= msg kb = types.InlineKeyboardMarkup() r = bot.reply_to( msg, 'Настройки отправлены вам в", "datetime import logging import random import re import ssl import subprocess import threading", "status1 is not True: bot.send_message( msg.chat.id, text = text.group_commands[utils.get_group_lang(chat_id)]['log_channel']['confirmation']['errors']['user_is_not_creator'] ) elif status2 is", "start_time = time.time() bot.reply_to(msg, \"<code>'{}': '{}',</code>\".format(msg.photo[0].file_id, msg.caption), parse_mode ='HTML') utils.new_update(msg, time.time()-start_time) @bot.message_handler(content_types =", "utils.parse_user_id(c) chat_id = utils.parse_chat_id(c) if utils.check_status(c.from_user.id, utils.parse_chat_id(c)): api.zeroing_warns(user_id, chat_id) bot.edit_message_text( text = 'Предупреждения", "parse_mode = 'HTML' ) utils.add_to_delete_queue(msg.chat.id, 
r.message_id, api.get_group_params(msg.chat.id)['restrictions']['for_time']*3600) if msg.new_chat_member.is_bot and api.get_group_params(msg.chat.id)['kick_bots']: bot.kick_chat_member( msg.chat.id,", "= web.Application() t = Thread(target = utils.check_deleting_queue) t.start() async def handle(request): if request.match_info.get('token')", "= types.InlineKeyboardButton(text = '➕5', callback_data = 'welcome_timer_+5::{chat_id}'.format(chat_id = chat_id)) btn4 = types.InlineKeyboardButton(text =", "bot.send_message( msg.chat.id, text.user_messages[utils.get_user_lang(msg)]['about'], parse_mode='HTML' ) utils.new_update(msg, time.time()-start_time) @bot.message_handler(commands=['warn'], func=lambda msg: msg.chat.type != 'private')", "= lambda c: c.data.startswith('get_settings_json')) def bot_get_settings_json(c): chat_id = utils.parse_chat_id(c) bot.send_message( chat_id = c.from_user.id,", "main_info = logging.getLogger('main_info') report_info = logging.getLogger('reports') print('Список логгеров создан') logging.basicConfig( format='%(filename)s [LINE:%(lineno)-3d]# %(levelname)-8s", "msg.new_chat_member.id ) bot.send_message( msg.chat.id, text.group_commands['ru']['restricted']['global_ban'].format( user_id = msg.new_chat_member.id, user_name = msg.new_chat_member.first_name ), parse_mode", "= 'settings::{chat_id}'.format(chat_id = i['chat_id'])) btns.append(btn) kb.add(*btns) kb.add(types.InlineKeyboardButton(text = 'В главное меню', callback_data =", "if utils.need_greeting(msg): r = bot.send_message( msg.chat.id, utils.generate_welcome_text(msg), parse_mode='HTML' ) utils.add_to_delete_queue(msg.chat.id, r.message_id, api.get_group_params(msg.chat.id)['greeting']['delete_timer']) utils.new_update(msg,", "= utils.parse_user_id(c) if api.get_group_params(chat_id)['restrictions']['admins_only']: if utils.check_status(c.from_user.id, utils.parse_chat_id(c)): utils.unban_user_button(c) user = bot.get_chat_member( chat_id, user_id", "r.message_id ) 
bot.delete_message( msg.chat.id, msg.message_id ) @bot.message_handler(commands = ['settings'], func = lambda msg:", ") @bot.callback_query_handler(func = lambda c: c.data.startswith('del_system')) def del_system(c): chat_id = utils.parse_chat_id(c) if utils.check_status(c.from_user.id,", "= 'Начать рассылку', callback_data = 'broadcast_check::start') kb.add(btn1, btn2, btn3) kb.add(btn4, btn5) return kb", "api.get_group_params(chat_id) btn = types.InlineKeyboardButton(text = 'Максимальное количество исключений: {}'.format(curr_settings['warns']['count']), callback_data = 'empty_callback::{chat_id}'.format(chat_id =", "получить в любое время и отправить @f0rden для восстановления их, в случае сбоя:\\n'+ujson.dumps(api.get_group_params(chat_id))", ") else: if c.from_user.id == user_id or utils.check_status(c.from_user.id, utils.parse_chat_id(c)): user = bot.get_chat_member( chat_id,", "не являетесь администратором. Текущий статус настройки: {}'.format(config.settings_statuses[api.get_group_params(chat_id)[c.data.split('::')[0]]]) ) @bot.callback_query_handler(func = lambda c: c.data.startswith('del_url'))", "btn3, btn4) btn = types.InlineKeyboardButton(text = 'Действие при максимальном кол-ве варнов: {}'.format(config.warns_states[curr_settings['warns']['action']]), callback_data='empty_callback::{chat_id}'.format(chat_id", "callback_data = 'warns_action_0::{chat_id}'.format(chat_id = chat_id)) btn2 = types.InlineKeyboardButton(text = 'Кик', callback_data = 'warns_action_1::{chat_id}'.format(chat_id", "def welcome_settings_kb(chat_id): kb = types.InlineKeyboardMarkup(row_width = 4) curr_settings = api.get_group_params(chat_id) btn = types.InlineKeyboardButton(text", "'to_main_menu')) return kb def generate_broadcast_settings_menu_kb(): kb = types.InlineKeyboardMarkup(row_width = 2) btn1 = types.InlineKeyboardButton(text", "keyboard.add(btn) btn = types.InlineKeyboardButton(text = 'Ограничения новых пользователей', callback_data = 
'new_users_restrictions::{chat_id}'.format(chat_id = chat_id))", "text = text.group_commands[utils.get_group_lang(chat_id)]['log_channel']['confirmation']['errors']['bot_is_not_admin'] ) except Exception as e: print(e) @bot.message_handler(commands = ['dellog'], func", "= bot.send_message( msg.chat.id, utils.generate_welcome_text(msg), parse_mode='HTML' ) utils.add_to_delete_queue(msg.chat.id, r.message_id, api.get_group_params(msg.chat.id)['greeting']['delete_timer']) utils.new_update(msg, time.time()-start_time) @bot.message_handler(content_types=[ 'new_chat_members',", "%(message)-50s ', datefmt='%m/%d/%Y %I:%M:%S %p', level = logging.INFO ) app = web.Application() t", "and not check_status(msg.from_user.id): utils.send_err_report(msg, 'no_args_provided') utils.new_update(msg, time.time()-start_time) @bot.message_handler(commands=['help']) def bot_help(msg): start_time = time.time()", "text.group_commands['ru']['restricted']['global_ban'].format( user_id = msg.new_chat_member.id, user_name = msg.new_chat_member.first_name ), parse_mode = 'HTML' ) else:", "c: c.data == 'change_lang') def bot_change_lang(c): user_id = c.from_user.id bot.edit_message_text( chat_id = c.message.chat.id,", "bot_leave(msg): bot.send_message( msg.chat.id, text.group_commands[utils.get_group_lang(msg.chat.id)]['leave']['question'], reply_markup = generate_leave_kb(msg), parse_mode = 'HTML' ) @bot.message_handler(commands =", "utils.check_status(msg.from_user.id, msg.chat.id): utils.read_only(msg) else: utils.send_err_report(msg, 'not_enought_rights') utils.new_update(msg, time.time()-start_time) @bot.message_handler(commands=['stickerpack_ban'],func=lambda msg: msg.chat.type == 'supergroup')", "chat_id = msg.chat.id, message_id = r.message_id, reply_markup = kb ) bot.send_message( msg.from_user.id, '<b>Настройки", "= 0 api.change_group_params(chat_id, ujson.dumps(settings)) bot.edit_message_reply_markup( chat_id = c.message.chat.id, message_id = c.message.message_id, reply_markup =", "= 
lambda msg: msg.chat.type in ['group', 'supergroup']) def bot_info_log(msg): if utils.check_log(msg.chat.id): m =", "'Настройки отправлены', show_alert = True ) @bot.callback_query_handler(func = lambda c: c.data == 'stats_menu')", "bot_users_new(msg): start_time = time.time() api.register_new_chat(msg.chat) chat_id = msg.chat.id utils.new_member_logs(msg) if api.get_group_params(msg.chat.id)['deletions']['system']: bot.delete_message( msg.chat.id,", "'Только чаты', callback_data = 'broadcast_check::chats') btn3 = types.InlineKeyboardButton(text = 'Все', callback_data = 'broadcast_check::all')", "utils.parse_chat_id(c)): utils.change_state_deletions_main(chat_id, 'url') bot.edit_message_reply_markup( chat_id=c.message.chat.id, message_id=c.message.message_id, reply_markup=group_setting(utils.parse_chat_id(c)) ) bot.answer_callback_query( callback_query_id = c.id, text", "and msg.reply_to_message is not None and not utils.check_status(msg.reply_to_message.from_user.id, msg.chat.id): utils.new_warn(msg) elif not utils.check_status(msg.from_user.id,", "= 'К списку групп', callback_data = 'to_groups_list')) return keyboard def welcome_settings_kb(chat_id): kb =", "== 303986717) def bot_text(msg): start_time = time.time() bot.reply_to(msg, \"<code>'{}': '{}',</code>\".format(msg.photo[0].file_id, msg.caption), parse_mode ='HTML')", "utils.set_rules(msg, new_rules) bot.send_message( msg.chat.id, 'Правила изменены' ) else: bot.send_message( msg.chat.id, text = 'Правила", "= msg.new_chat_member.id, user_name = msg.new_chat_member.first_name ), parse_mode = 'HTML' ) else: utils.new_user_in_chat(msg) if", "status2 = False for i in admins: if i.user.id == user_id: if i.status", "= await request.json() update = telebot.types.Update.de_json(request_body_dict) bot.process_new_updates([update]) return web.Response() else: return web.Response(status=403) app.router.add_post('/{token}/',", "print('Список логгеров создан') logging.basicConfig( format='%(filename)s 
[LINE:%(lineno)-3d]# %(levelname)-8s - %(name)-9s [%(asctime)s] - %(message)-50s ',", "= types.InlineKeyboardButton(text = 'Получить дамп настроек', callback_data = 'get_settings_json::{chat_id}'.format(chat_id = chat_id)) keyboard.add(btn) btn", "utils.new_voteban(msg) # bot.send_message( # msg.chat.id, # text. # ) @bot.message_handler(commands = ['version']) def", "reply_markup = generate_leave_kb(msg), parse_mode = 'HTML' ) @bot.message_handler(commands = ['rmkb'], func = lambda", "составлены неверно' ) utils.new_update(msg, time.time()-start_time) @bot.message_handler(commands = ['rules'], func = lambda msg: msg.chat.type", "= msg.chat.id))) kb.add(types.InlineKeyboardButton(text = 'Нет, не стоит', callback_data = 'reset_settings_abort::{chat_id}'.format(chat_id = msg.chat.id))) if", "until_date = int(time.time()+api.get_group_params(msg.chat.id)['restrictions']['for_time']*3600) ) r = bot.send_message( msg.chat.id, text.group_commands['ru']['restricted']['new_user']['read_only'].format( user_id = msg.new_chat_member.id, user_name", "c.data.startswith('broadcast_check')) def bot_broadcast_check(c): arg = c.data.split('::')[1] curr_bot_settings = ujson.loads(api.get_bot_settings(secret_config.token)) if arg in ['users',", "c.data == 'to_main_menu') def bot_to_main_menu(c): bot.edit_message_text( chat_id = c.message.chat.id, message_id = c.message.message_id, text", "else: bot.send_message( msg.chat.id, text.user_messages[utils.get_user_lang(msg)]['start'], reply_markup=generate_user_menu_kb(msg.from_user.id) ) utils.new_update(msg, time.time()-start_time) @bot.message_handler(commands=['start'], func=lambda msg: msg.chat.type !=", "and utils.check_status(msg.from_user.id, msg.chat.id) and msg.forward_from_chat.id == utils.get_log_id(msg.chat.id) and utils.check_log(msg.chat.id) ) def bot_del_log(msg): print(1)", "else: msg_id = msg.message_id txt = text.reports_messages['report']['to_admin']['have_username']['no_reply'] else: txt = 
text.reports_messages['report']['to_admin']['no_username'] for i", "Thread(target = utils.make_broadcast, kwargs = { 'is_test': True, 'receivers': curr_bot_settings['broadcast']['check']['recievers'], 'cont_type': 'text', 'msg_text':", "reply_markup = new_users_restrictions_kb(chat_id) ) bot.answer_callback_query( callback_query_id = c.id, text = 'Изменения подтверждены.' )", "bot_ping(msg) subprocess.run(\"timedatectl set-time '{time}'\".format(time = datetime.datetime.fromtimestamp(msg.date+1).strftime(\"%Y-%m-%d %H:%M:%S\")), shell=True) bot_ping(msg) @bot.message_handler(content_types=['text'], func = lambda", "c.message.message_id, reply_markup = new_users_restrictions_kb(chat_id) ) @bot.callback_query_handler(func = lambda c: c.data.startswith('read_only')) def new_users_ro(c): chat_id", "lambda c: c.data.startswith('leave_')) def bot_leave_cb(c): if utils.check_status(c.from_user.id, utils.parse_chat_id(c)): if c.data.endswith('confirm'): bot.delete_message( c.message.chat.id, c.message.message_id", "def del_system(c): chat_id = utils.parse_chat_id(c) if utils.check_status(c.from_user.id, utils.parse_chat_id(c)): utils.change_state_deletions_main(chat_id, 'system') bot.edit_message_reply_markup( chat_id=c.message.chat.id, message_id=c.message.message_id,", "ro_time_change(c): change_time = int(c.data.split('_')[2].split('::')[0]) chat_id = utils.parse_chat_id(c) if utils.check_status(c.from_user.id, utils.parse_chat_id(c)): settings = api.get_group_params(chat_id)", "settings['warns']['count'] = settings['warns']['count'] + change_count if settings['warns']['count'] < 1: settings['warns']['count'] = 1 api.change_group_params(chat_id,", "# user_name = api.replacer(msg.from_user.first_name) # ), # parse_mode='HTML' # ) utils.new_update(msg, time.time()-start_time) @bot.message_handler(content_types=['photo'],", "msg_id = '' if chat.username: if msg.reply_to_message: msg_id = msg.reply_to_message.message_id txt = 
text.reports_messages['report']['to_admin']['have_username']['reply']", "= ['settings'], func = lambda msg: msg.chat.type == 'supergroup') def bot_answ(msg): start_time =", "[%(asctime)s] - %(message)-50s ', datefmt='%m/%d/%Y %I:%M:%S %p', level = logging.INFO ) app =", "i in admins: try: bot.send_message( i.user.id, txt.format( group_name = api.replacer(msg.chat.title), group_username = chat.username,", ") @bot.message_handler(commands = ['version']) def bot_version(msg): bot.send_message( msg.chat.id, text.user_messages[utils.get_user_lang(msg)]['commands']['version'].format(version = text.VERSION), parse_mode =", "подтверждены. Текущий статус настройки: {}'.format(config.settings_statuses[api.get_group_params(chat_id)['deletions']['url']]) ) else: bot.answer_callback_query( callback_query_id = c.id, show_alert =", "types.InlineKeyboardButton(text = 'Исключать ботов{}'.format(config.settings_statuses[curr_settings['kick_bots']]), callback_data='kick_bots::{chat_id}'.format(chat_id = chat_id)) keyboard.add(btn) btn = types.InlineKeyboardButton(text = 'Фильтры',", "= utils.parse_chat_id(c) if utils.check_status(c.from_user.id, utils.parse_chat_id(c)): settings = api.get_group_params(chat_id) curr_state = settings['greeting']['is_enabled'] new_state =", "'HTML' ) @bot.callback_query_handler(func = lambda c: c.data.startswith('reset_settings')) def reset_settings_button(c): chat_id = utils.parse_chat_id(c) if", "= 'reset_settings_confirmation::{chat_id}'.format(chat_id = msg.chat.id))) kb.add(types.InlineKeyboardButton(text = 'Нет, не стоит', callback_data = 'reset_settings_abort::{chat_id}'.format(chat_id =", "and msg.reply_to_message is not None: user_id = msg.reply_to_message.from_user.id utils.unban_user(msg, user_id) elif utils.check_status(msg.from_user.id, msg.chat.id)", "= types.InlineKeyboardMarkup() kb.add(types.InlineKeyboardButton(text = 'В главное меню', callback_data = 'to_main_menu')) bot.edit_message_reply_markup( chat_id =", "chat_id = 
c.message.chat.id, message_id = c.message.message_id ) else: bot.answer_callback_query( callback_query_id = c.id, show_alert", "state = c.data.split('_')[4].split('::')[0] if utils.check_status(c.from_user.id, utils.parse_chat_id(c)): settings = api.get_group_params(chat_id) settings['restrictions']['admins_only'] = utils.to_bool(state) api.change_group_params(chat_id,", "int(c.data.split('_')[2].split('::')[0]) chat_id = utils.parse_chat_id(c) if utils.check_status(c.from_user.id, utils.parse_chat_id(c)): settings = api.get_group_params(chat_id) settings['greeting']['delete_timer'] = settings['greeting']['delete_timer']", "= bot.get_chat(utils.get_log_id(msg.chat.id)).title ) else: m = text.group_commands[utils.get_group_lang(msg.chat.id)]['log_channel']['info']['is_off'] bot.send_message( msg.chat.id, m, parse_mode = 'HTML'", "главное меню', callback_data = 'to_main_menu')) bot.edit_message_reply_markup( chat_id = c.message.chat.id, message_id = c.message.message_id, reply_markup", "def generate_user_groups(user_id): kb = types.InlineKeyboardMarkup(row_width=2) user_settings = ujson.loads(api.get_user_param(user_id, 'settings')) btns = [] for", "= i['title'], callback_data = 'settings::{chat_id}'.format(chat_id = i['chat_id'])) btns.append(btn) kb.add(*btns) kb.add(types.InlineKeyboardButton(text = 'В главное", "= chat_id)) kb.add(btn) btn = types.InlineKeyboardButton(text = 'Назад', callback_data='to_group_settings_menu::{chat_id}'.format(chat_id = chat_id)) kb.add(btn) return", "msg.new_chat_member.is_bot and api.get_group_params(msg.chat.id)['kick_bots']: bot.kick_chat_member( msg.chat.id, msg.new_chat_member.id ) bot.send_message( msg.chat.id, text.group_commands['ru']['restricted']['bot'], parse_mode = 'HTML',", "btn3, btn4) btn = types.InlineKeyboardButton(text = 'Назад', callback_data='to_group_settings_menu::{chat_id}'.format(chat_id = chat_id)) keyboard.add(btn) return keyboard", "if msg.text == '/ban_me_please': t = random.randint(1, 10) ban_time = 60*t 
try: if", "msg.chat.type == 'supergroup') def bot_sticker_unban(msg): start_time = time.time() if utils.have_args(msg) and utils.check_status(msg.from_user.id, msg.chat.id):", "if i.status == 'creator': status1 = True if i.user.id == my_info.id: status2 =", "bot.send_message( msg.chat.id, text.group_commands[utils.get_group_lang(msg.chat.id)]['leave']['question'], reply_markup = generate_leave_kb(msg), parse_mode = 'HTML' ) @bot.message_handler(commands = ['rmkb'],", "msg.chat.id == -1001236256304 and utils.check_super_user(msg.from_user.id)) def bot_logs(msg): bot.send_document(msg.chat.id, open('logs.txt', 'rb')) @bot.message_handler(commands = ['menu'])", "= bot.get_chat(msg.chat.id) msg_id = '' if chat.username: if msg.reply_to_message: msg_id = msg.reply_to_message.message_id txt", "msg.chat.id): bot.send_message( msg.chat.id, 'Вы действительно хотите сбросить настройки?', reply_markup = kb ) @bot.message_handler(commands", "api.get_unblocked_chats_count() ) ) @bot.callback_query_handler(func = lambda c: c.data == 'change_lang') def bot_change_lang(c): user_id", "рассылки' ) bot.edit_message_reply_markup( chat_id = c.message.chat.id, message_id = c.message.message_id, reply_markup = generate_broadcast_vars_menu_kb() )", "generate_user_groups(user_id): kb = types.InlineKeyboardMarkup(row_width=2) user_settings = ujson.loads(api.get_user_param(user_id, 'settings')) btns = [] for i", "if utils.is_restricted(msg) and not utils.check_status(msg.from_user.id, msg.chat.id): bot.delete_message( msg.chat.id, msg.message_id ) if msg_text_low.startswith('разбан'): if", "start_time = time.time() if utils.check_status(msg.from_user.id, msg.chat.id) and utils.have_args(msg): words = utils.parse_arg(msg)[1] user_id =", "btn = types.InlineKeyboardButton(text = 'Удалить предупреждения', callback_data = 'delete_warns::{user_id}'.format(user_id = user_id)) kb.add(btn) return", "= ['get_logs'], func = lambda msg: msg.chat.id == -1001236256304 and 
utils.check_super_user(msg.from_user.id)) def bot_logs(msg):", "= chat_id)) keyboard.add(btn) btn = types.InlineKeyboardButton(text = 'Удалять системные сообщения{}'.format(config.settings_statuses[curr_settings['deletions']['system']]), callback_data = 'del_system::{chat_id}'.format(chat_id", "'Сброс', callback_data = 'time_ro_-10000000000::{chat_id}'.format(chat_id = chat_id)) keyboard.add(btn1, btn2, btn3, btn4) keyboard.add(btn5, btn6) btn", "= c.message.chat.id, message_id = c.message.message_id, text = 'Ваше меню' ) bot.edit_message_reply_markup( chat_id =", "'Удалить предупреждения', callback_data = 'delete_warns::{user_id}'.format(user_id = user_id)) kb.add(btn) return kb def unban_new_user_kb(msg): kb", "chat_id = c.message.chat.id, message_id = c.message.message_id, reply_markup = generate_admin_menu_kb() ) @bot.callback_query_handler(func=lambda c: c.data.startswith('lang::'))", "= c.id, show_alert = True, text = 'У вас недостаточно прав для выполнения", "chat.username: if msg.reply_to_message: msg_id = msg.reply_to_message.message_id txt = text.reports_messages['report']['to_admin']['have_username']['reply'] else: msg_id = msg.message_id", "{}'.format(config.settings_statuses[curr_settings['greeting']['is_enabled']]), callback_data = 'welcome_state::{chat_id}'.format(chat_id = chat_id)) kb.add(btn) btn = types.InlineKeyboardButton(text = 'Задержка перед", "and msg.forward_from_chat is not None and utils.check_status(msg.from_user.id, msg.chat.id) and not utils.check_log(msg.chat.id) ) def", "api.register_new_user(msg.from_user, 'ru') else: bot.send_message( msg.chat.id, text.user_messages[utils.get_user_lang(msg)]['start'], reply_markup=generate_user_menu_kb(msg.from_user.id) ) utils.new_update(msg, time.time()-start_time) @bot.message_handler(commands=['start'], func=lambda msg:", "предупреждений', callback_data = 'warns_settings::{chat_id}'.format(chat_id = chat_id)) keyboard.add(btn) btn = types.InlineKeyboardButton(text = 'Настройка приветствий',", 
") else: m = text.group_commands[utils.get_group_lang(msg.chat.id)]['log_channel']['info']['is_off'] bot.send_message( msg.chat.id, m, parse_mode = 'HTML' ) @bot.message_handler(commands", "= c.from_user.id user_settings = api.get_user_param(user_id, 'settings') bot.edit_message_text( chat_id = c.message.chat.id, message_id = c.message.message_id,", "callback_query_id = c.id, text = 'Изменения подтверждены. Статус настройки: {}'.format(config.settings_statuses[api.get_group_params(chat_id)['deletions']['files'][cont_type]]) ) else: bot.answer_callback_query(", "= 'Изменения подтверждены. Текущий статус настройки: {}'.format(config.settings_statuses[api.get_group_params(chat_id)['deletions']['url']]) ) else: bot.answer_callback_query( callback_query_id = c.id,", "return keyboard def delete_settings(chat_id): keyboard = types.InlineKeyboardMarkup(row_width=1) curr_settings = api.get_group_params(chat_id) for cont_type in", "bot_check_system(msg): start_time = time.time() if api.get_group_params(msg.chat.id)['deletions']['system']: bot.delete_message( msg.chat.id, msg.message_id ) utils.new_update(msg, time.time()-start_time) @bot.message_handler(commands=['report'])", "chat_id = msg.chat.id keyboard = types.InlineKeyboardMarkup(row_width=1) btn = types.InlineKeyboardButton(text = 'Да, выйди из", "c.message.message_id ) bot.send_message( c.message.chat.id, 'Сброс отменен' ) @bot.callback_query_handler(func = lambda c: c.data.startswith('leave_')) def", "parse_mode = 'HTML', reply_markup = types.ReplyKeyboardRemove() ) elif utils.check_global_ban(msg): bot.kick_chat_member( msg.chat.id, msg.new_chat_member.id )", "== 'supergroup') def bot_sticker_unban(msg): start_time = time.time() if utils.have_args(msg) and utils.check_status(msg.from_user.id, msg.chat.id): sticker_id", ") @bot.callback_query_handler(func = lambda c: c.data.startswith('kick_bots')) def kick_bots(c): chat_id = utils.parse_chat_id(c) if utils.check_status(c.from_user.id,", "msg.chat.id, msg.message_id 
) utils.new_update(msg, time.time()-start_time) # Кнопки @bot.callback_query_handler(func = lambda c: c.data.startswith('get_chat_refs::')) def", "message_id = c.message.message_id ) utils.add_to_delete_queue(msg.chat.id, r.message_id, api.get_group_params(msg.chat.id)['greeting']['delete_timer']) else: bot.answer_callback_query( callback_query_id = c.id, show_alert", "bot.send_message( msg.chat.id, text.group_commands['ru']['donate'], parse_mode = 'HTML' ) utils.new_update(msg, time.time()-start_time) @bot.message_handler(commands = ['get_id']) def", ") @bot.callback_query_handler(func = lambda c: c.data.startswith('new_restrictions_admins_only_')) def warns_count_change(c): chat_id = utils.parse_chat_id(c) state =", "else: if api.get_group_params(msg.chat.id)['restrictions']['read_only']: bot.restrict_chat_member( msg.chat.id, msg.new_chat_member.id, until_date = int(time.time()+api.get_group_params(msg.chat.id)['restrictions']['for_time']*3600) ) r = bot.send_message(", "= types.InlineKeyboardButton(text = 'Изменить язык', callback_data = 'change_lang') kb.add(btn1, btn2) if utils.check_super_user(user_id): kb.add(types.InlineKeyboardButton(text", "= 'broadcast_check::chats') btn3 = types.InlineKeyboardButton(text = 'Все', callback_data = 'broadcast_check::all') btn4 = types.InlineKeyboardButton(text", "txt.format( group_name = api.replacer(msg.chat.title), group_username = chat.username, message_id = msg_id, user_id = msg.from_user.id,", "'message_id': c.message.message_id } ) kb = types.InlineKeyboardMarkup() kb.add(types.InlineKeyboardButton(text = 'В главное меню', callback_data", "bot.send_message( msg.from_user.id, '<b>Настройки группы {}</b>'.format(msg.chat.title), reply_markup=group_setting(msg.chat.id), parse_mode='HTML' ) utils.new_update(msg, time.time()-start_time) @bot.message_handler(commands=['start'], func=lambda msg:", "reply_markup = delete_settings(chat_id) ) bot.answer_callback_query( callback_query_id = c.id, text = 'Изменения 
подтверждены.' )", "utils.check_status(msg.from_user.id, msg.chat.id) and not utils.check_log(msg.chat.id) ) def bot_set_log(msg): user_id = msg.from_user.id try: admins", "utils.check_super_user(msg.from_user.id): utils.global_unban(msg) elif msg_text.lower() in ['глобал бан']: if utils.check_super_user(msg.from_user.id): utils.global_ban(msg) elif not utils.check_status(msg.from_user.id,", "\"/%s/\" % (secret_config.token) start_time = int(time.time()) bot = telebot.TeleBot(token = secret_config.token) my_info =", "c.message.message_id, api.get_group_params(chat_id)['greeting']['delete_timer']) else: bot.answer_callback_query( callback_query_id = c.id, show_alert = True, text = 'У", "message_id = c.message.message_id, text = text.user_messages[lang]['chosen_language']) api.register_new_user(c.from_user, lang) @bot.callback_query_handler(func = lambda c: c.data.startswith('get_notifications'))", "= chat_id)) keyboard.add(btn1, btn2, btn3, btn4) btn = types.InlineKeyboardButton(text = 'Действие при максимальном", "c: c.data.startswith('leave_')) def bot_leave_cb(c): if utils.check_status(c.from_user.id, utils.parse_chat_id(c)): if c.data.endswith('confirm'): bot.delete_message( c.message.chat.id, c.message.message_id )", "'Ваше меню' ) bot.edit_message_reply_markup( chat_id = c.message.chat.id, message_id = c.message.message_id, reply_markup = generate_user_menu_kb(c.from_user.id)", "c.data.startswith('new_restrictions_admins_only_')) def warns_count_change(c): chat_id = utils.parse_chat_id(c) state = c.data.split('_')[4].split('::')[0] if utils.check_status(c.from_user.id, utils.parse_chat_id(c)): settings", "= types.InlineKeyboardButton(text = 'Начать рассылку', callback_data = 'broadcast_check::start') kb.add(btn1, btn2, btn3) kb.add(btn4, btn5)", "btn3 = types.InlineKeyboardButton(text = '➕5', callback_data = 'welcome_timer_+5::{chat_id}'.format(chat_id = chat_id)) btn4 = types.InlineKeyboardButton(text", "!= 0: uptime_str = 
uptime_str.replace(uptime_str.split(',')[0], utils.get_text_translation(uptime_str.split(',')[0]), 'ru') if working_time.days != 0: working_time_str =", "t ), parse_mode = 'HTML' ) else: bot.reply_to( msg, text.group_commands[utils.get_group_lang(msg.chat.id)]['errors']['prefix'].format( reason = text.group_commands[utils.get_group_lang(msg.chat.id)]['errors']['reasons']['user_is_admin']", "= msg.from_user.id, # user_name = api.replacer(msg.from_user.first_name) # ), # parse_mode='HTML' # ) utils.new_update(msg,", "kb.add(types.InlineKeyboardButton(text = 'Нет, не стоит', callback_data = 'reset_settings_abort::{chat_id}'.format(chat_id = msg.chat.id))) if utils.check_status(msg.from_user.id, msg.chat.id):", "settings['restrictions']['read_only'] = config.settings_states[settings['restrictions']['read_only']] api.change_group_params(chat_id, ujson.dumps(settings)) bot.edit_message_reply_markup( chat_id = c.message.chat.id, message_id = c.message.message_id, reply_markup", "), # parse_mode='HTML' # ) utils.new_update(msg, time.time()-start_time) @bot.message_handler(content_types=['photo'], func = lambda msg: msg.chat.id", "= types.InlineKeyboardButton(text = '➖1', callback_data = 'time_ro_-1::{chat_id}'.format(chat_id = chat_id)) btn3 = types.InlineKeyboardButton(text =", "'HTML' ) bot.answer_callback_query( c.id, text = 'Список отправлен', show_alert = True ) @bot.callback_query_handler(func", "= c.message.message_id, text = 'Список ваших групп' ) bot.edit_message_reply_markup( chat_id = c.message.chat.id, message_id", "@bot.callback_query_handler(func = lambda c: c.data.startswith('del_system')) def del_system(c): chat_id = utils.parse_chat_id(c) if utils.check_status(c.from_user.id, utils.parse_chat_id(c)):", "@bot.callback_query_handler(func = lambda c: c.data.startswith('to_group_settings_menu')) def group_settings_deletions_photo(c): chat_id = utils.parse_chat_id(c) bot.edit_message_reply_markup( chat_id =", "= c.message.message_id, 
reply_markup=group_setting(utils.parse_chat_id(c)) ) bot.answer_callback_query( callback_query_id = c.id, text = 'Изменения подтверждены.' )", "types.InlineKeyboardButton(text = '➕1', callback_data = 'time_ro_+1::{chat_id}'.format(chat_id = chat_id)) btn4 = types.InlineKeyboardButton(text = '➕2',", "'check_broadcast') btn2 = types.InlineKeyboardButton(text = 'Рассылка сообщения', callback_data = 'broadcast_settings') kb.add(btn1, btn2) kb.add(types.InlineKeyboardButton(text", "bot.edit_message_reply_markup( chat_id = c.message.chat.id, message_id = c.message.message_id, reply_markup = group_setting(chat_id), ) @bot.callback_query_handler(func =", "kb.add(*btns) kb.add(types.InlineKeyboardButton(text = 'В главное меню', callback_data = 'to_main_menu')) return kb @bot.channel_post_handler(content_types=['text'], func", "chat_id)) btn2 = types.InlineKeyboardButton(text = '➖1', callback_data = 'warns_count_-1::{chat_id}'.format(chat_id = chat_id)) btn3 =", "api.get_group_params(chat_id) settings['warns']['action'] = new_mod api.change_group_params(chat_id, ujson.dumps(settings)) bot.edit_message_reply_markup( chat_id = c.message.chat.id, message_id = c.message.message_id,", "api.get_group_params(msg.chat.id)['restrict_new'] == '1': if utils.check_for_urls(msg) and api.get_group_params(msg.chat.id)['deletions']['url']: bot.delete_message( msg.chat.id, msg.message_id ) bot.send_message( msg.chat.id,", "time.time()-start_time) @bot.message_handler(commands=['donate']) def bot_donate(msg): start_time = time.time() bot.send_message( msg.chat.id, text.group_commands['ru']['donate'], parse_mode = 'HTML'", "= 'time_ro_-10000000000::{chat_id}'.format(chat_id = chat_id)) keyboard.add(btn1, btn2, btn3, btn4) keyboard.add(btn5, btn6) btn = types.InlineKeyboardButton(text", "callback_data = 'broadcast_message::input') btn2 = types.InlineKeyboardButton(text = 'Просмотреть сообщение', callback_data = 'broadcast_message::show') btn3", "[] for i in user_settings['admined_groups']: btn 
= types.InlineKeyboardButton(text = i['title'], callback_data = 'settings::{chat_id}'.format(chat_id", "btn3 = types.InlineKeyboardButton(text = 'Начать рассылку', callback_data = 'broadcast_message::start') kb.add(btn1, btn2, btn3) return", "время и отправить @f0rden для восстановления их, в случае сбоя:\\n'+ujson.dumps(api.get_group_params(chat_id)) ) bot.answer_callback_query( c.id,", "bot.edit_message_reply_markup( chat_id = c.message.chat.id, message_id = c.message.message_id, reply_markup = generate_broadcast_check_menu_kb() ) bot.answer_callback_query( callback_query_id", "invited_count = int(i['COUNT(`inviter`)']) ) bot.send_message( user_id, m, parse_mode = 'HTML' ) bot.answer_callback_query( c.id,", "del_settings(c): words = c.data.split() bot.delete_message( c.message.chat.id, words[2] ) bot.delete_message( c.message.chat.id, words[1] ) @bot.callback_query_handler(func", "bot.edit_message_reply_markup( chat_id = c.message.chat.id, message_id = c.message.message_id, reply_markup = delete_settings(utils.parse_chat_id(c)) ) bot.answer_callback_query( callback_query_id", "random import re import ssl import subprocess import threading import time from multiprocessing", "if status1 is True and status2 is True: utils.remove_log_channel(msg.chat.id) elif status1 is not", "bot_logs(msg): bot.send_document(msg.chat.id, open('logs.txt', 'rb')) @bot.message_handler(commands = ['menu']) def bot_user_menu(msg): bot.send_message( msg.from_user.id, 'Ваше меню',", "i in inviters: inviter_info = bot.get_chat_member(chat_id, i['inviter']) counter += 1 m += text.group_commands[utils.get_group_lang(chat_id)]['refs_stats']['body'].format(", "c: c.data.startswith('deletions_settings')) def to_deletions(c): chat_id = utils.parse_chat_id(c) bot.edit_message_reply_markup( chat_id = c.message.chat.id, message_id =", "keyboard.add(btn) btn = types.InlineKeyboardButton(text = 'Удалять системные сообщения{}'.format(config.settings_statuses[curr_settings['deletions']['system']]), 
callback_data = 'del_system::{chat_id}'.format(chat_id = chat_id))", "utils.send_err_report(msg, 'user_is_admin') utils.new_update(msg, time.time()-start_time) @bot.message_handler(commands=['donate']) def bot_donate(msg): start_time = time.time() bot.send_message( msg.chat.id, text.group_commands['ru']['donate'],", ") ) @bot.callback_query_handler(func = lambda c: c.data == 'change_lang') def bot_change_lang(c): user_id =", "'У вас недостаточно прав для выполнения этого действия.' ) @bot.callback_query_handler(func = lambda c:", "time.time()-start_time) @bot.message_handler(commands=['start'], func=lambda msg: msg.chat.type != 'private') def bot_group_start(msg): start_time = time.time() api.register_new_chat(msg.chat)", "logging.getLogger('reports') print('Список логгеров создан') logging.basicConfig( format='%(filename)s [LINE:%(lineno)-3d]# %(levelname)-8s - %(name)-9s [%(asctime)s] - %(message)-50s", "in ['group', 'supergroup']) def bot_remove_kb(msg): kb = types.ReplyKeyboardMarkup(one_time_keyboard=True) kb.add(types.KeyboardButton(text='/rmkb')) r = bot.send_message( msg.chat.id,", "utils.send_err_report(msg, 'not_enought_rights') elif utils.check_status(msg.reply_to_message.from_user.id, msg.chat.id): utils.send_err_report(msg, 'user_is_admin') utils.new_update(msg, time.time()-start_time) @bot.message_handler(commands=['donate']) def bot_donate(msg): start_time", "parse_mode = 'HTML' ) except Exception: bot.reply_to( msg, text = '' ) utils.new_update(msg,", "c.from_user.id user_settings = api.get_user_param(user_id, 'settings') bot.edit_message_text( chat_id = c.message.chat.id, message_id = c.message.message_id, text", "config.settings_states[curr_settings['restrictions']['admins_only']], chat_id = chat_id)) keyboard.add(btn) btn = types.InlineKeyboardButton(text = 'Назад', callback_data='to_group_settings_menu::{chat_id}'.format(chat_id = chat_id))", "bot.send_message( msg.chat.id, 
text.user_messages[utils.get_user_lang(msg)]['commands']['version'].format(version = text.VERSION), parse_mode = 'HTML' ) @bot.message_handler(commands = ['set_rules'], func", "= create_user_language_keyboard() ) bot.answer_callback_query( callback_query_id = c.id, text = 'Переход выполнен' ) @bot.callback_query_handler(func", "group_name = api.replacer(msg.chat.title), group_username = chat.username, message_id = msg_id, user_id = msg.from_user.id, user_name", "btn5 = types.InlineKeyboardButton(text = 'Навсегда', callback_data = 'time_ro_+10000::{chat_id}'.format(chat_id = chat_id)) btn6 = types.InlineKeyboardButton(text", "utils.add_to_delete_queue(msg.chat.id, r.message_id, api.get_group_params(msg.chat.id)['greeting']['delete_timer']) else: bot.answer_callback_query( callback_query_id = c.id, show_alert = True, text =", "keyboard.add(btn) btn1 = types.InlineKeyboardButton(text = '➖2', callback_data = 'warns_count_-2::{chat_id}'.format(chat_id = chat_id)) btn2 =", "= 'Настройки отправлены', show_alert = True ) @bot.callback_query_handler(func = lambda c: c.data ==", "parse_mode = 'HTML' ) else: utils.new_user_in_chat(msg) if utils.need_greeting(msg): r = bot.send_message( msg.chat.id, utils.generate_welcome_text(msg),", "bot.edit_message_reply_markup( chat_id = c.message.chat.id, message_id = c.message.message_id, reply_markup = kb ) t.start() t.join()", "'__main__': log_name = 'logs.txt' f = open(log_name,'w') f.close() print('Файл логов создан') telebot_logger =", "чат: {}'.format(config.settings_statuses[curr_settings['greeting']['is_enabled']]), callback_data = 'welcome_state::{chat_id}'.format(chat_id = chat_id)) kb.add(btn) btn = types.InlineKeyboardButton(text = 'Задержка", "= types.InlineKeyboardButton(text = '➖2', callback_data = 'warns_count_-2::{chat_id}'.format(chat_id = chat_id)) btn2 = types.InlineKeyboardButton(text =", "chat_id)) keyboard.add(btn1, btn2, btn3, btn4) btn = types.InlineKeyboardButton(text = 'Назад', 
callback_data='to_group_settings_menu::{chat_id}'.format(chat_id = chat_id))", "types.InlineKeyboardButton(text = 'Разблокировать', callback_data = 'unban_new_user::{chat_id}::{user_id}'.format(user_id = msg.new_chat_member.id, chat_id = msg.chat.id)) kb.add(btn) return", "func=lambda msg: msg.chat.type == 'supergroup') def bot_sticker_ban(msg): start_time = time.time() if utils.check_status(msg.from_user.id, msg.chat.id):", "utils.parse_chat_id(c) if utils.check_status(c.from_user.id, utils.parse_chat_id(c)): for i in config.available_attachments: utils.change_state_deletions_files(chat_id, i) bot.edit_message_reply_markup( chat_id =", "'warns_count_-2::{chat_id}'.format(chat_id = chat_id)) btn2 = types.InlineKeyboardButton(text = '➖1', callback_data = 'warns_count_-1::{chat_id}'.format(chat_id = chat_id))", "api.get_users_count(), all_chats = api.get_chats_count(), unblocked_users = api.get_unblocked_users_count(), unblocked_chats = api.get_unblocked_chats_count() ) ) @bot.callback_query_handler(func", "'change_all::{chat_id}'.format(chat_id = chat_id)) keyboard.add(btn) btn = types.InlineKeyboardButton(text = 'Назад', callback_data='to_group_settings_menu::{chat_id}'.format(chat_id = chat_id)) keyboard.add(btn)", "= types.InlineKeyboardButton(text = 'Просмотреть сообщение', callback_data = 'broadcast_message::show') btn3 = types.InlineKeyboardButton(text = 'Начать", "utils.check_super_user(msg.from_user.id)) def bot_update_time(msg): bot_ping(msg) subprocess.run(\"timedatectl set-time '{time}'\".format(time = datetime.datetime.fromtimestamp(msg.date+1).strftime(\"%Y-%m-%d %H:%M:%S\")), shell=True) bot_ping(msg) @bot.message_handler(content_types=['text'],", "utils.parse_chat_id(c)): settings = api.get_group_params(chat_id) settings['greeting']['delete_timer'] = settings['greeting']['delete_timer'] + change_count if settings['greeting']['delete_timer'] < 0:", "'location', 'contact'], func = lambda msg: not utils.check_status(msg.from_user.id, msg.chat.id)) def 
testt(msg): start_time =", "keyboard def generate_leave_kb(msg): chat_id = msg.chat.id keyboard = types.InlineKeyboardMarkup(row_width=1) btn = types.InlineKeyboardButton(text =", "if settings['greeting']['delete_timer'] < 0: settings['greeting']['delete_timer'] = 0 api.change_group_params(chat_id, ujson.dumps(settings)) bot.edit_message_reply_markup( chat_id = c.message.chat.id,", "utils.check_log(msg.chat.id) ) def bot_del_log(msg): print(1) user_id = msg.from_user.id try: admins = bot.get_chat_administrators(msg.forward_from_chat.id) status1", "utils.parse_chat_id(c)): settings = api.get_group_params(chat_id) settings['restrictions']['read_only'] = config.settings_states[settings['restrictions']['read_only']] api.change_group_params(chat_id, ujson.dumps(settings)) bot.edit_message_reply_markup( chat_id = c.message.chat.id,", "Process as Thread import telebot from aiohttp import web from telebot import types", "lambda msg: msg.chat.id == secret_config.channel_ID) def bot_broadcast(msg): r = bot.forward_message(secret_config.official_chat, msg.chat.id, msg.message_id) bot.pin_chat_message(", "text.group_commands[utils.get_group_lang(msg.chat.id)]['restricted']['url'].format( user_id = msg.from_user.id, user_name = api.replacer(msg.from_user.first_name) ), parse_mode='HTML' ) # elif utils.check_for_forward(msg)", "= c.message.chat.id, message_id = c.message.message_id, reply_markup = generate_broadcast_check_menu_kb() ) bot.answer_callback_query( callback_query_id = c.id,", "для выполнения этого действия.' 
) @bot.callback_query_handler(func = lambda c: c.data.startswith('welcome_timer')) def welcome_timer_change(c): change_count", "1 m += text.group_commands[utils.get_group_lang(chat_id)]['refs_stats']['body'].format( inviter_pos = counter, inviter_id = inviter_info.user.id, inviter_firstname = inviter_info.user.first_name,", "if utils.check_status(c.from_user.id, utils.parse_chat_id(c)): if c.data.startswith('reset_settings_confirmation'): api.register_new_chat(c.message.chat) api.change_group_params(chat_id, ujson.dumps(config.default_group_settings)) bot.send_message( c.message.chat.id, 'Настройки сброшены.' )", "ujson.dumps(config.default_group_settings)) else: if api.get_group_params(msg.chat.id)['restrictions']['read_only']: bot.restrict_chat_member( msg.chat.id, msg.new_chat_member.id, until_date = int(time.time()+api.get_group_params(msg.chat.id)['restrictions']['for_time']*3600) ) r =", "True, text = 'Вы не являетесь администратором. Текущий статус настройки: {}'.format(config.settings_statuses[api.get_group_params(chat_id)['deletions']['url']]) ) @bot.callback_query_handler(func", "= 'broadcast_message::start') kb.add(btn1, btn2, btn3) return kb def generate_broadcast_check_menu_kb(): kb = types.InlineKeyboardMarkup(row_width =", "'Получить дамп настроек', callback_data = 'get_settings_json::{chat_id}'.format(chat_id = chat_id)) keyboard.add(btn) btn = types.InlineKeyboardButton(text =", "этого действия.' 
) @bot.callback_query_handler(func = lambda c: c.data.startswith('new_users_restrictions')) def new_users_restrictions(c): chat_id = utils.parse_chat_id(c)", "btn = types.InlineKeyboardButton(text = 'Удалять ссылки{}'.format(config.settings_statuses[curr_settings['deletions']['url']]), callback_data = 'del_url::{chat_id}'.format(chat_id = chat_id)) keyboard.add(btn) btn", "msg.chat.id): sticker_id = utils.parse_arg(msg)[1] utils.unban_sticker(msg, sticker_id) elif utils.check_status(msg.from_user.id, msg.chat.id) and not utils.have_args(msg): utils.send_err_report(msg,", "подтверждены. Текущий статус настройки: {}'.format(config.settings_statuses[api.get_group_params(chat_id)[c.data.split('::')[0]]]) ) else: bot.answer_callback_query( callback_query_id = c.id, show_alert =", "and not utils.have_args(msg): utils.send_err_report(msg, 'not_enought_rights') elif utils.have_args(msg) and not check_status(msg.from_user.id): utils.send_err_report(msg, 'no_args_provided') utils.new_update(msg,", "if utils.check_status(c.from_user.id, utils.parse_chat_id(c)): settings = api.get_group_params(chat_id) settings['restrictions']['read_only'] = config.settings_states[settings['restrictions']['read_only']] api.change_group_params(chat_id, ujson.dumps(settings)) bot.edit_message_reply_markup( chat_id", "utils.parse_arg(msg)[1] utils.unban_stickerpack(msg, stickerpack_name) utils.new_update(msg, time.time()-start_time) @bot.message_handler(commands=['sticker_ban'], func=lambda msg: msg.chat.type == 'supergroup') def bot_sticker_ban(msg):", "change_time = int(c.data.split('_')[2].split('::')[0]) chat_id = utils.parse_chat_id(c) if utils.check_status(c.from_user.id, utils.parse_chat_id(c)): settings = api.get_group_params(chat_id) settings['restrictions']['for_time']", "c.id, text = 'Изменения подтверждены. 
Текущий статус настройки: {}'.format(config.settings_statuses[api.get_group_params(chat_id)[c.data.split('::')[0]]]) ) else: bot.answer_callback_query( callback_query_id", "referrer = utils.parse_arg(msg)[1] bot.send_message( msg.chat.id, text.user_messages['start'], reply_markup=generate_user_menu_kb(msg.from_user.id) ) api.register_new_user(msg.from_user, 'ru') else: bot.send_message( msg.chat.id,", "types.InlineKeyboardButton(text = 'Настройка приветствий', callback_data = 'welcome_settings::{chat_id}'.format(chat_id = chat_id)) keyboard.add(btn) btn = types.InlineKeyboardButton(text", "lambda c: c.data.startswith('deletions_settings')) def to_deletions(c): chat_id = utils.parse_chat_id(c) bot.edit_message_reply_markup( chat_id = c.message.chat.id, message_id", "utils.new_update(msg, time.time()-start_time) # Кнопки @bot.callback_query_handler(func = lambda c: c.data.startswith('get_chat_refs::')) def bot_get_chat_refs(c): chat_id =", "get_welcome_text(c): chat_id = utils.parse_chat_id(c) bot.send_message( c.message.chat.id, utils.get_greeting(chat_id), parse_mode = 'HTML' ) @bot.callback_query_handler(func =", "logging.getLogger('main_info') report_info = logging.getLogger('reports') print('Список логгеров создан') logging.basicConfig( format='%(filename)s [LINE:%(lineno)-3d]# %(levelname)-8s - %(name)-9s", "btn5) return kb def generate_user_groups(user_id): kb = types.InlineKeyboardMarkup(row_width=2) user_settings = ujson.loads(api.get_user_param(user_id, 'settings')) btns", "msg.chat.id, msg.message_id ) utils.new_update(msg, time.time()-start_time) @bot.message_handler(content_types = ['audio', 'document', 'photo', 'sticker', 'video', 'video_note',", "= words[1] bot.edit_message_text( chat_id = c.message.chat.id, message_id = c.message.message_id, text = text.user_messages[lang]['chosen_language']) api.register_new_user(c.from_user,", "= types.InlineKeyboardButton(text = 'Удалять ссылки{}'.format(config.settings_statuses[curr_settings['deletions']['url']]), 
callback_data = 'del_url::{chat_id}'.format(chat_id = chat_id)) keyboard.add(btn) btn =", "['group', 'supergroup'] and msg.forward_from_chat is not None and utils.check_status(msg.from_user.id, msg.chat.id) and not utils.check_log(msg.chat.id)", "= msg.new_chat_member.id, user_name = api.replacer(msg.new_chat_member.first_name), ban_time = api.get_group_params(msg.chat.id)['restrictions']['for_time'] ), reply_markup = unban_new_user_kb(msg), parse_mode", "= True if status1 is True and status2 is True: utils.set_log_channel(msg.chat.id, msg.forward_from_chat.id) elif", "bot.edit_message_reply_markup( chat_id = c.message.chat.id, message_id = c.message.message_id, reply_markup = new_users_restrictions_kb(chat_id) ) @bot.callback_query_handler(func =", "def generate_user_menu_kb(user_id): kb = types.InlineKeyboardMarkup(row_width = 1) btn1 = types.InlineKeyboardButton(text = 'Мои чаты',", "text = 'Изменения подтверждены.' ) @bot.callback_query_handler(func = lambda c: c.data.startswith('warns_del')) def del_warns(c): user_id", "create_user_language_keyboard() ) bot.answer_callback_query( callback_query_id = c.id, text = 'Переход выполнен' ) @bot.callback_query_handler(func =", ") # Вебхук bot.remove_webhook() bot.set_webhook( url=WEBHOOK_URL_BASE + WEBHOOK_URL_PATH, certificate=open(WEBHOOK_SSL_CERT, 'r')) context = ssl.SSLContext(ssl.PROTOCOL_TLSv1_2)", "utils.parse_chat_id(c) state = c.data.split('_')[4].split('::')[0] if utils.check_status(c.from_user.id, utils.parse_chat_id(c)): settings = api.get_group_params(chat_id) settings['restrictions']['admins_only'] = utils.to_bool(state)", "utils.check_status(msg.from_user.id, msg.chat.id): sticker_id = utils.parse_arg(msg)[1] utils.unban_sticker(msg, sticker_id) elif utils.check_status(msg.from_user.id, msg.chat.id) and not utils.have_args(msg):", "'Да, выполнить сброс', callback_data = 'reset_settings_confirmation::{chat_id}'.format(chat_id = msg.chat.id))) kb.add(types.InlineKeyboardButton(text = 'Нет, не стоит',", 
"btn2 = types.InlineKeyboardButton(text = 'Только чаты', callback_data = 'broadcast_check::chats') btn3 = types.InlineKeyboardButton(text =", "user_id, can_send_media_messages=True, can_add_web_page_previews=True, can_send_messages=True, can_send_other_messages=True ) bot.edit_message_text( text = text.group_commands[utils.get_group_lang(c.message.chat.id)]['restricted']['new_user']['button_pressed'].format( user_id = user.user.id,", "= 'Исключать ботов{}'.format(config.settings_statuses[curr_settings['kick_bots']]), callback_data='kick_bots::{chat_id}'.format(chat_id = chat_id)) keyboard.add(btn) btn = types.InlineKeyboardButton(text = 'Фильтры', callback_data='deletions_settings::{chat_id}'.format(chat_id", "= types.InlineKeyboardMarkup(row_width=1) curr_settings = api.get_group_params(chat_id) for cont_type in config.available_attachments: btn = types.InlineKeyboardButton(text=config.available_attachments_str[cont_type].format(config.settings_statuses[curr_settings['deletions']['files'][cont_type]]), callback_data='delete::{content_type}::{chat_id}'.format(content_type", "for i in user_settings['admined_groups']: btn = types.InlineKeyboardButton(text = i['title'], callback_data = 'settings::{chat_id}'.format(chat_id =", "def bot_kick(msg): start_time = time.time() utils.kick_user(msg) utils.new_update(msg, time.time()-start_time) @bot.message_handler(commands = ['ban', 'ban_me_please'], func", "lambda c: c.data == 'stats_menu') def bot_stats_menu(c): bot.edit_message_text( chat_id = c.message.chat.id, message_id =", "ссылки{}'.format(config.settings_statuses[curr_settings['deletions']['url']]), callback_data = 'del_url::{chat_id}'.format(chat_id = chat_id)) keyboard.add(btn) btn = types.InlineKeyboardButton(text = 'Удалять системные", "'Изменить язык', callback_data = 'change_lang') kb.add(btn1, btn2) if utils.check_super_user(user_id): kb.add(types.InlineKeyboardButton(text = 'Админка бота',", "reply_markup = generate_user_menu_kb(c.from_user.id) ) 
@bot.callback_query_handler(func = lambda c: c.data == 'broadcast_menu') def bot_admin_menu(c):", "@f0rden для восстановления их, в случае сбоя:\\n'+ujson.dumps(api.get_group_params(chat_id)) ) bot.answer_callback_query( c.id, text = 'Настройки", "kb = types.InlineKeyboardMarkup() kb.add(types.InlineKeyboardButton(text = 'Да, выполнить сброс', callback_data = 'reset_settings_confirmation::{chat_id}'.format(chat_id = msg.chat.id)))", "lambda msg: msg.chat.type in ['group', 'supergroup']) def bot_remove_kb(msg): kb = types.ReplyKeyboardMarkup(one_time_keyboard=True) kb.add(types.KeyboardButton(text='/rmkb')) r", "chat_id = c.message.chat.id, message_id = c.message.message_id, reply_markup = generate_user_groups(user_id) ) bot.answer_callback_query( callback_query_id =", "utils.check_status(msg.from_user.id, msg.chat.id): sticker_id = msg.reply_to_message.sticker.file_id utils.ban_sticker(msg, sticker_id) elif not utils.check_status(msg.from_user.id, msg.chat.id): utils.send_err_report(msg, 'not_enought_rights')", "chat_id = utils.parse_chat_id(c) if utils.check_status(c.from_user.id, utils.parse_chat_id(c)): if c.data.startswith('reset_settings_confirmation'): api.register_new_chat(c.message.chat) api.change_group_params(chat_id, ujson.dumps(config.default_group_settings)) bot.send_message( c.message.chat.id,", "generate_leave_kb(msg), parse_mode = 'HTML' ) @bot.message_handler(commands = ['rmkb'], func = lambda msg: msg.chat.type", "logging.getLogger('reports') if __name__ == '__main__': log_name = 'logs.txt' f = open(log_name,'w') f.close() print('Файл", "lambda c: c.data.startswith('warns_settings')) def warns_count_change(c): chat_id = utils.parse_chat_id(c) bot.edit_message_reply_markup( chat_id = c.message.chat.id, message_id", "= 'Изменения подтверждены.' 
) else: t = Thread(target = utils.make_broadcast, kwargs = {", "message_id = c.message.message_id, text = text.service_messages['stats'].format( all_users = api.get_users_count(), all_chats = api.get_chats_count(), unblocked_users", "time.time() if api.get_group_params(msg.chat.id)['deletions']['system']: bot.delete_message( msg.chat.id, msg.message_id ) utils.new_update(msg, time.time()-start_time) @bot.message_handler(commands=['report']) def bot_report(msg): start_time", "None and not utils.check_status(msg.reply_to_message.from_user.id, msg.chat.id): utils.new_warn(msg) elif not utils.check_status(msg.from_user.id, msg.chat.id): utils.send_err_report(msg, 'not_enought_rights') elif", "telebot import types import api import cherrypy import config import secret_config import text", "btn3, btn4) btn = types.InlineKeyboardButton(text = 'Показать приветствие', callback_data = 'welcome_get::{chat_id}'.format(chat_id = chat_id))", "m = text.group_commands[utils.get_group_lang(msg.chat.id)]['log_channel']['info']['is_on'].format( chat_id = utils.get_log_id(msg.chat.id), chat_name = bot.get_chat(utils.get_log_id(msg.chat.id)).title ) else: m =", "этого действия.' 
) @bot.callback_query_handler(func = lambda c: c.data.startswith('welcome_timer')) def welcome_timer_change(c): change_count = int(c.data.split('_')[2].split('::')[0])", "keyboard = types.InlineKeyboardMarkup(row_width=1) curr_settings = api.get_group_params(chat_id) for cont_type in config.available_attachments: btn = types.InlineKeyboardButton(text=config.available_attachments_str[cont_type].format(config.settings_statuses[curr_settings['deletions']['files'][cont_type]]),", "'HTML' ) @bot.message_handler(commands = ['rmkb'], func = lambda msg: msg.chat.type in ['group', 'supergroup'])", "), parse_mode='HTML' ) # elif utils.check_for_forward(msg) and api.get_group_params(msg.chat.id)['deletions']['forward']: # bot.delete_message( # msg.chat.id, #", "['my_chats', 'to_groups_list']) def my_chats_list(c): user_id = c.from_user.id user_settings = api.get_user_param(user_id, 'settings') bot.edit_message_text( chat_id", "= arg api.change_bot_settings(secret_config.token, ujson.dumps(curr_bot_settings)) bot.edit_message_reply_markup( chat_id = c.message.chat.id, message_id = c.message.message_id, reply_markup =", "c.data.startswith('get_notifications')) def notify_change(c): chat_id = utils.parse_chat_id(c) if utils.check_status(c.from_user.id, utils.parse_chat_id(c)): utils.change_state_main(chat_id, 'get_notifications') bot.edit_message_reply_markup( chat_id=c.message.chat.id,", "bot.remove_webhook() bot.set_webhook( url=WEBHOOK_URL_BASE + WEBHOOK_URL_PATH, certificate=open(WEBHOOK_SSL_CERT, 'r')) context = ssl.SSLContext(ssl.PROTOCOL_TLSv1_2) context.load_cert_chain(WEBHOOK_SSL_CERT, WEBHOOK_SSL_PRIV) #", "generate_broadcast_settings_menu_kb(): kb = types.InlineKeyboardMarkup(row_width = 2) btn1 = types.InlineKeyboardButton(text = 'Ввести сообщение', callback_data", "and api.get_group_params(msg.chat.id)['kick_bots']: bot.kick_chat_member( msg.chat.id, msg.new_chat_member.id ) bot.send_message( msg.chat.id, text.group_commands['ru']['restricted']['bot'], parse_mode = 
'HTML', reply_markup", "прав для выполнения этого действия. Текущий статус настройки: {}'.format(config.settings_statuses[api.get_group_params(chat_id)[c.data.split('::')[0]]]) ) @bot.callback_query_handler(func = lambda", "utils WEBHOOK_HOST = utils.get_my_ip() WEBHOOK_PORT = 8443 # 443, 80, 88 или 8443", "kb = types.InlineKeyboardMarkup(row_width = 3) curr_settings = ujson.loads(api.get_bot_settings(secret_config.token)) s = { 'users': 'пользователи',", "= types.InlineKeyboardButton(text = 'Кик', callback_data = 'warns_action_1::{chat_id}'.format(chat_id = chat_id)) btn3 = types.InlineKeyboardButton(text =", ") if msg.new_chat_member.id == 495038140: api.change_group_params(msg.chat.id, ujson.dumps(config.default_group_settings)) else: if api.get_group_params(msg.chat.id)['restrictions']['read_only']: bot.restrict_chat_member( msg.chat.id, msg.new_chat_member.id,", "msg.message_id # ) # bot.send_message( # msg.chat.id, # text.group_commands[utils.get_group_lang(msg.chat.id)]['restricted']['url'].format( # user_id = msg.from_user.id,", "'Начать рассылку', callback_data = 'broadcast_check::start') kb.add(btn1, btn2, btn3) kb.add(btn4, btn5) return kb def", "= 'read_only::{chat_id}'.format(chat_id = chat_id)) keyboard.add(btn) btn1 = types.InlineKeyboardButton(text = '➖2', callback_data = 'time_ro_-2::{chat_id}'.format(chat_id", "func=lambda msg: msg.chat.type == 'supergroup') def bot_sticker_unban(msg): start_time = time.time() if utils.have_args(msg) and", "'video', 'video_note', 'voice', 'location', 'contact'], func = lambda msg: not utils.check_status(msg.from_user.id, msg.chat.id)) def", "= ssl.SSLContext(ssl.PROTOCOL_TLSv1_2) context.load_cert_chain(WEBHOOK_SSL_CERT, WEBHOOK_SSL_PRIV) # Start aiohttp server web.run_app( app, host=WEBHOOK_LISTEN, port=WEBHOOK_PORT, ssl_context=context,", "= chat_id)) keyboard.add(btn) return keyboard def warns_settings_kb(chat_id): keyboard = types.InlineKeyboardMarkup(row_width = 4) curr_settings", "'chats': 'диалоги', 'all': 
'все' } btn1 = types.InlineKeyboardButton(text = 'Только диалоги', callback_data =", "= c.id, text = 'Изменения подтверждены.' ) @bot.callback_query_handler(func = lambda c: c.data.startswith('warns_action_')) def", "try: bot.send_message( msg.from_user.id, utils.generate_rules_text(msg), parse_mode = 'HTML' ) except Exception: bot.reply_to( msg, text", "# bot.delete_message( # msg.chat.id, # msg.message_id # ) # bot.send_message( # msg.chat.id, #", "= 'warns_action_1::{chat_id}'.format(chat_id = chat_id)) btn3 = types.InlineKeyboardButton(text = 'Бан', callback_data = 'warns_action_2::{chat_id}'.format(chat_id =", "= 'Админка бота', callback_data = 'admin_menu')) return kb def generate_admin_menu_kb(): kb = types.InlineKeyboardMarkup(row_width", "= lambda c: c.data.startswith('time_ro_')) def ro_time_change(c): change_time = int(c.data.split('_')[2].split('::')[0]) chat_id = utils.parse_chat_id(c) if", "kb def unban_new_user_kb(msg): kb = types.InlineKeyboardMarkup(row_width=1) btn = types.InlineKeyboardButton(text = 'Разблокировать', callback_data =", "keyboard def remove_warns_kb(user_id): kb = types.InlineKeyboardMarkup(row_width=1) btn = types.InlineKeyboardButton(text = 'Удалить предупреждения', callback_data", "c.message.chat.id, text.group_commands[utils.get_group_lang(c.message.chat.id)]['leave']['accepted'] ) bot.leave_chat( c.message.chat.id ) else: bot.send_message( c.message.chat.id, text.group_commands[utils.get_group_lang(c.message.chat.id)]['leave']['cancelled'] ) bot.delete_message( c.message.chat.id,", "text = 'Изменения подтверждены. 
Текущий статус настройки: {}'.format(config.settings_statuses[api.get_group_params(chat_id)[c.data.split('::')[0]]]) ) else: bot.answer_callback_query( callback_query_id =", "user_id, m, parse_mode = 'HTML' ) bot.answer_callback_query( c.id, text = 'Список отправлен', show_alert", "def reset_settings_button(c): chat_id = utils.parse_chat_id(c) if utils.check_status(c.from_user.id, utils.parse_chat_id(c)): if c.data.startswith('reset_settings_confirmation'): api.register_new_chat(c.message.chat) api.change_group_params(chat_id, ujson.dumps(config.default_group_settings))", "return keyboard def generate_user_menu_kb(user_id): kb = types.InlineKeyboardMarkup(row_width = 1) btn1 = types.InlineKeyboardButton(text =", "= 'Вы не являетесь администратором. Текущий статус настройки: {}'.format(config.settings_statuses[api.get_group_params(chat_id)[c.data.split('::')[0]]]) ) @bot.callback_query_handler(func = lambda", "# if utils.check_status(c.from_user.id, utils.parse_chat_id(c)): # settings = api.get_group_params(chat_id) # settings[''] # api.change_group_params(chat_id, )", "utils.check_status(msg.reply_to_message.from_user.id, msg.chat.id): utils.new_warn(msg) elif not utils.check_status(msg.from_user.id, msg.chat.id): utils.send_err_report(msg, 'not_enought_rights') elif utils.check_status(msg.reply_to_message.from_user.id, msg.chat.id): utils.send_err_report(msg,", "= 'В главное меню', callback_data = 'to_main_menu')) return kb @bot.channel_post_handler(content_types=['text'], func = lambda", "time.time()-start_time) @bot.message_handler(commands=['start'], func=lambda msg: msg.chat.type == 'private') def bot_user_start(msg): message = msg start_time", "chat_id)) btn2 = types.InlineKeyboardButton(text = 'Кик', callback_data = 'warns_action_1::{chat_id}'.format(chat_id = chat_id)) btn3 =", "btn2) kb.add(types.InlineKeyboardButton(text = 'В главное меню', callback_data = 'to_main_menu')) return kb def generate_broadcast_vars_menu_kb():", 
"c.data.split('_')[4].split('::')[0] if utils.check_status(c.from_user.id, utils.parse_chat_id(c)): settings = api.get_group_params(chat_id) settings['restrictions']['admins_only'] = utils.to_bool(state) api.change_group_params(chat_id, ujson.dumps(settings)) bot.edit_message_reply_markup(", "bot_text(msg): start_time = time.time() bot.reply_to(msg, \"<code>'{}': '{}',</code>\".format(msg.photo[0].file_id, msg.caption), parse_mode ='HTML') utils.new_update(msg, time.time()-start_time) @bot.message_handler(content_types", "msg_text.lower() if utils.is_restricted(msg) and not utils.check_status(msg.from_user.id, msg.chat.id): bot.delete_message( msg.chat.id, msg.message_id ) if msg_text_low.startswith('разбан'):", "utils.change_state_main(chat_id, 'get_notifications') bot.edit_message_reply_markup( chat_id=c.message.chat.id, message_id=c.message.message_id, reply_markup=group_setting(utils.parse_chat_id(c)) ) bot.answer_callback_query( callback_query_id = c.id, text =", "['ban', 'ban_me_please'], func = lambda msg: msg.chat.type == 'supergroup') def bot_ban_me_please(msg): start_time =", "utils.parse_chat_id(c) if utils.check_status(c.from_user.id, utils.parse_chat_id(c)): settings = api.get_group_params(chat_id) curr_state = settings['greeting']['is_enabled'] new_state = config.settings_states[curr_state]", "= 'Ничего', callback_data = 'warns_action_0::{chat_id}'.format(chat_id = chat_id)) btn2 = types.InlineKeyboardButton(text = 'Кик', callback_data", "kb.add(types.InlineKeyboardButton(text = 'В главное меню', callback_data = 'to_main_menu')) return kb def generate_broadcast_settings_menu_kb(): kb", "bot.pin_chat_message( r.chat.id, r.message_id ) @bot.message_handler(commands =['setlog'], func = lambda msg: msg.chat.type in ['group',", "= utils.parse_arg(msg)[1] user_id = int(words) utils.unban_user(msg, user_id) elif utils.check_status(msg.from_user.id, msg.chat.id) and msg.reply_to_message is", "def bot_del_log(msg): print(1) user_id = msg.from_user.id try: 
admins = bot.get_chat_administrators(msg.forward_from_chat.id) status1 = False", "прав для выполнения этого действия.' ) @bot.callback_query_handler(func = lambda c: c.data.startswith('unban_new_user')) def unban_new_user(c):", "= api.get_group_params(chat_id) # settings[''] # api.change_group_params(chat_id, ) # Вебхук bot.remove_webhook() bot.set_webhook( url=WEBHOOK_URL_BASE +", "keyboard.add(btn) btn = types.InlineKeyboardButton(text = 'Настройка приветствий', callback_data = 'welcome_settings::{chat_id}'.format(chat_id = chat_id)) keyboard.add(btn)", "chat_id)) keyboard.add(btn1, btn2, btn3, btn4) keyboard.add(btn5, btn6) btn = types.InlineKeyboardButton(text = 'Снятие ограничений", "btn2 = types.InlineKeyboardButton(text = '➖1', callback_data = 'warns_count_-1::{chat_id}'.format(chat_id = chat_id)) btn3 = types.InlineKeyboardButton(text", "= i['title'], callback_data = 'lang::{lang_code}'.format(lang_code = i['code']))) return lang_keyboard def group_setting(chat_id): keyboard =", "} ) kb = types.InlineKeyboardMarkup() kb.add(types.InlineKeyboardButton(text = 'В главное меню', callback_data = 'to_main_menu'))", "= utils.parse_chat_id(c) if utils.check_status(c.from_user.id, utils.parse_chat_id(c)): settings = api.get_group_params(chat_id) settings['restrictions']['for_time'] = settings['restrictions']['for_time'] + change_time", "btn2 = types.InlineKeyboardButton(text = 'Изменить язык', callback_data = 'change_lang') kb.add(btn1, btn2) if utils.check_super_user(user_id):", "api.change_group_params(msg.chat.id, ujson.dumps(config.default_group_settings)) bot.send_message( msg.chat.id, text.group_commands[utils.get_group_lang(msg.chat.id)]['registration'], parse_mode = 'HTML' ) @bot.message_handler(commands=['ro'], func=lambda msg: msg.chat.type", "= api.get_user_param(user_id, 'settings') bot.edit_message_text( chat_id = c.message.chat.id, message_id = c.message.message_id, text = 'Список", "= c.message.chat.id, message_id = c.message.message_id, reply_markup 
= generate_admin_menu_kb() ) @bot.callback_query_handler(func=lambda c: c.data.startswith('lang::')) def", "c.message.chat.id ) else: bot.send_message( c.message.chat.id, text.group_commands[utils.get_group_lang(c.message.chat.id)]['leave']['cancelled'] ) bot.delete_message( c.message.chat.id, c.message.message_id ) # @bot.callback_query_handler(func", "start_time = time.time() message = msg kb = types.InlineKeyboardMarkup() r = bot.reply_to( msg,", "= generate_broadcast_check_menu_kb() ) bot.answer_callback_query( callback_query_id = c.id, text = 'Изменения подтверждены.' ) else:", "bot.answer_callback_query( callback_query_id = c.id, text = 'Изменения подтверждены. Текущий статус настройки: {}'.format(config.settings_statuses[api.get_group_params(chat_id)[c.data.split('::')[0]]]) )", "lambda c: c.data.startswith('settings::')) def chat_settings(c): chat_id = utils.parse_chat_id(c) bot.edit_message_text( chat_id = c.message.chat.id, message_id", "= c.data.split('::')[1] curr_bot_settings = ujson.loads(api.get_bot_settings(secret_config.token)) if arg in ['users', 'chats', 'all']: curr_bot_settings['broadcast']['check']['recievers'] =", "reply_to_message_id=msg.message_id, parse_mode='HTML' ) utils.new_update(msg, time.time()-start_timee) @bot.message_handler(content_types=['new_chat_members']) def bot_users_new(msg): start_time = time.time() api.register_new_chat(msg.chat) chat_id", "'time_ro_+2::{chat_id}'.format(chat_id = chat_id)) btn5 = types.InlineKeyboardButton(text = 'Навсегда', callback_data = 'time_ro_+10000::{chat_id}'.format(chat_id = chat_id))", "bot.send_message( msg.from_user.id, utils.generate_rules_text(msg), parse_mode = 'HTML' ) except Exception: bot.reply_to( msg, text =", "сообщения{}'.format(config.settings_statuses[curr_settings['deletions']['system']]), callback_data = 'del_system::{chat_id}'.format(chat_id = chat_id)) keyboard.add(btn) btn = types.InlineKeyboardButton(text = 'Исключать 
ботов{}'.format(config.settings_statuses[curr_settings['kick_bots']]),", "bot_get_id(msg): bot.send_message( msg.chat.id, msg.chat.id ) # @bot.message_handler(commands = ['voteban']) # def bot_voteban(msg): #", "callback_data = 'time_ro_+10000::{chat_id}'.format(chat_id = chat_id)) btn6 = types.InlineKeyboardButton(text = 'Сброс', callback_data = 'time_ro_-10000000000::{chat_id}'.format(chat_id", "= c.message.message_id, reply_markup = delete_settings(utils.parse_chat_id(c)) ) bot.answer_callback_query( callback_query_id = c.id, text = 'Изменения", "прав для выполнения этого действия.' ) @bot.callback_query_handler(func = lambda c: c.data.startswith('new_users_restrictions')) def new_users_restrictions(c):", "'empty_callback') btn5 = types.InlineKeyboardButton(text = 'Начать рассылку', callback_data = 'broadcast_check::start') kb.add(btn1, btn2, btn3)", "msg, text.group_commands[utils.get_group_lang(msg.chat.id)]['ban_me_please'].format( t = t ), parse_mode = 'HTML' ) else: bot.reply_to( msg,", "time.time()-start_time) @bot.message_handler(commands=['report']) def bot_report(msg): start_time = time.time() admins = bot.get_chat_administrators(msg.chat.id) chat = bot.get_chat(msg.chat.id)", "{}</b>'.format(bot.get_chat(chat_id).title), parse_mode = 'HTML' ) bot.edit_message_reply_markup( chat_id = c.message.chat.id, message_id = c.message.message_id, reply_markup", "text = 'Изменения подтверждены. 
Текущий статус настройки: {}'.format(config.settings_statuses[api.get_group_params(chat_id)['deletions']['url']]) ) else: bot.answer_callback_query( callback_query_id =", "= 'time_ro_+1::{chat_id}'.format(chat_id = chat_id)) btn4 = types.InlineKeyboardButton(text = '➕2', callback_data = 'time_ro_+2::{chat_id}'.format(chat_id =", "chat_id = c.message.chat.id, message_id = c.message.message_id, reply_markup = warns_settings_kb(chat_id) ) bot.answer_callback_query( callback_query_id =", "@bot.callback_query_handler(func = lambda c: c.data.startswith('warns_action_')) def warns_count_change(c): new_mod = int(c.data.split('_')[2].split('::')[0]) chat_id = utils.parse_chat_id(c)", "сброшены.' ) bot.delete_message( c.message.chat.id, c.message.message_id ) else: bot.delete_message( c.message.chat.id, c.message.message_id ) bot.send_message( c.message.chat.id,", "open('logs.txt', 'rb')) @bot.message_handler(commands = ['menu']) def bot_user_menu(msg): bot.send_message( msg.from_user.id, 'Ваше меню', reply_markup =", "любое время и отправить @f0rden для восстановления их, в случае сбоя:\\n'+ujson.dumps(api.get_group_params(chat_id)) ) bot.answer_callback_query(", "chat_id = c.message.chat.id, message_id = c.message.message_id, reply_markup = new_users_restrictions_kb(chat_id) ) bot.answer_callback_query( callback_query_id =", "= chat_id)) btn4 = types.InlineKeyboardButton(text = '➕2', callback_data = 'time_ro_+2::{chat_id}'.format(chat_id = chat_id)) btn5", "сбросить настройки?', reply_markup = kb ) @bot.message_handler(commands = ['update_time'], func = lambda msg:", "chat_id = c.message.chat.id, message_id = c.message.message_id, reply_markup = kb ) t.start() t.join() @bot.callback_query_handler(func", "chat_id = c.message.chat.id, message_id = c.message.message_id ) utils.add_to_delete_queue(msg.chat.id, r.message_id, api.get_group_params(msg.chat.id)['greeting']['delete_timer']) else: bot.answer_callback_query( callback_query_id", "c: c.data.startswith('warns_del')) def 
del_warns(c): user_id = utils.parse_user_id(c) chat_id = utils.parse_chat_id(c) if utils.check_status(c.from_user.id, utils.parse_chat_id(c)):", "= types.InlineKeyboardMarkup() r = bot.reply_to( msg, 'Настройки отправлены вам в личные сообщения', )", "time.time() message = msg if len(msg.text) not in [9, 21]: new_greeting = msg.text[len(msg.text):msg.entities[0].length:-1][::-1]", "int(time.time()-start_time)) working_time = datetime.timedelta(seconds = int(time.time()-msg.date)) uptime_str = str(uptime).replace('day', 'days').replace('dayss', 'days') working_time_str =", "msg: msg.chat.type != 'private') def bot_check_text(msg): start_time = time.time() msg_text = msg.text msg_text_low", "elif not utils.check_status(msg.from_user.id, msg.chat.id): # if utils.is_new_in_chat(msg) and api.get_group_params(msg.chat.id)['restrict_new'] == '1': if utils.check_for_urls(msg)", "= 'change_lang') kb.add(btn1, btn2) if utils.check_super_user(user_id): kb.add(types.InlineKeyboardButton(text = 'Админка бота', callback_data = 'admin_menu'))", "user_id = msg.from_user.id, # user_name = api.replacer(msg.from_user.first_name) # ), # parse_mode='HTML' # )", "выполнения этого действия.' 
) @bot.callback_query_handler(func = lambda c: c.data.startswith('time_ro_')) def ro_time_change(c): change_time =", "c.message.chat.id, c.message.message_id ) bot.send_message( c.message.chat.id, text.group_commands[utils.get_group_lang(c.message.chat.id)]['leave']['accepted'] ) bot.leave_chat( c.message.chat.id ) else: bot.send_message( c.message.chat.id,", "= types.InlineKeyboardButton(text = 'Задержка перед удалением приветствия: {} сек.'.format(curr_settings['greeting']['delete_timer']), callback_data = 'welcome_get::{chat_id}'.format(chat_id =", "return keyboard def warns_settings_kb(chat_id): keyboard = types.InlineKeyboardMarkup(row_width = 4) curr_settings = api.get_group_params(chat_id) btn", "c.data.startswith('del_url')) def del_url(c): chat_id = utils.parse_chat_id(c) if utils.check_status(c.from_user.id, utils.parse_chat_id(c)): utils.change_state_deletions_main(chat_id, 'url') bot.edit_message_reply_markup( chat_id=c.message.chat.id,", "utils.new_update(msg, time.time()-start_time) @bot.message_handler(commands=['start'], func=lambda msg: msg.chat.type != 'private') def bot_group_start(msg): start_time = time.time()", "'Только диалоги', callback_data = 'broadcast_check::users') btn2 = types.InlineKeyboardButton(text = 'Только чаты', callback_data =", "utils.parse_chat_id(c) if utils.check_status(c.from_user.id, utils.parse_chat_id(c)): settings = api.get_group_params(chat_id) settings['greeting']['delete_timer'] = settings['greeting']['delete_timer'] + change_count if", "c.message.chat.id, message_id = c.message.message_id, reply_markup = generate_user_menu_kb(c.from_user.id) ) @bot.callback_query_handler(func = lambda c: c.data", "ujson.loads(api.get_bot_settings(secret_config.token)) if arg in ['users', 'chats', 'all']: curr_bot_settings['broadcast']['check']['recievers'] = arg api.change_bot_settings(secret_config.token, ujson.dumps(curr_bot_settings)) bot.edit_message_reply_markup(", "bot_set_rules(msg): start_time = time.time() message = msg if 
len(msg.text) not in [9, 21]:", "выполнения этого действия. Текущий статус настройки: {}'.format(config.settings_statuses[api.get_group_params(chat_id)[c.data.split('::')[0]]]) ) @bot.callback_query_handler(func = lambda c: c.data.startswith('deletions_settings'))", "= re.split('::', c.data)[1] if utils.check_status(c.from_user.id, utils.parse_chat_id(c)): utils.change_state_deletions_files(chat_id, cont_type) bot.edit_message_reply_markup( chat_id = c.message.chat.id, message_id", "url=WEBHOOK_URL_BASE + WEBHOOK_URL_PATH, certificate=open(WEBHOOK_SSL_CERT, 'r')) context = ssl.SSLContext(ssl.PROTOCOL_TLSv1_2) context.load_cert_chain(WEBHOOK_SSL_CERT, WEBHOOK_SSL_PRIV) # Start aiohttp", "@bot.callback_query_handler(func = lambda c: c.data in ['my_chats', 'to_groups_list']) def my_chats_list(c): user_id = c.from_user.id", "'Начать рассылку', callback_data = 'broadcast_message::start') kb.add(btn1, btn2, btn3) return kb def generate_broadcast_check_menu_kb(): kb", "# utils.new_voteban(msg) # bot.send_message( # msg.chat.id, # text. 
# ) @bot.message_handler(commands = ['version'])", "working_time_str = working_time_str.replace(working_time_str.split(',')[0], utils.get_text_translation(working_time_str.split(',')[0], 'ru')) bot.send_message( msg.chat.id, text.user_messages['ru']['commands']['ping'].format( unix_time = datetime.datetime.fromtimestamp(int(time.time())), working_time =", "def bot_stats_menu(c): bot.edit_message_text( chat_id = c.message.chat.id, message_id = c.message.message_id, text = text.service_messages['stats'].format( all_users", "msg.chat.id, msg.message_id ) utils.new_update(msg, time.time()-start_time) @bot.message_handler(commands=['report']) def bot_report(msg): start_time = time.time() admins =", "максимальном кол-ве варнов: {}'.format(config.warns_states[curr_settings['warns']['action']]), callback_data='empty_callback::{chat_id}'.format(chat_id = chat_id)) keyboard.add(btn) btn1 = types.InlineKeyboardButton(text = 'Ничего',", "in ['users', 'chats', 'all']: curr_bot_settings['broadcast']['check']['recievers'] = arg api.change_bot_settings(secret_config.token, ujson.dumps(curr_bot_settings)) bot.edit_message_reply_markup( chat_id = c.message.chat.id,", "действия.' 
) @bot.callback_query_handler(func = lambda c: c.data.startswith('settings_delete')) def del_settings(c): words = c.data.split() bot.delete_message(", ") @bot.message_handler(commands =['setlog'], func = lambda msg: msg.chat.type in ['group', 'supergroup'] and msg.forward_from_chat", "Текущий статус настройки: {}'.format(config.settings_statuses[api.get_group_params(chat_id)['deletions']['system']]) ) @bot.callback_query_handler(func = lambda c: c.data.startswith('kick_bots')) def kick_bots(c): chat_id", "if chat.username: if msg.reply_to_message: msg_id = msg.reply_to_message.message_id txt = text.reports_messages['report']['to_admin']['have_username']['reply'] else: msg_id =", "@bot.callback_query_handler(func = lambda c: c.data == 'change_lang') def bot_change_lang(c): user_id = c.from_user.id bot.edit_message_text(", "= './webhook_pkey.pem' # Путь к приватному ключу WEBHOOK_URL_BASE = \"https://%s:%s\" % (WEBHOOK_HOST, WEBHOOK_PORT)", "if utils.check_status(c.from_user.id, utils.parse_chat_id(c)): for i in config.available_attachments: utils.change_state_deletions_files(chat_id, i) bot.edit_message_reply_markup( chat_id = c.message.chat.id,", "= types.InlineKeyboardButton(text = '➕1', callback_data = 'warns_count_+1::{chat_id}'.format(chat_id = chat_id)) btn4 = types.InlineKeyboardButton(text =", "msg.chat.id, text.user_messages['ru']['commands']['ping'].format( unix_time = datetime.datetime.fromtimestamp(int(time.time())), working_time = working_time_str, uptime_sec = uptime ), reply_to_message_id=msg.message_id,", ") api.register_new_user(msg.from_user, 'ru') else: bot.send_message( msg.chat.id, text.user_messages[utils.get_user_lang(msg)]['start'], reply_markup=generate_user_menu_kb(msg.from_user.id) ) utils.new_update(msg, time.time()-start_time) @bot.message_handler(commands=['start'], func=lambda", "msg.reply_to_message is not None: user_id = msg.reply_to_message.from_user.id utils.unban_user(msg, user_id) elif utils.check_status(msg.from_user.id, 
msg.chat.id) and", "'Рассылка-проверка', callback_data = 'check_broadcast') btn2 = types.InlineKeyboardButton(text = 'Рассылка сообщения', callback_data = 'broadcast_settings')", "msg.chat.id, msg.message_id ) if msg_text_low.startswith('разбан'): if utils.check_super_user(msg.from_user.id): utils.global_unban(msg) elif msg_text.lower() in ['глобал бан']:", "= time.time() if utils.check_status(msg.from_user.id, msg.chat.id): utils.ban_stickerpack(msg) else: utils.send_err_report(msg, 'not_enought_rights') utils.new_update(msg, time.time()-start_time) @bot.message_handler(commands=['stickerpack_unban'], func=lambda", "создан') telebot_logger = logging.getLogger('telebot') mysql_info = logging.getLogger('mysql') main_info = logging.getLogger('main_info') report_info = logging.getLogger('reports')", "def bot_group_start(msg): start_time = time.time() api.register_new_chat(msg.chat) utils.new_update(msg, time.time()-start_time) @bot.message_handler(commands = ['get_logs'], func =", "'<b>Настройки группы {}</b>'.format(msg.chat.title), reply_markup=group_setting(msg.chat.id), parse_mode='HTML' ) utils.new_update(msg, time.time()-start_time) @bot.message_handler(commands=['start'], func=lambda msg: msg.chat.type ==", "and utils.have_args(msg): stickerpack_name = utils.parse_arg(msg)[1] utils.unban_stickerpack(msg, stickerpack_name) utils.new_update(msg, time.time()-start_time) @bot.message_handler(commands=['sticker_ban'], func=lambda msg: msg.chat.type", "= utils.get_my_ip() WEBHOOK_PORT = 8443 # 443, 80, 88 или 8443 (порт должен", "user_id) elif utils.check_status(msg.from_user.id, msg.chat.id) and not utils.have_args(msg): utils.send_err_report(msg, 'no_args_provided') else: utils.send_err_report(msg, 'not_enought_rights') utils.new_update(msg,", "msg: msg.chat.type == 'supergroup') def bot_sticker_unban(msg): start_time = time.time() if utils.have_args(msg) and utils.check_status(msg.from_user.id,", "c.id, show_alert = True, text = 'Вы не являетесь администратором. 
Текущий статус настройки:", "utils.new_user_in_chat(msg) if utils.need_greeting(msg): r = bot.send_message( msg.chat.id, utils.generate_welcome_text(msg), parse_mode='HTML' ) utils.add_to_delete_queue(msg.chat.id, r.message_id, api.get_group_params(msg.chat.id)['greeting']['delete_timer'])", "user_id ) bot.edit_message_text( text = text.group_commands[utils.get_group_lang(c.message.chat.id)]['restricted']['new_user']['button_pressed'].format( user_id = user.user.id, user_name = api.replacer(user.user.first_name) ),", "from multiprocessing import Process as Thread import telebot from aiohttp import web from", "= 'Принимать рассылки{}'.format(config.settings_statuses[curr_settings['get_notifications']]), callback_data = 'get_notifications::{chat_id}'.format(chat_id = chat_id)) keyboard.add(btn) btn = types.InlineKeyboardButton(text =", "admins: if i.user.id == user_id: if i.status == 'creator': status1 = True if", "'private' and utils.check_status(msg.from_user.id, msg.chat.id)) def bot_leave(msg): bot.send_message( msg.chat.id, text.group_commands[utils.get_group_lang(msg.chat.id)]['leave']['question'], reply_markup = generate_leave_kb(msg), parse_mode", "= utils.parse_chat_id(c) if utils.check_status(c.from_user.id, utils.parse_chat_id(c)): utils.change_state_deletions_main(chat_id, 'url') bot.edit_message_reply_markup( chat_id=c.message.chat.id, message_id=c.message.message_id, reply_markup=group_setting(utils.parse_chat_id(c)) ) bot.answer_callback_query(", "приветствие в чат: {}'.format(config.settings_statuses[curr_settings['greeting']['is_enabled']]), callback_data = 'welcome_state::{chat_id}'.format(chat_id = chat_id)) kb.add(btn) btn = types.InlineKeyboardButton(text", "elif status1 is not True: bot.send_message( msg.chat.id, text = text.group_commands[utils.get_group_lang(chat_id)]['log_channel']['confirmation']['errors']['user_is_not_creator'] ) elif status2", "'Разблокировать', callback_data = 'unban_new_user::{chat_id}::{user_id}'.format(user_id = 
msg.new_chat_member.id, chat_id = msg.chat.id)) kb.add(btn) return kb def", "@bot.message_handler(commands = ['dellog'], func = lambda msg: msg.chat.type in ['group', 'supergroup'] and msg.forward_from_chat", "меню', reply_markup = generate_user_menu_kb(msg.from_user.id) ) @bot.message_handler(commands=['set_text'], func = lambda msg: msg.chat.type != 'private')", "'chats', 'all']: curr_bot_settings['broadcast']['check']['recievers'] = arg api.change_bot_settings(secret_config.token, ujson.dumps(curr_bot_settings)) bot.edit_message_reply_markup( chat_id = c.message.chat.id, message_id =", "@bot.callback_query_handler(func = lambda c: c.data == 'check_broadcast') def bot_admin_menu(c): bot.edit_message_text( chat_id = c.message.chat.id,", "c: c.data.startswith('settings_captcha')) # def change_captcha_settings(c): # chat_id = utils.parse_chat_id(c) # if utils.check_status(c.from_user.id, utils.parse_chat_id(c)):", "for i in admins: if i.user.id == user_id: if i.status == 'creator': status1", "= 'Удалять ссылки{}'.format(config.settings_statuses[curr_settings['deletions']['url']]), callback_data = 'del_url::{chat_id}'.format(chat_id = chat_id)) keyboard.add(btn) btn = types.InlineKeyboardButton(text =", "msg.chat.id): utils.ban_stickerpack(msg) else: utils.send_err_report(msg, 'not_enought_rights') utils.new_update(msg, time.time()-start_time) @bot.message_handler(commands=['stickerpack_unban'], func=lambda msg: msg.chat.type != 'private')", "'HTML' ) except Exception: bot.reply_to( msg, text = '' ) utils.new_update(msg, time.time()-start_time) @bot.message_handler(commands", "def delete_settings(chat_id): keyboard = types.InlineKeyboardMarkup(row_width=1) curr_settings = api.get_group_params(chat_id) for cont_type in config.available_attachments: btn", "user_id)) kb.add(btn) return kb def unban_new_user_kb(msg): kb = types.InlineKeyboardMarkup(row_width=1) btn = types.InlineKeyboardButton(text =", "= api.get_chats_count(), unblocked_users = 
api.get_unblocked_users_count(), unblocked_chats = api.get_unblocked_chats_count() ) ) @bot.callback_query_handler(func = lambda", "text = '' ) utils.new_update(msg, time.time()-start_time) @bot.message_handler(commands = ['reset_settings'], func = lambda msg:", "= c.message.message_id, text = text.user_messages['start'], parse_mode = 'HTML' ) bot.edit_message_reply_markup( chat_id = c.message.chat.id,", "= utils.parse_chat_id(c) # if utils.check_status(c.from_user.id, utils.parse_chat_id(c)): # settings = api.get_group_params(chat_id) # settings[''] #", "= chat_id)) btn2 = types.InlineKeyboardButton(text = '➖5', callback_data = 'welcome_timer_-5::{chat_id}'.format(chat_id = chat_id)) btn3", "c: c.data.startswith('broadcast_check')) def bot_broadcast_check(c): arg = c.data.split('::')[1] curr_bot_settings = ujson.loads(api.get_bot_settings(secret_config.token)) if arg in", "'Эти настройки можно получить в любое время и отправить @f0rden для восстановления их,", "= ['rmkb'], func = lambda msg: msg.chat.type in ['group', 'supergroup']) def bot_remove_kb(msg): kb", "msg.chat.type == 'channel': bot.send_message( msg.chat.id, text.promotion_message, parse_mode='HTML' ) bot.leave_chat( msg.chat.id ) if msg.new_chat_member.id", "@bot.message_handler(content_types = ['sticker'], func = lambda msg: not utils.check_status(msg.from_user.id, msg.chat.id)) def bot_check_sticker(msg): start_time", "['menu']) def bot_user_menu(msg): bot.send_message( msg.from_user.id, 'Ваше меню', reply_markup = generate_user_menu_kb(msg.from_user.id) ) @bot.message_handler(commands=['set_text'], func", "главное меню', callback_data = 'to_main_menu')) return kb @bot.channel_post_handler(content_types=['text'], func = lambda msg: msg.chat.id", "@bot.callback_query_handler(func = lambda c: c.data.startswith('welcome_settings')) def welcome_settings(c): chat_id = utils.parse_chat_id(c) bot.edit_message_reply_markup( chat_id =", "= True, text = 'Вы не являетесь администратором. 
Текущий статус настройки: {}'.format(config.settings_statuses[api.get_group_params(chat_id)[c.data.split('::')[0]]]) )", "def bot_leave(msg): bot.send_message( msg.chat.id, text.group_commands[utils.get_group_lang(msg.chat.id)]['leave']['question'], reply_markup = generate_leave_kb(msg), parse_mode = 'HTML' ) @bot.message_handler(commands", "return kb def generate_user_groups(user_id): kb = types.InlineKeyboardMarkup(row_width=2) user_settings = ujson.loads(api.get_user_param(user_id, 'settings')) btns =", "time.time() if utils.check_status(msg.from_user.id, msg.chat.id) and msg.reply_to_message is not None and not utils.check_status(msg.reply_to_message.from_user.id, msg.chat.id):", "reply_markup = kb ) @bot.message_handler(commands = ['update_time'], func = lambda msg: utils.check_super_user(msg.from_user.id)) def", "bot.edit_message_reply_markup( chat_id = c.message.chat.id, message_id = c.message.message_id, reply_markup = warns_settings_kb(chat_id) ) bot.answer_callback_query( callback_query_id", "time.time() if utils.check_status(msg.from_user.id, msg.chat.id): api.register_new_chat(msg.chat) api.change_group_params(msg.chat.id, ujson.dumps(config.default_group_settings)) bot.send_message( msg.chat.id, text.group_commands[utils.get_group_lang(msg.chat.id)]['registration'], parse_mode = 'HTML'", "for cont_type in config.available_attachments: btn = types.InlineKeyboardButton(text=config.available_attachments_str[cont_type].format(config.settings_statuses[curr_settings['deletions']['files'][cont_type]]), callback_data='delete::{content_type}::{chat_id}'.format(content_type = cont_type, chat_id = chat_id))", "'Ваше меню', reply_markup = generate_user_menu_kb(msg.from_user.id) ) @bot.message_handler(commands=['set_text'], func = lambda msg: msg.chat.type !=", "start_time = time.time() if utils.check_status(msg.from_user.id, msg.chat.id) and msg.reply_to_message is not None and not", "= 'Сброс', callback_data = 'time_ro_-10000000000::{chat_id}'.format(chat_id = 
chat_id)) keyboard.add(btn1, btn2, btn3, btn4) keyboard.add(btn5, btn6)", "chat_id)) keyboard.add(btn) btn = types.InlineKeyboardButton(text = 'Получить топ инвайтеров', callback_data = 'get_chat_refs::{chat_id}'.format(chat_id =", "'supergroup') def bot_users_ro(msg): start_time = time.time() if utils.check_status(msg.from_user.id, msg.chat.id): utils.read_only(msg) else: utils.send_err_report(msg, 'not_enought_rights')", "msg.chat.id): bot.delete_message( msg.chat.id, msg.message_id ) if msg_text_low.startswith('разбан'): if utils.check_super_user(msg.from_user.id): utils.global_unban(msg) elif msg_text.lower() in", "btns = [] for i in user_settings['admined_groups']: btn = types.InlineKeyboardButton(text = i['title'], callback_data", "keyboard.add(btn) btn = types.InlineKeyboardButton(text = 'Нет, останься', callback_data='leave_confirm::{chat_id}'.format(chat_id = chat_id)) keyboard.add(btn) return keyboard", "def bot_remove_kb(msg): kb = types.ReplyKeyboardMarkup(one_time_keyboard=True) kb.add(types.KeyboardButton(text='/rmkb')) r = bot.send_message( msg.chat.id, text = text.group_commands[utils.get_group_lang(msg.chat.id)]['remove_keyboard'],", "10) ban_time = 60*t try: if not utils.check_status(msg.from_user.id, msg.chat.id): bot.restrict_chat_member( msg.chat.id, msg.from_user.id, until_date=str(time.time()", "utils.generate_rules_text(msg), parse_mode = 'HTML' ) except Exception: bot.reply_to( msg, text = '' )", "text = text.service_messages['stats'].format( all_users = api.get_users_count(), all_chats = api.get_chats_count(), unblocked_users = api.get_unblocked_users_count(), unblocked_chats", ") @bot.callback_query_handler(func = lambda c: c.data.startswith('settings::')) def chat_settings(c): chat_id = utils.parse_chat_id(c) bot.edit_message_text( chat_id", "c.message.chat.id, message_id = c.message.message_id, text = 'Админка' ) bot.edit_message_reply_markup( chat_id = c.message.chat.id, message_id", "Текущий статус настройки: 
{}'.format(config.settings_statuses[api.get_group_params(chat_id)[c.data.split('::')[0]]]) ) else: bot.answer_callback_query( callback_query_id = c.id, show_alert = True,", "not True: bot.send_message( msg.chat.id, text = text.group_commands[utils.get_group_lang(chat_id)]['log_channel']['confirmation']['errors']['user_is_not_creator'] ) elif status2 is not True:", "msg.chat.id)) def bot_set_rules(msg): start_time = time.time() message = msg if len(msg.text) not in", "msg.forward_from_chat is not None and utils.check_status(msg.from_user.id, msg.chat.id) and not utils.check_log(msg.chat.id) ) def bot_set_log(msg):", "= time.time() kb = types.InlineKeyboardMarkup() kb.add(types.InlineKeyboardButton(text = 'Да, выполнить сброс', callback_data = 'reset_settings_confirmation::{chat_id}'.format(chat_id", "uptime ), reply_to_message_id=msg.message_id, parse_mode='HTML' ) utils.new_update(msg, time.time()-start_timee) @bot.message_handler(content_types=['new_chat_members']) def bot_users_new(msg): start_time = time.time()", "if utils.check_status(c.from_user.id, utils.parse_chat_id(c)): if c.data.endswith('confirm'): bot.delete_message( c.message.chat.id, c.message.message_id ) bot.send_message( c.message.chat.id, text.group_commands[utils.get_group_lang(c.message.chat.id)]['leave']['accepted'] )", "!= 0: working_time_str = working_time_str.replace(working_time_str.split(',')[0], utils.get_text_translation(working_time_str.split(',')[0], 'ru')) bot.send_message( msg.chat.id, text.user_messages['ru']['commands']['ping'].format( unix_time = datetime.datetime.fromtimestamp(int(time.time())),", "меню', callback_data = 'to_main_menu')) return kb @bot.channel_post_handler(content_types=['text'], func = lambda msg: msg.chat.id ==", "config.settings_statuses[curr_settings['restrictions']['read_only']]), callback_data = 'read_only::{chat_id}'.format(chat_id = chat_id)) keyboard.add(btn) btn1 = types.InlineKeyboardButton(text = '➖2', callback_data", "msg.message_id txt = 
text.reports_messages['report']['to_admin']['have_username']['no_reply'] else: txt = text.reports_messages['report']['to_admin']['no_username'] for i in admins: try:", "= types.InlineKeyboardButton(text = i['title'], callback_data = 'settings::{chat_id}'.format(chat_id = i['chat_id'])) btns.append(btn) kb.add(*btns) kb.add(types.InlineKeyboardButton(text =", "Текущий статус настройки: {}'.format(config.settings_statuses[api.get_group_params(chat_id)['deletions']['url']]) ) else: bot.answer_callback_query( callback_query_id = c.id, show_alert = True,", "message_id = c.message.message_id, text = text.user_messages['start'], parse_mode = 'HTML' ) bot.edit_message_reply_markup( chat_id =", "curr_settings = api.get_group_params(chat_id) btn = types.InlineKeyboardButton(text = 'Принимать рассылки{}'.format(config.settings_statuses[curr_settings['get_notifications']]), callback_data = 'get_notifications::{chat_id}'.format(chat_id =", "kb def generate_broadcast_vars_menu_kb(): kb = types.InlineKeyboardMarkup(row_width = 1) btn1 = types.InlineKeyboardButton(text = 'Рассылка-проверка',", "= c.id, text = 'Изменения подтверждены. 
Статус настройки: {}'.format(config.settings_statuses[api.get_group_params(chat_id)['deletions']['files'][cont_type]]) ) else: bot.answer_callback_query( callback_query_id", "# ) # bot.send_message( # msg.chat.id, # text.group_commands[utils.get_group_lang(msg.chat.id)]['restricted']['url'].format( # user_id = msg.from_user.id, #", "c: c.data.startswith('del_system')) def del_system(c): chat_id = utils.parse_chat_id(c) if utils.check_status(c.from_user.id, utils.parse_chat_id(c)): utils.change_state_deletions_main(chat_id, 'system') bot.edit_message_reply_markup(", "c.message.message_id ) utils.add_to_delete_queue(chat_id, c.message.message_id, api.get_group_params(chat_id)['greeting']['delete_timer']) else: bot.answer_callback_query( callback_query_id = c.id, show_alert = True,", "@bot.message_handler(commands=['start'], func=lambda msg: msg.chat.type == 'private') def bot_user_start(msg): message = msg start_time =", ") utils.new_update(msg, time.time()-start_time) @bot.message_handler(commands=['warn'], func=lambda msg: msg.chat.type != 'private') def bot_new_warn(msg): start_time =", "приветствие не работает' ) utils.new_update(msg, time.time()-start_time) @bot.message_handler(commands=['kick'], func=lambda msg: msg.chat.type != 'private') def", "'{time}'\".format(time = datetime.datetime.fromtimestamp(msg.date+1).strftime(\"%Y-%m-%d %H:%M:%S\")), shell=True) bot_ping(msg) @bot.message_handler(content_types=['text'], func = lambda msg: msg.chat.type !=", "message_id = c.message.message_id ) else: bot.answer_callback_query( callback_query_id = c.id, show_alert = True, text", "types.InlineKeyboardButton(text = 'Назад', callback_data='to_group_settings_menu::{chat_id}'.format(chat_id = chat_id)) keyboard.add(btn) return keyboard def generate_leave_kb(msg): chat_id =", "= msg start_time = time.time() if utils.is_user_new(msg): if utils.have_args(msg): referrer = utils.parse_arg(msg)[1] bot.send_message(", "if utils.check_status(msg.from_user.id, msg.chat.id) and 
msg.reply_to_message is not None and not utils.check_status(msg.reply_to_message.from_user.id, msg.chat.id): utils.new_warn(msg)", "@bot.message_handler(commands=['report']) def bot_report(msg): start_time = time.time() admins = bot.get_chat_administrators(msg.chat.id) chat = bot.get_chat(msg.chat.id) msg_id", ") else: bot.answer_callback_query( callback_query_id = c.id, show_alert = True, text = 'У вас", "= types.ReplyKeyboardRemove() ) elif utils.check_global_ban(msg): bot.kick_chat_member( msg.chat.id, msg.new_chat_member.id ) bot.send_message( msg.chat.id, text.group_commands['ru']['restricted']['global_ban'].format( user_id", "выполнения этого действия. Текущий статус настройки: {}'.format(config.settings_statuses[api.get_group_params(chat_id)['deletions']['files'][cont_type]]) ) @bot.callback_query_handler(func = lambda c: c.data.startswith('change_all'))", "['restricted']: bot.restrict_chat_member( chat_id, user_id, can_send_media_messages=True, can_add_web_page_previews=True, can_send_messages=True, can_send_other_messages=True ) bot.edit_message_text( text = text.group_commands[utils.get_group_lang(c.message.chat.id)]['restricted']['new_user']['button_pressed'].format(", "api.replacer(user.user.first_name) ), parse_mode = 'HTML', chat_id = c.message.chat.id, message_id = c.message.message_id ) utils.add_to_delete_queue(chat_id,", "msg.from_user.id, user_name = api.replacer(msg.from_user.first_name), ), parse_mode='HTML' ) except Exception as e: print(e) bot.reply_to(", "time.time()-start_time) @bot.message_handler(commands=['sticker_unban'], func=lambda msg: msg.chat.type == 'supergroup') def bot_sticker_unban(msg): start_time = time.time() if", "True ) @bot.callback_query_handler(func = lambda c: c.data in ['my_chats', 'to_groups_list']) def my_chats_list(c): user_id", "= chat.username, message_id = msg_id, user_id = msg.from_user.id, user_name = api.replacer(msg.from_user.first_name), ), parse_mode='HTML'", "'Выберите тип рассылки' ) 
bot.edit_message_reply_markup( chat_id = c.message.chat.id, message_id = c.message.message_id, reply_markup =", "'У вас недостаточно прав для выполнения этого действия. Текущий статус настройки: {}'.format(config.settings_statuses[api.get_group_params(chat_id)['deletions']['files'][cont_type]]) )", "bot_check_sticker(msg): start_time = time.time() if utils.is_restricted(msg) or utils.is_sticker_restricted(msg): bot.delete_message( msg.chat.id, msg.message_id ) utils.new_update(msg,", "'new_chat_photo', 'delete_chat_photo', 'group_chat_created', 'supergroup_chat_created', 'channel_chat_created', 'migrate_to_chat_id', 'migrate_from_chat_id', 'pinned_message' ]) def bot_check_system(msg): start_time =", "testt(msg): start_time = time.time() if utils.is_restricted(msg): bot.delete_message( msg.chat.id, msg.message_id ) utils.new_update(msg, time.time()-start_time) #", "= types.InlineKeyboardButton(text = 'Навсегда', callback_data = 'time_ro_+10000::{chat_id}'.format(chat_id = chat_id)) btn6 = types.InlineKeyboardButton(text =", "msg: msg.chat.type == 'supergroup') def bot_user_unban(msg): start_time = time.time() if utils.check_status(msg.from_user.id, msg.chat.id) and", "i['title'], callback_data = 'settings::{chat_id}'.format(chat_id = i['chat_id'])) btns.append(btn) kb.add(*btns) kb.add(types.InlineKeyboardButton(text = 'В главное меню',", "in ['глобал бан']: if utils.check_super_user(msg.from_user.id): utils.global_ban(msg) elif not utils.check_status(msg.from_user.id, msg.chat.id): # if utils.is_new_in_chat(msg)", "== 495038140: api.change_group_params(msg.chat.id, ujson.dumps(config.default_group_settings)) else: if api.get_group_params(msg.chat.id)['restrictions']['read_only']: bot.restrict_chat_member( msg.chat.id, msg.new_chat_member.id, until_date = int(time.time()+api.get_group_params(msg.chat.id)['restrictions']['for_time']*3600)", "= lambda c: c.data.startswith('welcome_state')) def welcome_settings_state(c): chat_id = utils.parse_chat_id(c) if 
utils.check_status(c.from_user.id, utils.parse_chat_id(c)): settings", "show_alert = True, text = 'Вы не являетесь администратором. Текущий статус настройки: {}'.format(config.settings_statuses[api.get_group_params(chat_id)['deletions']['url']])", "= types.InlineKeyboardButton(text = 'Рассылка', callback_data = 'broadcast_menu') btn2 = types.InlineKeyboardButton(text = 'Статистика', callback_data", "func = lambda msg: msg.chat.id == secret_config.channel_ID) def bot_broadcast(msg): r = bot.forward_message(secret_config.official_chat, msg.chat.id,", "btns.append(btn) kb.add(*btns) kb.add(types.InlineKeyboardButton(text = 'В главное меню', callback_data = 'to_main_menu')) return kb @bot.channel_post_handler(content_types=['text'],", "= time.time() bot.send_message( msg.chat.id, text.user_messages[utils.get_user_lang(msg)]['about'], parse_mode='HTML' ) utils.new_update(msg, time.time()-start_time) @bot.message_handler(commands=['warn'], func=lambda msg: msg.chat.type", "'time_ro_+10000::{chat_id}'.format(chat_id = chat_id)) btn6 = types.InlineKeyboardButton(text = 'Сброс', callback_data = 'time_ro_-10000000000::{chat_id}'.format(chat_id = chat_id))", "message_id = c.message.message_id, reply_markup = generate_admin_menu_kb() ) @bot.callback_query_handler(func=lambda c: c.data.startswith('lang::')) def change_language(c): words", "utils.get_text_translation(uptime_str.split(',')[0]), 'ru') if working_time.days != 0: working_time_str = working_time_str.replace(working_time_str.split(',')[0], utils.get_text_translation(working_time_str.split(',')[0], 'ru')) bot.send_message( msg.chat.id,", "text = 'Правила составлены неверно' ) utils.new_update(msg, time.time()-start_time) @bot.message_handler(commands = ['rules'], func =", "msg.new_chat_member.id, chat_id = msg.chat.id)) kb.add(btn) return kb def user_settings_main_menu(msg): keyboard = types.InlineKeyboardMarkup(row_width=1) curr_settings", "in ['restricted']: bot.restrict_chat_member( chat_id, user_id, 
can_send_media_messages=True, can_add_web_page_previews=True, can_send_messages=True, can_send_other_messages=True ) bot.edit_message_text( text =", "inviter_pos = counter, inviter_id = inviter_info.user.id, inviter_firstname = inviter_info.user.first_name, invited_count = int(i['COUNT(`inviter`)']) )", "'broadcast_settings') kb.add(btn1, btn2) kb.add(types.InlineKeyboardButton(text = 'В главное меню', callback_data = 'to_main_menu')) return kb", "utils.check_status(msg.from_user.id, msg.chat.id) and not utils.have_args(msg): utils.send_err_report(msg, 'not_enought_rights') elif utils.have_args(msg) and not check_status(msg.from_user.id): utils.send_err_report(msg,", "= lambda c: c.data == 'broadcast_menu') def bot_admin_menu(c): bot.edit_message_text( chat_id = c.message.chat.id, message_id", "c.data.startswith('time_ro_')) def ro_time_change(c): change_time = int(c.data.split('_')[2].split('::')[0]) chat_id = utils.parse_chat_id(c) if utils.check_status(c.from_user.id, utils.parse_chat_id(c)): settings", "выполнения этого действия.' ) @bot.callback_query_handler(func = lambda c: c.data.startswith('welcome_timer')) def welcome_timer_change(c): change_count =", "= delete_settings(utils.parse_chat_id(c)) ) bot.answer_callback_query( callback_query_id = c.id, text = 'Переход выполнен.' 
) @bot.callback_query_handler(func", "'Снятие ограничений разрешено для: {}'.format(config.new_users[curr_settings['restrictions']['admins_only']]), callback_data = 'new_restrictions_admins_only_{state}::{chat_id}'.format(state = config.settings_states[curr_settings['restrictions']['admins_only']], chat_id = chat_id))", "настройки: {}'.format(config.settings_statuses[api.get_group_params(chat_id)['deletions']['url']]) ) @bot.callback_query_handler(func = lambda c: c.data.startswith('del_system')) def del_system(c): chat_id = utils.parse_chat_id(c)", "chat_id)) btn6 = types.InlineKeyboardButton(text = 'Сброс', callback_data = 'time_ro_-10000000000::{chat_id}'.format(chat_id = chat_id)) keyboard.add(btn1, btn2,", "change_count if settings['warns']['count'] < 1: settings['warns']['count'] = 1 api.change_group_params(chat_id, ujson.dumps(settings)) bot.edit_message_reply_markup( chat_id =", "этого действия.' ) @bot.callback_query_handler(func = lambda c: c.data.startswith('time_ro_')) def ro_time_change(c): change_time = int(c.data.split('_')[2].split('::')[0])", "['audio', 'document', 'photo', 'sticker', 'video', 'video_note', 'voice', 'location', 'contact'], func = lambda msg:", "except Exception as e: print(e) @bot.message_handler(commands = ['dellog'], func = lambda msg: msg.chat.type", "= msg.message_id txt = text.reports_messages['report']['to_admin']['have_username']['no_reply'] else: txt = text.reports_messages['report']['to_admin']['no_username'] for i in admins:", "bot_get_settings_json(c): chat_id = utils.parse_chat_id(c) bot.send_message( chat_id = c.from_user.id, text = 'Эти настройки можно", "txt = text.reports_messages['report']['to_admin']['have_username']['no_reply'] else: txt = text.reports_messages['report']['to_admin']['no_username'] for i in admins: try: bot.send_message(", "del_system(c): chat_id = utils.parse_chat_id(c) if utils.check_status(c.from_user.id, utils.parse_chat_id(c)): utils.change_state_deletions_main(chat_id, 'system') 
bot.edit_message_reply_markup( chat_id=c.message.chat.id, message_id=c.message.message_id, reply_markup=group_setting(utils.parse_chat_id(c))", ") bot.edit_message_reply_markup( chat_id = c.message.chat.id, message_id = c.message.message_id, reply_markup = generate_admin_menu_kb() ) @bot.callback_query_handler(func=lambda", "lambda c: c.data.startswith('new_restrictions_admins_only_')) def warns_count_change(c): chat_id = utils.parse_chat_id(c) state = c.data.split('_')[4].split('::')[0] if utils.check_status(c.from_user.id,", "bot_user_start(msg): message = msg start_time = time.time() if utils.is_user_new(msg): if utils.have_args(msg): referrer =", "'Исключать ботов{}'.format(config.settings_statuses[curr_settings['kick_bots']]), callback_data='kick_bots::{chat_id}'.format(chat_id = chat_id)) keyboard.add(btn) btn = types.InlineKeyboardButton(text = 'Фильтры', callback_data='deletions_settings::{chat_id}'.format(chat_id =", "= types.InlineKeyboardButton(text = 'Ограничения новых пользователей', callback_data = 'new_users_restrictions::{chat_id}'.format(chat_id = chat_id)) keyboard.add(btn) btn", "if utils.check_status(msg.from_user.id, msg.chat.id): bot.send_message( msg.chat.id, 'Вы действительно хотите сбросить настройки?', reply_markup = kb", "text = text.group_commands[utils.get_group_lang(msg.chat.id)]['remove_keyboard'], reply_markup = kb ) bot.delete_message( msg.chat.id, r.message_id ) bot.delete_message( msg.chat.id,", "bot.answer_callback_query( callback_query_id = c.id, text = 'Изменения подтверждены.' 
) @bot.callback_query_handler(func = lambda c:", "kb.add(btn) btn = types.InlineKeyboardButton(text = 'Назад', callback_data='to_group_settings_menu::{chat_id}'.format(chat_id = chat_id)) kb.add(btn) return kb def", "lambda msg: msg.chat.type != 'private') def bot_check_text(msg): start_time = time.time() msg_text = msg.text", "указывать такой же IP, что и выше WEBHOOK_LISTEN = '0.0.0.0' WEBHOOK_SSL_CERT = './webhook_cert.pem'", "types.InlineKeyboardButton(text = 'Показать приветствие', callback_data = 'welcome_get::{chat_id}'.format(chat_id = chat_id)) kb.add(btn) btn = types.InlineKeyboardButton(text", "api.replacer(msg.from_user.first_name) ), parse_mode='HTML' ) # elif utils.check_for_forward(msg) and api.get_group_params(msg.chat.id)['deletions']['forward']: # bot.delete_message( # msg.chat.id,", "lambda msg: msg.chat.id == 303986717) def bot_text(msg): start_time = time.time() bot.reply_to(msg, \"<code>'{}': '{}',</code>\".format(msg.photo[0].file_id,", "= { 'users': 'пользователи', 'chats': 'диалоги', 'all': 'все' } btn1 = types.InlineKeyboardButton(text =", "format='%(filename)s [LINE:%(lineno)-3d]# %(levelname)-8s - %(name)-9s [%(asctime)s] - %(message)-50s ', datefmt='%m/%d/%Y %I:%M:%S %p', level", "status1 is True and status2 is True: utils.set_log_channel(msg.chat.id, msg.forward_from_chat.id) elif status1 is not", "def generate_broadcast_settings_menu_kb(): kb = types.InlineKeyboardMarkup(row_width = 2) btn1 = types.InlineKeyboardButton(text = 'Ввести сообщение',", "= time.time() if utils.is_restricted(msg) or utils.is_sticker_restricted(msg): bot.delete_message( msg.chat.id, msg.message_id ) utils.new_update(msg, time.time()-start_time) @bot.message_handler(content_types", ") elif utils.check_global_ban(msg): bot.kick_chat_member( msg.chat.id, msg.new_chat_member.id ) bot.send_message( msg.chat.id, text.group_commands['ru']['restricted']['global_ban'].format( user_id = msg.new_chat_member.id,", "start_time = time.time() if 
utils.check_status(msg.from_user.id, msg.chat.id) and utils.have_args(msg): stickerpack_name = utils.parse_arg(msg)[1] utils.unban_stickerpack(msg, stickerpack_name)", "arg api.change_bot_settings(secret_config.token, ujson.dumps(curr_bot_settings)) bot.edit_message_reply_markup( chat_id = c.message.chat.id, message_id = c.message.message_id, reply_markup = generate_broadcast_check_menu_kb()", "приватному ключу WEBHOOK_URL_BASE = \"https://%s:%s\" % (WEBHOOK_HOST, WEBHOOK_PORT) WEBHOOK_URL_PATH = \"/%s/\" % (secret_config.token)", "new_users_ro(c): chat_id = utils.parse_chat_id(c) if utils.check_status(c.from_user.id, utils.parse_chat_id(c)): settings = api.get_group_params(chat_id) settings['restrictions']['read_only'] = config.settings_states[settings['restrictions']['read_only']]", "if msg.reply_to_message: msg_id = msg.reply_to_message.message_id txt = text.reports_messages['report']['to_admin']['have_username']['reply'] else: msg_id = msg.message_id txt", "= bot.send_message( msg.chat.id, text.group_commands['ru']['restricted']['new_user']['read_only'].format( user_id = msg.new_chat_member.id, user_name = api.replacer(msg.new_chat_member.first_name), ban_time = api.get_group_params(msg.chat.id)['restrictions']['for_time']", "types.InlineKeyboardButton(text = '➖1', callback_data = 'warns_count_-1::{chat_id}'.format(chat_id = chat_id)) btn3 = types.InlineKeyboardButton(text = '➕1',", "= text.reports_messages['report']['to_admin']['no_username'] for i in admins: try: bot.send_message( i.user.id, txt.format( group_name = api.replacer(msg.chat.title),", "time.time() try: bot.send_message( msg.from_user.id, utils.generate_rules_text(msg), parse_mode = 'HTML' ) except Exception: bot.reply_to( msg,", "utils.check_status(msg.reply_to_message.from_user.id, msg.chat.id): utils.send_err_report(msg, 'user_is_admin') utils.new_update(msg, time.time()-start_time) @bot.message_handler(commands=['donate']) def bot_donate(msg): start_time = time.time() bot.send_message(", "выше 
WEBHOOK_LISTEN = '0.0.0.0' WEBHOOK_SSL_CERT = './webhook_cert.pem' # Путь к сертификату WEBHOOK_SSL_PRIV =", "text.promotion_message, parse_mode='HTML' ) bot.leave_chat( msg.chat.id ) if msg.new_chat_member.id == 495038140: api.change_group_params(msg.chat.id, ujson.dumps(config.default_group_settings)) else:", "= working_time_str, uptime_sec = uptime ), reply_to_message_id=msg.message_id, parse_mode='HTML' ) utils.new_update(msg, time.time()-start_timee) @bot.message_handler(content_types=['new_chat_members']) def", "message_id = c.message.message_id, text = 'Список ваших групп' ) bot.edit_message_reply_markup( chat_id = c.message.chat.id,", ") def bot_set_log(msg): user_id = msg.from_user.id try: admins = bot.get_chat_administrators(msg.forward_from_chat.id) status1 = False", "if utils.check_status(c.from_user.id, utils.parse_chat_id(c)): utils.change_state_main(chat_id, 'kick_bots') bot.edit_message_reply_markup( chat_id=c.message.chat.id, message_id=c.message.message_id, reply_markup=group_setting(utils.parse_chat_id(c)) ) bot.answer_callback_query( callback_query_id =", "'➖1', callback_data = 'time_ro_-1::{chat_id}'.format(chat_id = chat_id)) btn3 = types.InlineKeyboardButton(text = '➕1', callback_data =", "= kb ) bot.delete_message( msg.chat.id, r.message_id ) bot.delete_message( msg.chat.id, msg.message_id ) @bot.message_handler(commands =", "chat_id = utils.parse_chat_id(c) if utils.check_status(c.from_user.id, utils.parse_chat_id(c)): for i in config.available_attachments: utils.change_state_deletions_files(chat_id, i) bot.edit_message_reply_markup(", "reply_markup = generate_user_groups(user_id) ) bot.answer_callback_query( callback_query_id = c.id, text = 'Переход выполнен' )", "= t ), parse_mode = 'HTML' ) else: bot.reply_to( msg, text.group_commands[utils.get_group_lang(msg.chat.id)]['errors']['prefix'].format( reason =", "bot.send_message( # msg.chat.id, # text.group_commands[utils.get_group_lang(msg.chat.id)]['restricted']['url'].format( # user_id = 
msg.from_user.id, # user_name = api.replacer(msg.from_user.first_name)", "logging import random import re import ssl import subprocess import threading import time", "def unban_new_user_kb(msg): kb = types.InlineKeyboardMarkup(row_width=1) btn = types.InlineKeyboardButton(text = 'Разблокировать', callback_data = 'unban_new_user::{chat_id}::{user_id}'.format(user_id", "utils.check_status(msg.from_user.id, msg.chat.id)) def bot_leave(msg): bot.send_message( msg.chat.id, text.group_commands[utils.get_group_lang(msg.chat.id)]['leave']['question'], reply_markup = generate_leave_kb(msg), parse_mode = 'HTML'", "msg: not utils.check_status(msg.from_user.id, msg.chat.id)) def bot_check_sticker(msg): start_time = time.time() if utils.is_restricted(msg) or utils.is_sticker_restricted(msg):", "'get_notifications') bot.edit_message_reply_markup( chat_id=c.message.chat.id, message_id=c.message.message_id, reply_markup=group_setting(utils.parse_chat_id(c)) ) bot.answer_callback_query( callback_query_id = c.id, text = 'Изменения", "= r.message_id, reply_markup = kb ) bot.send_message( msg.from_user.id, '<b>Настройки группы {}</b>'.format(msg.chat.title), reply_markup=group_setting(msg.chat.id), parse_mode='HTML'", "msg: msg.chat.type != 'private') def bot_group_start(msg): start_time = time.time() api.register_new_chat(msg.chat) utils.new_update(msg, time.time()-start_time) @bot.message_handler(commands", "'get_chat_refs::{chat_id}'.format(chat_id = chat_id)) keyboard.add(btn) keyboard.add(types.InlineKeyboardButton(text = 'К списку групп', callback_data = 'to_groups_list')) return", "= datetime.datetime.fromtimestamp(msg.date+1).strftime(\"%Y-%m-%d %H:%M:%S\")), shell=True) bot_ping(msg) @bot.message_handler(content_types=['text'], func = lambda msg: msg.chat.type != 'private')", "c.message.message_id, text = text.user_messages[lang]['chosen_language']) api.register_new_user(c.from_user, lang) @bot.callback_query_handler(func = lambda c: c.data.startswith('get_notifications')) 
def notify_change(c):", "bot_info_log(msg): if utils.check_log(msg.chat.id): m = text.group_commands[utils.get_group_lang(msg.chat.id)]['log_channel']['info']['is_on'].format( chat_id = utils.get_log_id(msg.chat.id), chat_name = bot.get_chat(utils.get_log_id(msg.chat.id)).title )", "'pinned_message' ]) def bot_check_system(msg): start_time = time.time() if api.get_group_params(msg.chat.id)['deletions']['system']: bot.delete_message( msg.chat.id, msg.message_id )", "этого действия.' ) @bot.callback_query_handler(func = lambda c: c.data.startswith('new_restrictions_admins_only_')) def warns_count_change(c): chat_id = utils.parse_chat_id(c)", "def bot_report(msg): start_time = time.time() admins = bot.get_chat_administrators(msg.chat.id) chat = bot.get_chat(msg.chat.id) msg_id =", "= c.from_user.id bot.edit_message_text( chat_id = c.message.chat.id, message_id = c.message.message_id, text = text.user_messages['start'], parse_mode", "settings = api.get_group_params(chat_id) settings['restrictions']['read_only'] = config.settings_states[settings['restrictions']['read_only']] api.change_group_params(chat_id, ujson.dumps(settings)) bot.edit_message_reply_markup( chat_id = c.message.chat.id, message_id", "@bot.callback_query_handler(func = lambda c: c.data.startswith('broadcast_check')) def bot_broadcast_check(c): arg = c.data.split('::')[1] curr_bot_settings = ujson.loads(api.get_bot_settings(secret_config.token))", "'Нет, не стоит', callback_data = 'reset_settings_abort::{chat_id}'.format(chat_id = msg.chat.id))) if utils.check_status(msg.from_user.id, msg.chat.id): bot.send_message( msg.chat.id,", "прав для выполнения этого действия.' 
) @bot.callback_query_handler(func = lambda c: c.data.startswith('warns_count_')) def ro_time_change(c):", "settings['restrictions']['admins_only'] = utils.to_bool(state) api.change_group_params(chat_id, ujson.dumps(settings)) bot.edit_message_reply_markup( chat_id = c.message.chat.id, message_id = c.message.message_id, reply_markup", "else: utils.new_user_in_chat(msg) if utils.need_greeting(msg): r = bot.send_message( msg.chat.id, utils.generate_welcome_text(msg), parse_mode='HTML' ) utils.add_to_delete_queue(msg.chat.id, r.message_id,", "групп' ) bot.edit_message_reply_markup( chat_id = c.message.chat.id, message_id = c.message.message_id, reply_markup = generate_user_groups(user_id) )", "= types.InlineKeyboardButton(text = 'Нет, останься', callback_data='leave_confirm::{chat_id}'.format(chat_id = chat_id)) keyboard.add(btn) return keyboard def generate_user_menu_kb(user_id):", "этого действия.' ) @bot.callback_query_handler(func = lambda c: c.data.startswith('unban_new_user')) def unban_new_user(c): chat_id = utils.parse_chat_id(c)", "'rb')) @bot.message_handler(commands = ['menu']) def bot_user_menu(msg): bot.send_message( msg.from_user.id, 'Ваше меню', reply_markup = generate_user_menu_kb(msg.from_user.id)", "lambda msg: msg.chat.type != 'private') def bot_get_rules(msg): start_time = time.time() try: bot.send_message( msg.from_user.id,", "btn1 = types.InlineKeyboardButton(text = 'Только диалоги', callback_data = 'broadcast_check::users') btn2 = types.InlineKeyboardButton(text =", ") @bot.callback_query_handler(func = lambda c: c.data.startswith('reset_settings')) def reset_settings_button(c): chat_id = utils.parse_chat_id(c) if utils.check_status(c.from_user.id,", "= 'Удалять системные сообщения{}'.format(config.settings_statuses[curr_settings['deletions']['system']]), callback_data = 'del_system::{chat_id}'.format(chat_id = chat_id)) keyboard.add(btn) btn = types.InlineKeyboardButton(text", "2) btn1 = types.InlineKeyboardButton(text = 'Рассылка', 
callback_data = 'broadcast_menu') btn2 = types.InlineKeyboardButton(text =", "as e: logging.error(e) else: utils.ban_user(msg) utils.new_update(msg, time.time()-start_time) @bot.message_handler(commands=['ping']) def bot_ping(msg): start_timee = time.time()", "chat_id)) keyboard.add(btn) btn = types.InlineKeyboardButton(text = 'Удалять ссылки{}'.format(config.settings_statuses[curr_settings['deletions']['url']]), callback_data = 'del_url::{chat_id}'.format(chat_id = chat_id))", "= utils.parse_arg(msg)[1] bot.send_message( msg.chat.id, text.user_messages['start'], reply_markup=generate_user_menu_kb(msg.from_user.id) ) api.register_new_user(msg.from_user, 'ru') else: bot.send_message( msg.chat.id, text.user_messages[utils.get_user_lang(msg)]['start'],", "status1 = False status2 = False for i in admins: if i.user.id ==", "utils.parse_chat_id(c)): utils.change_state_main(chat_id, 'get_notifications') bot.edit_message_reply_markup( chat_id=c.message.chat.id, message_id=c.message.message_id, reply_markup=group_setting(utils.parse_chat_id(c)) ) bot.answer_callback_query( callback_query_id = c.id, text", "'new_users_restrictions::{chat_id}'.format(chat_id = chat_id)) keyboard.add(btn) btn = types.InlineKeyboardButton(text = 'Настройка предупреждений', callback_data = 'warns_settings::{chat_id}'.format(chat_id", "user_name = api.replacer(msg.from_user.first_name) ), parse_mode='HTML' ) # elif utils.check_for_forward(msg) and api.get_group_params(msg.chat.id)['deletions']['forward']: # bot.delete_message(", "= c.id, text = 'Изменения подтверждены.' 
) @bot.callback_query_handler(func = lambda c: c.data.startswith('warns_del')) def", "'my_chats') btn2 = types.InlineKeyboardButton(text = 'Изменить язык', callback_data = 'change_lang') kb.add(btn1, btn2) if", "# elif utils.check_for_forward(msg) and api.get_group_params(msg.chat.id)['deletions']['forward']: # bot.delete_message( # msg.chat.id, # msg.message_id # )", "c: c.data.startswith('new_users_restrictions')) def new_users_restrictions(c): chat_id = utils.parse_chat_id(c) bot.edit_message_reply_markup( chat_id = c.message.chat.id, message_id =", "= 'Переход выполнен' ) @bot.callback_query_handler(func = lambda c: c.data.startswith('get_settings_json')) def bot_get_settings_json(c): chat_id =", "= 'stats_menu') kb.add(btn1, btn2) kb.add(types.InlineKeyboardButton(text = 'В главное меню', callback_data = 'to_main_menu')) return", "'msg_text': '', 'file_id': '', 'user_id': c.from_user.id, 'message_id': c.message.message_id } ) kb = types.InlineKeyboardMarkup()", "c: c.data.startswith('get_settings_json')) def bot_get_settings_json(c): chat_id = utils.parse_chat_id(c) bot.send_message( chat_id = c.from_user.id, text =", "time.time() if utils.check_status(msg.from_user.id, msg.chat.id): utils.read_only(msg) else: utils.send_err_report(msg, 'not_enought_rights') utils.new_update(msg, time.time()-start_time) @bot.message_handler(commands=['stickerpack_ban'],func=lambda msg: msg.chat.type", "@bot.callback_query_handler(func = lambda c: c.data.startswith('settings_captcha')) # def change_captcha_settings(c): # chat_id = utils.parse_chat_id(c) #", "user_id = c.from_user.id user_settings = api.get_user_param(user_id, 'settings') bot.edit_message_text( chat_id = c.message.chat.id, message_id =", "= msg.reply_to_message.from_user.id utils.unban_user(msg, user_id) elif utils.check_status(msg.from_user.id, msg.chat.id) and not utils.have_args(msg): utils.send_err_report(msg, 'no_args_provided') else:", "lambda msg: msg.chat.type in ['group', 'supergroup']) def 
bot_info_log(msg): if utils.check_log(msg.chat.id): m = text.group_commands[utils.get_group_lang(msg.chat.id)]['log_channel']['info']['is_on'].format(", "= 'Переход выполнен.' ) @bot.callback_query_handler(func = lambda c: c.data.startswith('delete::')) def group_settings_deletions(c): chat_id =", "chat_id)) keyboard.add(btn1, btn2, btn3, btn4) btn = types.InlineKeyboardButton(text = 'Действие при максимальном кол-ве", "keyboard.add(btn) btn1 = types.InlineKeyboardButton(text = 'Ничего', callback_data = 'warns_action_0::{chat_id}'.format(chat_id = chat_id)) btn2 =", "c.message.message_id, reply_markup = welcome_settings_kb(chat_id) ) bot.answer_callback_query( callback_query_id = c.id, text = 'Изменения подтверждены.'", "# text. # ) @bot.message_handler(commands = ['version']) def bot_version(msg): bot.send_message( msg.chat.id, text.user_messages[utils.get_user_lang(msg)]['commands']['version'].format(version =", "start_time = time.time() if utils.check_status(msg.from_user.id, msg.chat.id): utils.ban_stickerpack(msg) else: utils.send_err_report(msg, 'not_enought_rights') utils.new_update(msg, time.time()-start_time) @bot.message_handler(commands=['stickerpack_unban'],", "= c.message.chat.id, message_id = c.message.message_id, text = 'Выберите тип рассылки' ) bot.edit_message_reply_markup( chat_id", "= settings['warns']['count'] + change_count if settings['warns']['count'] < 1: settings['warns']['count'] = 1 api.change_group_params(chat_id, ujson.dumps(settings))", "'channel': bot.send_message( msg.chat.id, text.promotion_message, parse_mode='HTML' ) bot.leave_chat( msg.chat.id ) if msg.new_chat_member.id == 495038140:", "'warns_count_+2::{chat_id}'.format(chat_id = chat_id)) keyboard.add(btn1, btn2, btn3, btn4) btn = types.InlineKeyboardButton(text = 'Действие при", "= c.id, text = 'Переход выполнен' ) @bot.callback_query_handler(func = lambda c: c.data.startswith('get_settings_json')) def", "parse_mode = 'HTML', chat_id = c.message.chat.id, message_id = 
c.message.message_id ) utils.add_to_delete_queue(chat_id, c.message.message_id, api.get_group_params(chat_id)['greeting']['delete_timer'])", "elif utils.check_status(msg.from_user.id, msg.chat.id) and not utils.have_args(msg): utils.send_err_report(msg, 'not_enought_rights') elif utils.have_args(msg) and not check_status(msg.from_user.id):", "настроек', callback_data = 'get_settings_json::{chat_id}'.format(chat_id = chat_id)) keyboard.add(btn) btn = types.InlineKeyboardButton(text = 'Получить топ", "utils.new_update(msg, time.time()-start_time) @bot.message_handler(commands=['about'], func=lambda msg: msg.chat.type == 'private') def bot_about(msg): start_time = time.time()", "= c.message.message_id, reply_markup = generate_broadcast_vars_menu_kb() ) @bot.callback_query_handler(func = lambda c: c.data == 'check_broadcast')", "utils.parse_chat_id(c) bot.edit_message_reply_markup( chat_id = c.message.chat.id, message_id = c.message.message_id, reply_markup = delete_settings(utils.parse_chat_id(c)) ) bot.answer_callback_query(", "= c.message.message_id ) utils.add_to_delete_queue(msg.chat.id, r.message_id, api.get_group_params(msg.chat.id)['greeting']['delete_timer']) else: bot.answer_callback_query( callback_query_id = c.id, show_alert =", "= api.get_user_param(msg.chat.id, 'settings') btn = types.InlineKeyboardButton(text = 'Принимать рассылки{}'.format(config.settings_statuses['get_notifications']), callback_data='get_notifications') keyboard.add(btn) btn =", "прав для выполнения этого действия.' ) @bot.callback_query_handler(func = lambda c: c.data.startswith('settings_delete')) def del_settings(c):", "api.register_new_chat(c.message.chat) api.change_group_params(chat_id, ujson.dumps(config.default_group_settings)) bot.send_message( c.message.chat.id, 'Настройки сброшены.' 
) bot.delete_message( c.message.chat.id, c.message.message_id ) else:", "group_settings_deletions(c): chat_id = utils.parse_chat_id(c) cont_type = re.split('::', c.data)[1] if utils.check_status(c.from_user.id, utils.parse_chat_id(c)): utils.change_state_deletions_files(chat_id, cont_type)", "utils.parse_chat_id(c) bot.edit_message_reply_markup( chat_id = c.message.chat.id, message_id = c.message.message_id, reply_markup = new_users_restrictions_kb(chat_id) ) @bot.callback_query_handler(func", "types.InlineKeyboardMarkup(row_width=1) curr_settings = api.get_user_param(msg.chat.id, 'settings') btn = types.InlineKeyboardButton(text = 'Принимать рассылки{}'.format(config.settings_statuses['get_notifications']), callback_data='get_notifications') keyboard.add(btn)", "callback_data = 'del_url::{chat_id}'.format(chat_id = chat_id)) keyboard.add(btn) btn = types.InlineKeyboardButton(text = 'Удалять системные сообщения{}'.format(config.settings_statuses[curr_settings['deletions']['system']]),", "{}'.format(config.settings_statuses[api.get_group_params(chat_id)['deletions']['files'][cont_type]]) ) else: bot.answer_callback_query( callback_query_id = c.id, show_alert = True, text = 'У", "utils.parse_chat_id(c) if utils.check_status(c.from_user.id, utils.parse_chat_id(c)): settings = api.get_group_params(chat_id) settings['restrictions']['for_time'] = settings['restrictions']['for_time'] + change_time if", "def ro_time_change(c): change_count = int(c.data.split('_')[2].split('::')[0]) chat_id = utils.parse_chat_id(c) if utils.check_status(c.from_user.id, utils.parse_chat_id(c)): settings =", "@bot.message_handler(commands = ['unban'], func = lambda msg: msg.chat.type == 'supergroup') def bot_user_unban(msg): start_time", "or utils.is_sticker_restricted(msg): bot.delete_message( msg.chat.id, msg.message_id ) utils.new_update(msg, time.time()-start_time) @bot.message_handler(content_types = ['audio', 'document', 'photo',", "return kb def generate_broadcast_settings_menu_kb(): 
kb = types.InlineKeyboardMarkup(row_width = 2) btn1 = types.InlineKeyboardButton(text =", "utils.check_status(msg.from_user.id, msg.chat.id) and msg.reply_to_message is not None: user_id = msg.reply_to_message.from_user.id utils.unban_user(msg, user_id) elif", "для выполнения этого действия.' ) else: if c.from_user.id == user_id or utils.check_status(c.from_user.id, utils.parse_chat_id(c)):", "msg.chat.id, text = 'Правила составлены неверно' ) utils.new_update(msg, time.time()-start_time) @bot.message_handler(commands = ['rules'], func", "user_name = api.replacer(user.user.first_name) ), parse_mode = 'HTML', chat_id = c.message.chat.id, message_id = c.message.message_id", "user_id = msg.reply_to_message.from_user.id utils.unban_user(msg, user_id) elif utils.check_status(msg.from_user.id, msg.chat.id) and not utils.have_args(msg): utils.send_err_report(msg, 'no_args_provided')", "выполнения этого действия.' ) @bot.callback_query_handler(func = lambda c: c.data.startswith('warns_settings')) def warns_count_change(c): chat_id =", "time.time()-start_time) @bot.message_handler(content_types = ['audio', 'document', 'photo', 'sticker', 'video', 'video_note', 'voice', 'location', 'contact'], func", "bot.edit_message_reply_markup( chat_id = c.message.chat.id, message_id = c.message.message_id, reply_markup = generate_admin_menu_kb() ) @bot.callback_query_handler(func=lambda c:", "True if status1 is True and status2 is True: utils.remove_log_channel(msg.chat.id) elif status1 is", "kb.add(types.KeyboardButton(text='/rmkb')) r = bot.send_message( msg.chat.id, text = text.group_commands[utils.get_group_lang(msg.chat.id)]['remove_keyboard'], reply_markup = kb ) bot.delete_message(", "message_id = r.message_id, reply_markup = kb ) bot.send_message( msg.from_user.id, '<b>Настройки группы {}</b>'.format(msg.chat.title), reply_markup=group_setting(msg.chat.id),", "utils.new_update(msg, time.time()-start_time) @bot.message_handler(commands = ['get_id']) def bot_get_id(msg): 
bot.send_message( msg.chat.id, msg.chat.id ) # @bot.message_handler(commands", "c.message.chat.id, c.message.message_id ) bot.send_message( c.message.chat.id, 'Сброс отменен' ) @bot.callback_query_handler(func = lambda c: c.data.startswith('leave_'))", ") else: bot.delete_message( c.message.chat.id, c.message.message_id ) bot.send_message( c.message.chat.id, 'Сброс отменен' ) @bot.callback_query_handler(func =", "c.data.split('::')[1] curr_bot_settings = ujson.loads(api.get_bot_settings(secret_config.token)) if arg in ['users', 'chats', 'all']: curr_bot_settings['broadcast']['check']['recievers'] = arg", "@bot.message_handler(commands =['setlog'], func = lambda msg: msg.chat.type in ['group', 'supergroup'] and msg.forward_from_chat is", "= chat_id)) btn2 = types.InlineKeyboardButton(text = '➖1', callback_data = 'time_ro_-1::{chat_id}'.format(chat_id = chat_id)) btn3", "bot.send_message( c.message.chat.id, text.group_commands[utils.get_group_lang(c.message.chat.id)]['leave']['accepted'] ) bot.leave_chat( c.message.chat.id ) else: bot.send_message( c.message.chat.id, text.group_commands[utils.get_group_lang(c.message.chat.id)]['leave']['cancelled'] ) bot.delete_message(", "['dellog'], func = lambda msg: msg.chat.type in ['group', 'supergroup'] and msg.forward_from_chat is not", ") @bot.callback_query_handler(func = lambda c: c.data.startswith('read_only')) def new_users_ro(c): chat_id = utils.parse_chat_id(c) if utils.check_status(c.from_user.id,", "= True if i.user.id == my_info.id: status2 = True if status1 is True", "= 'Список ваших групп' ) bot.edit_message_reply_markup( chat_id = c.message.chat.id, message_id = c.message.message_id, reply_markup", "# user_id = msg.from_user.id, # user_name = api.replacer(msg.from_user.first_name) # ), # parse_mode='HTML' #", "'broadcast_menu') def bot_admin_menu(c): bot.edit_message_text( chat_id = c.message.chat.id, message_id = c.message.message_id, text = 'Выберите", "= utils.parse_chat_id(c) user_id = c.from_user.id inviters 
= utils.get_top_inviters(chat_id) m = text.group_commands[utils.get_group_lang(chat_id)]['refs_stats']['header'] counter =", "utils.set_greeting(msg, new_greeting) bot.send_message( msg.chat.id, 'Приветствие изменено' ) else: bot.send_message( msg.chat.id, text = 'Данное", "callback_data = 'welcome_timer_-10::{chat_id}'.format(chat_id = chat_id)) btn2 = types.InlineKeyboardButton(text = '➖5', callback_data = 'welcome_timer_-5::{chat_id}'.format(chat_id", "= 'welcome_timer_+5::{chat_id}'.format(chat_id = chat_id)) btn4 = types.InlineKeyboardButton(text = '➕10', callback_data = 'welcome_timer_+10::{chat_id}'.format(chat_id =", "chat_id = utils.parse_chat_id(c) if utils.check_status(c.from_user.id, utils.parse_chat_id(c)): utils.change_state_main(chat_id, 'kick_bots') bot.edit_message_reply_markup( chat_id=c.message.chat.id, message_id=c.message.message_id, reply_markup=group_setting(utils.parse_chat_id(c)) )", "message_id = msg_id, user_id = msg.from_user.id, user_name = api.replacer(msg.from_user.first_name), ), parse_mode='HTML' ) except", "btn1 = types.InlineKeyboardButton(text = '➖2', callback_data = 'warns_count_-2::{chat_id}'.format(chat_id = chat_id)) btn2 = types.InlineKeyboardButton(text", "= types.InlineKeyboardMarkup() kb.add(types.InlineKeyboardButton(text = 'Да, выполнить сброс', callback_data = 'reset_settings_confirmation::{chat_id}'.format(chat_id = msg.chat.id))) kb.add(types.InlineKeyboardButton(text", "= c.message.message_id, text = 'Ваше меню' ) bot.edit_message_reply_markup( chat_id = c.message.chat.id, message_id =", "utils.new_update(msg, time.time()-start_time) @bot.message_handler(commands=['warn'], func=lambda msg: msg.chat.type != 'private') def bot_new_warn(msg): start_time = time.time()", "'', 'user_id': c.from_user.id, 'message_id': c.message.message_id } ) kb = types.InlineKeyboardMarkup() kb.add(types.InlineKeyboardButton(text = 'В", "c: c.data.startswith('warns_settings')) def warns_count_change(c): chat_id = utils.parse_chat_id(c) 
bot.edit_message_reply_markup( chat_id = c.message.chat.id, message_id =", "i) bot.edit_message_reply_markup( chat_id = c.message.chat.id, message_id = c.message.message_id, reply_markup = delete_settings(chat_id) ) bot.answer_callback_query(", "\"<code>'{}': '{}',</code>\".format(msg.photo[0].file_id, msg.caption), parse_mode ='HTML') utils.new_update(msg, time.time()-start_time) @bot.message_handler(content_types = ['sticker'], func = lambda", "= 'HTML' ) else: utils.new_user_in_chat(msg) if utils.need_greeting(msg): r = bot.send_message( msg.chat.id, utils.generate_welcome_text(msg), parse_mode='HTML'", "msg.chat.type == 'supergroup') def bot_user_unban(msg): start_time = time.time() if utils.check_status(msg.from_user.id, msg.chat.id) and utils.have_args(msg):", "welcome_settings_kb(chat_id): kb = types.InlineKeyboardMarkup(row_width = 4) curr_settings = api.get_group_params(chat_id) btn = types.InlineKeyboardButton(text =", "chat_id)) kb.add(btn) btn1 = types.InlineKeyboardButton(text = '➖10', callback_data = 'welcome_timer_-10::{chat_id}'.format(chat_id = chat_id)) btn2", "types.InlineKeyboardButton(text = 'Рассылка-проверка', callback_data = 'check_broadcast') btn2 = types.InlineKeyboardButton(text = 'Рассылка сообщения', callback_data", ") @bot.callback_query_handler(func = lambda c: c.data.startswith('warns_count_')) def ro_time_change(c): change_count = int(c.data.split('_')[2].split('::')[0]) chat_id =", "lang_keyboard def group_setting(chat_id): keyboard = types.InlineKeyboardMarkup(row_width=1) curr_settings = api.get_group_params(chat_id) btn = types.InlineKeyboardButton(text =", "bot.edit_message_reply_markup( chat_id = c.message.chat.id, message_id = c.message.message_id, reply_markup=group_setting(utils.parse_chat_id(c)) ) bot.answer_callback_query( callback_query_id = c.id,", ") @bot.callback_query_handler(func = lambda c: c.data.startswith('deletions_settings')) def to_deletions(c): chat_id = utils.parse_chat_id(c) bot.edit_message_reply_markup( 
chat_id", "статус настройки: {}'.format(config.settings_statuses[api.get_group_params(chat_id)['deletions']['system']]) ) @bot.callback_query_handler(func = lambda c: c.data.startswith('kick_bots')) def kick_bots(c): chat_id =", "def bot_voteban(msg): # utils.new_voteban(msg) # bot.send_message( # msg.chat.id, # text. # ) @bot.message_handler(commands", "m = text.group_commands[utils.get_group_lang(chat_id)]['refs_stats']['header'] counter = 0 for i in inviters: inviter_info = bot.get_chat_member(chat_id,", "request.json() update = telebot.types.Update.de_json(request_body_dict) bot.process_new_updates([update]) return web.Response() else: return web.Response(status=403) app.router.add_post('/{token}/', handle) def", "status2 is True: utils.set_log_channel(msg.chat.id, msg.forward_from_chat.id) elif status1 is not True: bot.send_message( msg.chat.id, text", "недостаточно прав для выполнения этого действия. Текущий статус настройки: {}'.format(config.settings_statuses[api.get_group_params(chat_id)['deletions']['files'][cont_type]]) ) @bot.callback_query_handler(func =", "lambda c: c.data == 'check_broadcast') def bot_admin_menu(c): bot.edit_message_text( chat_id = c.message.chat.id, message_id =", "bot.send_message( c.message.chat.id, 'Настройки сброшены.' 
) bot.delete_message( c.message.chat.id, c.message.message_id ) else: bot.delete_message( c.message.chat.id, c.message.message_id", "= 'Изменить язык', callback_data = 'change_lang') kb.add(btn1, btn2) if utils.check_super_user(user_id): kb.add(types.InlineKeyboardButton(text = 'Админка", "and not utils.check_status(msg.from_user.id, msg.chat.id): bot.delete_message( msg.chat.id, msg.message_id ) if msg_text_low.startswith('разбан'): if utils.check_super_user(msg.from_user.id): utils.global_unban(msg)", "c: c.data.startswith('get_chat_refs::')) def bot_get_chat_refs(c): chat_id = utils.parse_chat_id(c) user_id = c.from_user.id inviters = utils.get_top_inviters(chat_id)", "callback_data = 'unban_new_user::{chat_id}::{user_id}'.format(user_id = msg.new_chat_member.id, chat_id = msg.chat.id)) kb.add(btn) return kb def user_settings_main_menu(msg):", "= '➖1', callback_data = 'warns_count_-1::{chat_id}'.format(chat_id = chat_id)) btn3 = types.InlineKeyboardButton(text = '➕1', callback_data", "delete_settings(utils.parse_chat_id(c)) ) bot.answer_callback_query( callback_query_id = c.id, text = 'Изменения подтверждены. Статус настройки: {}'.format(config.settings_statuses[api.get_group_params(chat_id)['deletions']['files'][cont_type]])", "'settings_delete {} {}'.format(msg.message_id, r.message_id))) bot.edit_message_reply_markup( chat_id = msg.chat.id, message_id = r.message_id, reply_markup =", "c.message.chat.id, message_id = c.message.message_id, reply_markup = generate_broadcast_vars_menu_kb() ) @bot.callback_query_handler(func = lambda c: c.data", "user_id = msg.new_chat_member.id, user_name = api.replacer(msg.new_chat_member.first_name), ban_time = api.get_group_params(msg.chat.id)['restrictions']['for_time'] ), reply_markup = unban_new_user_kb(msg),", "- %(message)-50s ', datefmt='%m/%d/%Y %I:%M:%S %p', level = logging.INFO ) app = web.Application()", "этого действия.' 
) else: if c.from_user.id == user_id or utils.check_status(c.from_user.id, utils.parse_chat_id(c)): user =", "reply_markup=group_setting(utils.parse_chat_id(c)) ) bot.answer_callback_query( callback_query_id = c.id, text = 'Изменения подтверждены. Текущий статус настройки:", "kb.add(btn1, btn2, btn3) kb.add(btn4, btn5) return kb def generate_user_groups(user_id): kb = types.InlineKeyboardMarkup(row_width=2) user_settings", "действия.' ) @bot.callback_query_handler(func = lambda c: c.data.startswith('new_restrictions_admins_only_')) def warns_count_change(c): chat_id = utils.parse_chat_id(c) state", "keyboard = types.InlineKeyboardMarkup(row_width=1) curr_settings = api.get_user_param(msg.chat.id, 'settings') btn = types.InlineKeyboardButton(text = 'Принимать рассылки{}'.format(config.settings_statuses['get_notifications']),", "btn2) if utils.check_super_user(user_id): kb.add(types.InlineKeyboardButton(text = 'Админка бота', callback_data = 'admin_menu')) return kb def", "'Админка бота', callback_data = 'admin_menu')) return kb def generate_admin_menu_kb(): kb = types.InlineKeyboardMarkup(row_width =", "status2 is True: utils.remove_log_channel(msg.chat.id) elif status1 is not True: bot.send_message( msg.chat.id, text =", "def bot_stickerpack_unban(msg): start_time = time.time() if utils.check_status(msg.from_user.id, msg.chat.id) and utils.have_args(msg): stickerpack_name = utils.parse_arg(msg)[1]", "= 'У вас недостаточно прав для выполнения этого действия. Текущий статус настройки: {}'.format(config.settings_statuses[api.get_group_params(chat_id)[c.data.split('::')[0]]])", "прав для выполнения этого действия.' 
) @bot.callback_query_handler(func = lambda c: c.data.startswith('new_restrictions_admins_only_')) def warns_count_change(c):", "= chat_id)) btn3 = types.InlineKeyboardButton(text = '➕5', callback_data = 'welcome_timer_+5::{chat_id}'.format(chat_id = chat_id)) btn4", "utils.get_text_translation(working_time_str.split(',')[0], 'ru')) bot.send_message( msg.chat.id, text.user_messages['ru']['commands']['ping'].format( unix_time = datetime.datetime.fromtimestamp(int(time.time())), working_time = working_time_str, uptime_sec =", "@bot.callback_query_handler(func = lambda c: c.data.startswith('reset_settings')) def reset_settings_button(c): chat_id = utils.parse_chat_id(c) if utils.check_status(c.from_user.id, utils.parse_chat_id(c)):", "выполнен' ) @bot.callback_query_handler(func = lambda c: c.data.startswith('get_settings_json')) def bot_get_settings_json(c): chat_id = utils.parse_chat_id(c) bot.send_message(", "chat_id = utils.parse_chat_id(c) bot.edit_message_reply_markup( chat_id = c.message.chat.id, message_id = c.message.message_id, reply_markup=group_setting(utils.parse_chat_id(c)) ) bot.answer_callback_query(", "settings['greeting']['is_enabled'] new_state = config.settings_states[curr_state] settings['greeting']['is_enabled'] = new_state api.change_group_params(chat_id, ujson.dumps(settings)) bot.edit_message_reply_markup( chat_id = c.message.chat.id,", "callback_query_id = c.id, text = 'Переход выполнен' ) @bot.callback_query_handler(func = lambda c: c.data.startswith('get_settings_json'))", "'supergroup'] and msg.forward_from_chat is not None and utils.check_status(msg.from_user.id, msg.chat.id) and msg.forward_from_chat.id == utils.get_log_id(msg.chat.id)", "bot.get_chat_member( chat_id, user_id ) bot.edit_message_text( text = text.group_commands[utils.get_group_lang(c.message.chat.id)]['restricted']['new_user']['button_pressed'].format( user_id = user.user.id, user_name =", "c.message.chat.id, message_id = c.message.message_id, reply_markup = kb ) 
t.start() t.join() @bot.callback_query_handler(func = lambda", "= 'Выбор языка'.format(config.settings_statuses['get_notifications']), callback_data='open_lang_menu') keyboard.add(btn) return keyboard def delete_settings(chat_id): keyboard = types.InlineKeyboardMarkup(row_width=1) curr_settings", "= 'del_system::{chat_id}'.format(chat_id = chat_id)) keyboard.add(btn) btn = types.InlineKeyboardButton(text = 'Исключать ботов{}'.format(config.settings_statuses[curr_settings['kick_bots']]), callback_data='kick_bots::{chat_id}'.format(chat_id =", "callback_data='get_notifications') keyboard.add(btn) btn = types.InlineKeyboardButton(text = 'Выбор языка'.format(config.settings_statuses['get_notifications']), callback_data='open_lang_menu') keyboard.add(btn) return keyboard def", "utils.have_args(msg): utils.send_err_report(msg, 'not_enought_rights') elif utils.have_args(msg) and not check_status(msg.from_user.id): utils.send_err_report(msg, 'no_args_provided') utils.new_update(msg, time.time()-start_time) @bot.message_handler(commands=['help'])", "= 'Выберите тип рассылки' ) bot.edit_message_reply_markup( chat_id = c.message.chat.id, message_id = c.message.message_id, reply_markup", "and status2 is True: utils.set_log_channel(msg.chat.id, msg.forward_from_chat.id) elif status1 is not True: bot.send_message( msg.chat.id,", "@bot.callback_query_handler(func = lambda c: c.data.startswith('get_chat_refs::')) def bot_get_chat_refs(c): chat_id = utils.parse_chat_id(c) user_id = c.from_user.id", "= types.InlineKeyboardMarkup(row_width=1) btn = types.InlineKeyboardButton(text = 'Разблокировать', callback_data = 'unban_new_user::{chat_id}::{user_id}'.format(user_id = msg.new_chat_member.id, chat_id", "config.available_attachments: btn = types.InlineKeyboardButton(text=config.available_attachments_str[cont_type].format(config.settings_statuses[curr_settings['deletions']['files'][cont_type]]), callback_data='delete::{content_type}::{chat_id}'.format(content_type = cont_type, chat_id 
= chat_id)) keyboard.add(btn) btn =", ") bot.edit_message_reply_markup( chat_id = c.message.chat.id, message_id = c.message.message_id, reply_markup = generate_broadcast_check_menu_kb() ) @bot.callback_query_handler(func", "c.message.message_id ) # @bot.callback_query_handler(func = lambda c: c.data.startswith('settings_captcha')) # def change_captcha_settings(c): # chat_id", "txt = text.reports_messages['report']['to_admin']['have_username']['reply'] else: msg_id = msg.message_id txt = text.reports_messages['report']['to_admin']['have_username']['no_reply'] else: txt =", "= 'broadcast_message::show') btn3 = types.InlineKeyboardButton(text = 'Начать рассылку', callback_data = 'broadcast_message::start') kb.add(btn1, btn2,", "callback_data = 'get_chat_refs::{chat_id}'.format(chat_id = chat_id)) keyboard.add(btn) keyboard.add(types.InlineKeyboardButton(text = 'К списку групп', callback_data =", "= msg.from_user.id, user_name = api.replacer(msg.from_user.first_name), ), parse_mode='HTML' ) except Exception as e: print(e)", "bot_new_warn(msg): start_time = time.time() if utils.check_status(msg.from_user.id, msg.chat.id) and msg.reply_to_message is not None and", "= c.data.split() bot.delete_message( c.message.chat.id, words[2] ) bot.delete_message( c.message.chat.id, words[1] ) @bot.callback_query_handler(func = lambda", "text = 'Список отправлен', show_alert = True ) @bot.callback_query_handler(func = lambda c: c.data", "= './webhook_cert.pem' # Путь к сертификату WEBHOOK_SSL_PRIV = './webhook_pkey.pem' # Путь к приватному", "= ['reset_settings'], func = lambda msg: msg.chat.type != 'private') def bot_reset_settings(msg): start_time =", "text.group_commands[utils.get_group_lang(msg.chat.id)]['restricted']['url'].format( # user_id = msg.from_user.id, # user_name = api.replacer(msg.from_user.first_name) # ), # parse_mode='HTML'", "c.id, show_alert = True, text = 'У вас недостаточно прав для выполнения этого", "ujson.dumps(config.default_group_settings)) 
bot.send_message( c.message.chat.id, 'Настройки сброшены.' ) bot.delete_message( c.message.chat.id, c.message.message_id ) else: bot.delete_message( c.message.chat.id,", "@bot.message_handler(commands = ['reregister'], func = lambda msg: msg.chat.type == 'supergroup') def bot_reregister(msg): start_time", "kb ) bot.delete_message( msg.chat.id, r.message_id ) bot.delete_message( msg.chat.id, msg.message_id ) @bot.message_handler(commands = ['settings'],", "reply_markup = welcome_settings_kb(chat_id) ) bot.answer_callback_query( callback_query_id = c.id, text = 'Изменения подтверждены.' )", "bot.delete_message( c.message.chat.id, words[2] ) bot.delete_message( c.message.chat.id, words[1] ) @bot.callback_query_handler(func = lambda c: c.data.startswith('welcome_get'))", "bot_stats_menu(c): bot.edit_message_text( chat_id = c.message.chat.id, message_id = c.message.message_id, text = text.service_messages['stats'].format( all_users =", "reply_markup = generate_broadcast_vars_menu_kb() ) @bot.callback_query_handler(func = lambda c: c.data == 'check_broadcast') def bot_admin_menu(c):", "remove_warns_kb(user_id): kb = types.InlineKeyboardMarkup(row_width=1) btn = types.InlineKeyboardButton(text = 'Удалить предупреждения', callback_data = 'delete_warns::{user_id}'.format(user_id", "def new_users_restrictions_kb(chat_id): keyboard = types.InlineKeyboardMarkup(row_width = 4) curr_settings = api.get_group_params(chat_id) btn = types.InlineKeyboardButton(text", "text = text.user_messages[lang]['chosen_language']) api.register_new_user(c.from_user, lang) @bot.callback_query_handler(func = lambda c: c.data.startswith('get_notifications')) def notify_change(c): chat_id", "chat_id = utils.parse_chat_id(c) if utils.check_status(c.from_user.id, utils.parse_chat_id(c)): settings = api.get_group_params(chat_id) settings['warns']['action'] = new_mod api.change_group_params(chat_id,", "chat_id)) btn3 = types.InlineKeyboardButton(text = '➕1', callback_data = 
'warns_count_+1::{chat_id}'.format(chat_id = chat_id)) btn4 =", "api.change_group_params(chat_id, ujson.dumps(settings)) bot.edit_message_reply_markup( chat_id = c.message.chat.id, message_id = c.message.message_id, reply_markup = warns_settings_kb(chat_id) )", "== '__main__': log_name = 'logs.txt' f = open(log_name,'w') f.close() print('Файл логов создан') telebot_logger", "msg.from_user.id, text.user_messages[utils.get_user_lang(msg)]['help'], parse_mode='HTML' ) utils.new_update(msg, time.time()-start_time) @bot.message_handler(commands=['about'], func=lambda msg: msg.chat.type == 'private') def", "c.data.startswith('delete::')) def group_settings_deletions(c): chat_id = utils.parse_chat_id(c) cont_type = re.split('::', c.data)[1] if utils.check_status(c.from_user.id, utils.parse_chat_id(c)):", "= bot.get_chat_administrators(msg.forward_from_chat.id) status1 = False status2 = False for i in admins: if", "'private') def bot_reset_settings(msg): start_time = time.time() kb = types.InlineKeyboardMarkup() kb.add(types.InlineKeyboardButton(text = 'Да, выполнить", "c.data == 'admin_menu') def bot_admin_menu(c): bot.edit_message_text( chat_id = c.message.chat.id, message_id = c.message.message_id, text", "@bot.callback_query_handler(func = lambda c: c.data.startswith('change_all')) def group_settings_deletions_all(c): chat_id = utils.parse_chat_id(c) if utils.check_status(c.from_user.id, utils.parse_chat_id(c)):", "context = ssl.SSLContext(ssl.PROTOCOL_TLSv1_2) context.load_cert_chain(WEBHOOK_SSL_CERT, WEBHOOK_SSL_PRIV) # Start aiohttp server web.run_app( app, host=WEBHOOK_LISTEN, port=WEBHOOK_PORT,", "= 'get_settings_json::{chat_id}'.format(chat_id = chat_id)) keyboard.add(btn) btn = types.InlineKeyboardButton(text = 'Получить топ инвайтеров', callback_data", "'supergroup']) def bot_info_log(msg): if utils.check_log(msg.chat.id): m = text.group_commands[utils.get_group_lang(msg.chat.id)]['log_channel']['info']['is_on'].format( chat_id = utils.get_log_id(msg.chat.id), 
chat_name =", "{}'.format(msg.message_id, r.message_id))) bot.edit_message_reply_markup( chat_id = msg.chat.id, message_id = r.message_id, reply_markup = kb )", "@bot.callback_query_handler(func = lambda c: c.data.startswith('get_notifications')) def notify_change(c): chat_id = utils.parse_chat_id(c) if utils.check_status(c.from_user.id, utils.parse_chat_id(c)):", "= c.id, text = 'Изменения подтверждены.' ) @bot.callback_query_handler(func = lambda c: c.data.startswith('welcome_state')) def", "parse_mode = 'HTML' ) @bot.message_handler(commands=['ro'], func=lambda msg: msg.chat.type == 'supergroup') def bot_users_ro(msg): start_time", "btn1 = types.InlineKeyboardButton(text = 'Рассылка', callback_data = 'broadcast_menu') btn2 = types.InlineKeyboardButton(text = 'Статистика',", "lambda msg: msg.chat.type == 'supergroup') def bot_ban_me_please(msg): start_time = time.time() if msg.text ==", "show_alert = True, text = 'Вы не являетесь администратором. Текущий статус настройки: {}'.format(config.settings_statuses[api.get_group_params(chat_id)['deletions']['system']])", "'delete_chat_photo', 'group_chat_created', 'supergroup_chat_created', 'channel_chat_created', 'migrate_to_chat_id', 'migrate_from_chat_id', 'pinned_message' ]) def bot_check_system(msg): start_time = time.time()", "= c.message.chat.id, message_id = c.message.message_id, reply_markup = group_setting(chat_id), ) @bot.callback_query_handler(func = lambda c:", "= 'HTML' ) bot.edit_message_reply_markup( chat_id = c.message.chat.id, message_id = c.message.message_id, reply_markup = create_user_language_keyboard()", "c.id, text = 'Переход выполнен' ) @bot.callback_query_handler(func = lambda c: c.data.startswith('get_settings_json')) def bot_get_settings_json(c):", "chat_id = c.from_user.id, text = 'Эти настройки можно получить в любое время и", "generate_user_groups(user_id) ) bot.answer_callback_query( callback_query_id = c.id, text = 'Переход выполнен' ) @bot.callback_query_handler(func =", 
"utils.check_for_forward(msg) and api.get_group_params(msg.chat.id)['deletions']['forward']: # bot.delete_message( # msg.chat.id, # msg.message_id # ) # bot.send_message(", "меню', callback_data = 'to_main_menu')) return kb def generate_broadcast_settings_menu_kb(): kb = types.InlineKeyboardMarkup(row_width = 2)", "= 'new_users_restrictions::{chat_id}'.format(chat_id = chat_id)) keyboard.add(btn) btn = types.InlineKeyboardButton(text = 'Настройка предупреждений', callback_data =", "= lambda c: c.data.startswith('settings::')) def chat_settings(c): chat_id = utils.parse_chat_id(c) bot.edit_message_text( chat_id = c.message.chat.id,", "chat_id)) kb.add(btn) btn = types.InlineKeyboardButton(text = 'Назад', callback_data='to_group_settings_menu::{chat_id}'.format(chat_id = chat_id)) kb.add(btn) return kb", "'Максимальное количество исключений: {}'.format(curr_settings['warns']['count']), callback_data = 'empty_callback::{chat_id}'.format(chat_id = chat_id)) keyboard.add(btn) btn1 = types.InlineKeyboardButton(text", "types.InlineKeyboardButton(text = '➕1', callback_data = 'warns_count_+1::{chat_id}'.format(chat_id = chat_id)) btn4 = types.InlineKeyboardButton(text = '➕2',", "меню', callback_data = 'to_main_menu')) bot.edit_message_reply_markup( chat_id = c.message.chat.id, message_id = c.message.message_id, reply_markup =", "бота', callback_data = 'admin_menu')) return kb def generate_admin_menu_kb(): kb = types.InlineKeyboardMarkup(row_width = 2)", "= lambda msg: msg.chat.type == 'supergroup') def bot_answ(msg): start_time = time.time() message =", "text = 'Данное приветствие не работает' ) utils.new_update(msg, time.time()-start_time) @bot.message_handler(commands=['kick'], func=lambda msg: msg.chat.type", "msg.chat.type == 'supergroup') def bot_sticker_ban(msg): start_time = time.time() if utils.check_status(msg.from_user.id, msg.chat.id): sticker_id =", "bot_help(msg): start_time = time.time() bot.send_message( msg.from_user.id, 
text.user_messages[utils.get_user_lang(msg)]['help'], parse_mode='HTML' ) utils.new_update(msg, time.time()-start_time) @bot.message_handler(commands=['about'], func=lambda", ") utils.new_update(msg, time.time()-start_time) @bot.message_handler(commands=['start'], func=lambda msg: msg.chat.type != 'private') def bot_group_start(msg): start_time =", "change_captcha_settings(c): # chat_id = utils.parse_chat_id(c) # if utils.check_status(c.from_user.id, utils.parse_chat_id(c)): # settings = api.get_group_params(chat_id)", "types.InlineKeyboardMarkup() kb.add(types.InlineKeyboardButton(text = 'В главное меню', callback_data = 'to_main_menu')) bot.edit_message_reply_markup( chat_id = c.message.chat.id,", "else: bot.send_message( msg.chat.id, text = 'Правила составлены неверно' ) utils.new_update(msg, time.time()-start_time) @bot.message_handler(commands =", "msg.chat.type != 'private') def bot_check_text(msg): start_time = time.time() msg_text = msg.text msg_text_low =", "= int(time.time()-start_time)) working_time = datetime.timedelta(seconds = int(time.time()-msg.date)) uptime_str = str(uptime).replace('day', 'days').replace('dayss', 'days') working_time_str", "r = bot.forward_message(secret_config.official_chat, msg.chat.id, msg.message_id) bot.pin_chat_message( r.chat.id, r.message_id ) @bot.message_handler(commands =['setlog'], func =", "вам в личные сообщения', ) kb.add(types.InlineKeyboardButton(text = 'Удалить', callback_data = 'settings_delete {} {}'.format(msg.message_id,", "chat_id = c.message.chat.id, message_id = c.message.message_id, text = 'Список ваших групп' ) bot.edit_message_reply_markup(", "utils.new_update(msg, time.time()-start_time) @bot.message_handler(commands=['sticker_ban'], func=lambda msg: msg.chat.type == 'supergroup') def bot_sticker_ban(msg): start_time = time.time()", "utils.check_status(msg.from_user.id, msg.chat.id) and utils.have_args(msg): words = utils.parse_arg(msg)[1] user_id = int(words) utils.unban_user(msg, user_id) elif", "+= 
text.group_commands[utils.get_group_lang(chat_id)]['refs_stats']['body'].format( inviter_pos = counter, inviter_id = inviter_info.user.id, inviter_firstname = inviter_info.user.first_name, invited_count =", "group_username = chat.username, message_id = msg_id, user_id = msg.from_user.id, user_name = api.replacer(msg.from_user.first_name), ),", "отправить @f0rden для восстановления их, в случае сбоя:\\n'+ujson.dumps(api.get_group_params(chat_id)) ) bot.answer_callback_query( c.id, text =", "not utils.check_status(msg.from_user.id, msg.chat.id): utils.send_err_report(msg, 'not_enought_rights') utils.new_update(msg, time.time()-start_time) @bot.message_handler(commands=['sticker_unban'], func=lambda msg: msg.chat.type == 'supergroup')", "= ['leave'], func = lambda msg: msg.chat.type != 'private' and utils.check_status(msg.from_user.id, msg.chat.id)) def", "= types.InlineKeyboardButton(text = 'Автоматический read-only на {} час - {}'.format(curr_settings['restrictions']['for_time'], config.settings_statuses[curr_settings['restrictions']['read_only']]), callback_data =", "c.id, text = 'Переход выполнен.' 
) @bot.callback_query_handler(func = lambda c: c.data.startswith('delete::')) def group_settings_deletions(c):", "c.data.startswith('welcome_timer')) def welcome_timer_change(c): change_count = int(c.data.split('_')[2].split('::')[0]) chat_id = utils.parse_chat_id(c) if utils.check_status(c.from_user.id, utils.parse_chat_id(c)): settings", "c.from_user.id bot.edit_message_text( chat_id = c.message.chat.id, message_id = c.message.message_id, text = text.user_messages['start'], parse_mode =", "utils.check_for_urls(msg) and api.get_group_params(msg.chat.id)['deletions']['url']: bot.delete_message( msg.chat.id, msg.message_id ) bot.send_message( msg.chat.id, text.group_commands[utils.get_group_lang(msg.chat.id)]['restricted']['url'].format( user_id = msg.from_user.id,", "рассылку', callback_data = 'broadcast_check::start') kb.add(btn1, btn2, btn3) kb.add(btn4, btn5) return kb def generate_user_groups(user_id):", "'Назад', callback_data='to_group_settings_menu::{chat_id}'.format(chat_id = chat_id)) keyboard.add(btn) return keyboard def generate_leave_kb(msg): chat_id = msg.chat.id keyboard", "= new_users_restrictions_kb(chat_id) ) @bot.callback_query_handler(func = lambda c: c.data.startswith('read_only')) def new_users_ro(c): chat_id = utils.parse_chat_id(c)", "can_send_media_messages=True, can_add_web_page_previews=True, can_send_messages=True, can_send_other_messages=True ) bot.edit_message_text( text = text.group_commands[utils.get_group_lang(c.message.chat.id)]['restricted']['new_user']['button_pressed'].format( user_id = user.user.id, user_name", "= 'Назад', callback_data='to_group_settings_menu::{chat_id}'.format(chat_id = chat_id)) keyboard.add(btn) return keyboard def warns_settings_kb(chat_id): keyboard = types.InlineKeyboardMarkup(row_width", "btn4 = types.InlineKeyboardButton(text = 'Сейчас: {}'.format(s[curr_settings['broadcast']['check']['receivers']]), callback_data = 'empty_callback') btn5 = types.InlineKeyboardButton(text =", "= time.time() 
api.register_new_chat(msg.chat) utils.new_update(msg, time.time()-start_time) @bot.message_handler(commands = ['get_logs'], func = lambda msg: msg.chat.id", "message_id = c.message.message_id, text = 'Выберите тип рассылки' ) bot.edit_message_reply_markup( chat_id = c.message.chat.id,", "или 8443 (порт должен быть открыт!) # На некоторых серверах придется указывать такой", "start_time = time.time() bot.send_message( msg.from_user.id, text.user_messages[utils.get_user_lang(msg)]['help'], parse_mode='HTML' ) utils.new_update(msg, time.time()-start_time) @bot.message_handler(commands=['about'], func=lambda msg:", "bot.delete_message( c.message.chat.id, words[1] ) @bot.callback_query_handler(func = lambda c: c.data.startswith('welcome_get')) def get_welcome_text(c): chat_id =", "btn = types.InlineKeyboardButton(text = 'Получить дамп настроек', callback_data = 'get_settings_json::{chat_id}'.format(chat_id = chat_id)) keyboard.add(btn)", "= msg.chat.id utils.new_member_logs(msg) if api.get_group_params(msg.chat.id)['deletions']['system']: bot.delete_message( msg.chat.id, msg.message_id ) if msg.chat.type == 'channel':", "c: c.data.startswith('lang::')) def change_language(c): words = re.split('::', c.data) lang = words[1] bot.edit_message_text( chat_id", "= utils.to_bool(state) api.change_group_params(chat_id, ujson.dumps(settings)) bot.edit_message_reply_markup( chat_id = c.message.chat.id, message_id = c.message.message_id, reply_markup =", "keyboard.add(btn) btn = types.InlineKeyboardButton(text = 'Удалять ссылки{}'.format(config.settings_statuses[curr_settings['deletions']['url']]), callback_data = 'del_url::{chat_id}'.format(chat_id = chat_id)) keyboard.add(btn)", "text import ujson import utils WEBHOOK_HOST = utils.get_my_ip() WEBHOOK_PORT = 8443 # 443,", "callback_data = 'new_users_restrictions::{chat_id}'.format(chat_id = chat_id)) keyboard.add(btn) btn = types.InlineKeyboardButton(text = 'Настройка предупреждений', callback_data", "'К списку групп', 
callback_data = 'to_groups_list')) return keyboard def welcome_settings_kb(chat_id): kb = types.InlineKeyboardMarkup(row_width", "len(msg.text) not in [9, 21]: new_rules = msg.text[len(msg.text):msg.entities[0].length:-1][::-1] if utils.check_text(new_rules): utils.set_rules(msg, new_rules) bot.send_message(", "= 'Нет, останься', callback_data='leave_confirm::{chat_id}'.format(chat_id = chat_id)) keyboard.add(btn) return keyboard def generate_user_menu_kb(user_id): kb =", "msg.chat.id, text.user_messages[utils.get_user_lang(msg)]['start'], reply_markup=generate_user_menu_kb(msg.from_user.id) ) utils.new_update(msg, time.time()-start_time) @bot.message_handler(commands=['start'], func=lambda msg: msg.chat.type != 'private') def", "!= 'private') def bot_check_text(msg): start_time = time.time() msg_text = msg.text msg_text_low = msg_text.lower()", "msg.chat.id, msg.message_id ) if msg.chat.type == 'channel': bot.send_message( msg.chat.id, text.promotion_message, parse_mode='HTML' ) bot.leave_chat(", "func = lambda msg: msg.chat.type == 'supergroup') def bot_reregister(msg): start_time = time.time() if", "web.Response(status=403) app.router.add_post('/{token}/', handle) def create_user_language_keyboard(): lang_keyboard = types.InlineKeyboardMarkup() for i in config.languages: lang_keyboard.add(types.InlineKeyboardButton(text", "'HTML' ) @bot.message_handler(commands = ['set_rules'], func = lambda msg: utils.check_status(msg.from_user.id, msg.chat.id)) def bot_set_rules(msg):", "c.message.message_id, text = 'Выберите тип рассылки' ) bot.edit_message_reply_markup( chat_id = c.message.chat.id, message_id =", "def to_deletions(c): chat_id = utils.parse_chat_id(c) bot.edit_message_reply_markup( chat_id = c.message.chat.id, message_id = c.message.message_id, reply_markup", "utf8 import datetime import logging import random import re import ssl import subprocess", "text = 'Изменения подтверждены. 
Статус настройки: {}'.format(config.settings_statuses[api.get_group_params(chat_id)['deletions']['files'][cont_type]]) ) else: bot.answer_callback_query( callback_query_id = c.id,", "= types.InlineKeyboardButton(text = 'Рассылка-проверка', callback_data = 'check_broadcast') btn2 = types.InlineKeyboardButton(text = 'Рассылка сообщения',", "подтверждены.' ) else: t = Thread(target = utils.make_broadcast, kwargs = { 'is_test': True,", "callback_data = 'change_lang') kb.add(btn1, btn2) if utils.check_super_user(user_id): kb.add(types.InlineKeyboardButton(text = 'Админка бота', callback_data =", "msg.from_user.id try: admins = bot.get_chat_administrators(msg.forward_from_chat.id) status1 = False status2 = False for i", "api.replacer(msg.chat.title), group_username = chat.username, message_id = msg_id, user_id = msg.from_user.id, user_name = api.replacer(msg.from_user.first_name),", "chat_id)) keyboard.add(btn) btn1 = types.InlineKeyboardButton(text = '➖2', callback_data = 'time_ro_-2::{chat_id}'.format(chat_id = chat_id)) btn2", "msg.chat.id) and not utils.have_args(msg): utils.send_err_report(msg, 'not_enought_rights') elif utils.have_args(msg) and not check_status(msg.from_user.id): utils.send_err_report(msg, 'no_args_provided')", "= 'warns_count_-2::{chat_id}'.format(chat_id = chat_id)) btn2 = types.InlineKeyboardButton(text = '➖1', callback_data = 'warns_count_-1::{chat_id}'.format(chat_id =", "as e: print(e) bot.reply_to( msg, text.reports_messages['report']['to_user'], parse_mode = 'HTML' ) utils.new_update(msg, time.time()-start_time) @bot.message_handler(commands", "'Назад', callback_data='to_group_settings_menu::{chat_id}'.format(chat_id = chat_id)) keyboard.add(btn) return keyboard def remove_warns_kb(user_id): kb = types.InlineKeyboardMarkup(row_width=1) btn", "bot.reply_to( msg, text.group_commands[utils.get_group_lang(msg.chat.id)]['ban_me_please'].format( t = t ), parse_mode = 'HTML' ) else: bot.reply_to(", "являетесь администратором. 
Текущий статус настройки: {}'.format(config.settings_statuses[api.get_group_params(chat_id)['deletions']['system']]) ) @bot.callback_query_handler(func = lambda c: c.data.startswith('kick_bots')) def", "'Рассылка', callback_data = 'broadcast_menu') btn2 = types.InlineKeyboardButton(text = 'Статистика', callback_data = 'stats_menu') kb.add(btn1,", "== 'stats_menu') def bot_stats_menu(c): bot.edit_message_text( chat_id = c.message.chat.id, message_id = c.message.message_id, text =", "bot.send_message( msg.chat.id, 'Правила изменены' ) else: bot.send_message( msg.chat.id, text = 'Правила составлены неверно'", ") @bot.callback_query_handler(func = lambda c: c.data.startswith('delete::')) def group_settings_deletions(c): chat_id = utils.parse_chat_id(c) cont_type =", "datetime.timedelta(seconds = int(time.time()-start_time)) working_time = datetime.timedelta(seconds = int(time.time()-msg.date)) uptime_str = str(uptime).replace('day', 'days').replace('dayss', 'days')", "'supergroup'] and msg.forward_from_chat is not None and utils.check_status(msg.from_user.id, msg.chat.id) and not utils.check_log(msg.chat.id) )", "time.time() message = msg kb = types.InlineKeyboardMarkup() r = bot.reply_to( msg, 'Настройки отправлены", "= True, text = 'Вы не являетесь администратором. Текущий статус настройки: {}'.format(config.settings_statuses[api.get_group_params(chat_id)['deletions']['url']]) )", "time from multiprocessing import Process as Thread import telebot from aiohttp import web", "msg_text = msg.text msg_text_low = msg_text.lower() if utils.is_restricted(msg) and not utils.check_status(msg.from_user.id, msg.chat.id): bot.delete_message(", "прав для выполнения этого действия.' 
) @bot.callback_query_handler(func = lambda c: c.data.startswith('to_group_settings_menu')) def group_settings_deletions_photo(c):", "c.id, text = 'Список отправлен', show_alert = True ) @bot.callback_query_handler(func = lambda c:", "= text.group_commands[utils.get_group_lang(chat_id)]['log_channel']['confirmation']['errors']['bot_is_not_admin'] ) except Exception as e: print(e) @bot.message_handler(commands = ['infolog'], func =", "utils.add_to_delete_queue(msg.chat.id, r.message_id, api.get_group_params(msg.chat.id)['greeting']['delete_timer']) utils.new_update(msg, time.time()-start_time) @bot.message_handler(content_types=[ 'new_chat_members', 'left_chat_member', 'new_chat_title', 'new_chat_photo', 'delete_chat_photo', 'group_chat_created', 'supergroup_chat_created',", "'Рассылка сообщения', callback_data = 'broadcast_settings') kb.add(btn1, btn2) kb.add(types.InlineKeyboardButton(text = 'В главное меню', callback_data", "types.InlineKeyboardButton(text = '➕5', callback_data = 'welcome_timer_+5::{chat_id}'.format(chat_id = chat_id)) btn4 = types.InlineKeyboardButton(text = '➕10',", "btn = types.InlineKeyboardButton(text = 'Назад', callback_data='to_group_settings_menu::{chat_id}'.format(chat_id = chat_id)) keyboard.add(btn) return keyboard def generate_leave_kb(msg):", "bot.edit_message_text( chat_id = c.message.chat.id, message_id = c.message.message_id, text = 'Админка' ) bot.edit_message_reply_markup( chat_id", "'У вас недостаточно прав для выполнения этого действия. 
Текущий статус настройки: {}'.format(config.settings_statuses[api.get_group_params(chat_id)[c.data.split('::')[0]]]) )", "parse_mode = 'HTML', chat_id = c.message.chat.id, message_id = c.message.message_id ) utils.add_to_delete_queue(msg.chat.id, r.message_id, api.get_group_params(msg.chat.id)['greeting']['delete_timer'])", "keyboard.add(btn) btn = types.InlineKeyboardButton(text = 'Назад', callback_data='to_group_settings_menu::{chat_id}'.format(chat_id = chat_id)) keyboard.add(btn) return keyboard def", "настройки: {}'.format(config.settings_statuses[api.get_group_params(chat_id)[c.data.split('::')[0]]]) ) @bot.callback_query_handler(func = lambda c: c.data.startswith('del_url')) def del_url(c): chat_id = utils.parse_chat_id(c)", ") else: bot.answer_callback_query( callback_query_id = c.id, show_alert = True, text = 'Вы не", "'➖5', callback_data = 'welcome_timer_-5::{chat_id}'.format(chat_id = chat_id)) btn3 = types.InlineKeyboardButton(text = '➕5', callback_data =", "message_id = c.message.message_id, reply_markup = new_users_restrictions_kb(chat_id) ) bot.answer_callback_query( callback_query_id = c.id, text =", "# bot.send_message( # msg.chat.id, # text. 
# ) @bot.message_handler(commands = ['version']) def bot_version(msg):", "msg.chat.id): sticker_id = msg.reply_to_message.sticker.file_id utils.ban_sticker(msg, sticker_id) elif not utils.check_status(msg.from_user.id, msg.chat.id): utils.send_err_report(msg, 'not_enought_rights') utils.new_update(msg,", "@bot.callback_query_handler(func = lambda c: c.data.startswith('leave_')) def bot_leave_cb(c): if utils.check_status(c.from_user.id, utils.parse_chat_id(c)): if c.data.endswith('confirm'): bot.delete_message(", "utils.send_err_report(msg, 'not_enought_rights') utils.new_update(msg, time.time()-start_time) @bot.message_handler(commands=['stickerpack_ban'],func=lambda msg: msg.chat.type == 'supergroup') def bot_stickerpack_ban(msg): start_time =", "= lambda c: c.data.startswith('change_all')) def group_settings_deletions_all(c): chat_id = utils.parse_chat_id(c) if utils.check_status(c.from_user.id, utils.parse_chat_id(c)): for", "= lambda c: c.data.startswith('warns_del')) def del_warns(c): user_id = utils.parse_user_id(c) chat_id = utils.parse_chat_id(c) if", "# ) utils.new_update(msg, time.time()-start_time) @bot.message_handler(content_types=['photo'], func = lambda msg: msg.chat.id == 303986717) def", "kb.add(btn1, btn2, btn3, btn4) btn = types.InlineKeyboardButton(text = 'Показать приветствие', callback_data = 'welcome_get::{chat_id}'.format(chat_id", "= 'warns_count_+2::{chat_id}'.format(chat_id = chat_id)) keyboard.add(btn1, btn2, btn3, btn4) btn = types.InlineKeyboardButton(text = 'Действие", "= utils.parse_arg(msg)[1] utils.unban_sticker(msg, sticker_id) elif utils.check_status(msg.from_user.id, msg.chat.id) and not utils.have_args(msg): utils.send_err_report(msg, 'not_enought_rights') elif", "= new_users_restrictions_kb(chat_id) ) bot.answer_callback_query( callback_query_id = c.id, text = 'Изменения подтверждены.' ) else:", "этого действия.' 
) @bot.callback_query_handler(func = lambda c: c.data.startswith('settings_delete')) def del_settings(c): words = c.data.split()", "), parse_mode='HTML' ) except Exception as e: print(e) bot.reply_to( msg, text.reports_messages['report']['to_user'], parse_mode =", "действия. Текущий статус настройки: {}'.format(config.settings_statuses[api.get_group_params(chat_id)['deletions']['files'][cont_type]]) ) @bot.callback_query_handler(func = lambda c: c.data.startswith('change_all')) def group_settings_deletions_all(c):", "text.user_messages[lang]['chosen_language']) api.register_new_user(c.from_user, lang) @bot.callback_query_handler(func = lambda c: c.data.startswith('get_notifications')) def notify_change(c): chat_id = utils.parse_chat_id(c)", "= 'Удалить предупреждения', callback_data = 'delete_warns::{user_id}'.format(user_id = user_id)) kb.add(btn) return kb def unban_new_user_kb(msg):", "utils.check_status(msg.from_user.id, msg.chat.id): bot.delete_message( msg.chat.id, msg.message_id ) if msg_text_low.startswith('разбан'): if utils.check_super_user(msg.from_user.id): utils.global_unban(msg) elif msg_text.lower()", "utils.new_warn(msg) elif not utils.check_status(msg.from_user.id, msg.chat.id): utils.send_err_report(msg, 'not_enought_rights') elif utils.check_status(msg.reply_to_message.from_user.id, msg.chat.id): utils.send_err_report(msg, 'user_is_admin') utils.new_update(msg,", "types.InlineKeyboardButton(text = 'Мои чаты', callback_data = 'my_chats') btn2 = types.InlineKeyboardButton(text = 'Изменить язык',", "types.InlineKeyboardButton(text = 'Настройка предупреждений', callback_data = 'warns_settings::{chat_id}'.format(chat_id = chat_id)) keyboard.add(btn) btn = types.InlineKeyboardButton(text", "= msg.chat.id)) kb.add(btn) return kb def user_settings_main_menu(msg): keyboard = types.InlineKeyboardMarkup(row_width=1) curr_settings = api.get_user_param(msg.chat.id,", "user_id = msg.from_user.id, user_name = api.replacer(msg.from_user.first_name), ), 
parse_mode='HTML' ) except Exception as e:", "= uptime ), reply_to_message_id=msg.message_id, parse_mode='HTML' ) utils.new_update(msg, time.time()-start_timee) @bot.message_handler(content_types=['new_chat_members']) def bot_users_new(msg): start_time =", "utils.parse_chat_id(c)): utils.change_state_main(chat_id, 'kick_bots') bot.edit_message_reply_markup( chat_id=c.message.chat.id, message_id=c.message.message_id, reply_markup=group_setting(utils.parse_chat_id(c)) ) bot.answer_callback_query( callback_query_id = c.id, text", "func = lambda msg: msg.chat.id == 303986717) def bot_text(msg): start_time = time.time() bot.reply_to(msg,", "t = Thread(target = utils.check_deleting_queue) t.start() async def handle(request): if request.match_info.get('token') == bot.token:", "utils.change_state_deletions_main(chat_id, 'system') bot.edit_message_reply_markup( chat_id=c.message.chat.id, message_id=c.message.message_id, reply_markup=group_setting(utils.parse_chat_id(c)) ) bot.answer_callback_query( callback_query_id = c.id, text =", "utils.check_status(c.from_user.id, utils.parse_chat_id(c)): settings = api.get_group_params(chat_id) settings['warns']['count'] = settings['warns']['count'] + change_count if settings['warns']['count'] <", "and api.get_group_params(msg.chat.id)['deletions']['forward']: # bot.delete_message( # msg.chat.id, # msg.message_id # ) # bot.send_message( #", "i.user.id == my_info.id: status2 = True if status1 is True and status2 is", "bot.answer_callback_query( callback_query_id = c.id, text = 'Переход выполнен' ) @bot.callback_query_handler(func = lambda c:", "message_id = c.message.message_id, reply_markup = delete_settings(utils.parse_chat_id(c)) ) bot.answer_callback_query( callback_query_id = c.id, text =", "chat_id)) btn4 = types.InlineKeyboardButton(text = '➕10', callback_data = 'welcome_timer_+10::{chat_id}'.format(chat_id = chat_id)) kb.add(btn1, btn2,", "c.message.chat.id, 'Настройки сброшены.' 
) bot.delete_message( c.message.chat.id, c.message.message_id ) else: bot.delete_message( c.message.chat.id, c.message.message_id )", "kb = types.InlineKeyboardMarkup(row_width = 1) btn1 = types.InlineKeyboardButton(text = 'Мои чаты', callback_data =", "= bot.get_me() telebot_logger = logging.getLogger('telebot') sqlite_info = logging.getLogger('sqlite') main_info = logging.getLogger('main_info') report_info =", "@bot.message_handler(commands=['help']) def bot_help(msg): start_time = time.time() bot.send_message( msg.from_user.id, text.user_messages[utils.get_user_lang(msg)]['help'], parse_mode='HTML' ) utils.new_update(msg, time.time()-start_time)", "settings = api.get_group_params(chat_id) settings['greeting']['delete_timer'] = settings['greeting']['delete_timer'] + change_count if settings['greeting']['delete_timer'] < 0: settings['greeting']['delete_timer']", "cherrypy import config import secret_config import text import ujson import utils WEBHOOK_HOST =", "r = bot.reply_to( msg, 'Настройки отправлены вам в личные сообщения', ) kb.add(types.InlineKeyboardButton(text =", "= c.message.message_id, text = 'Выберите тип рассылки' ) bot.edit_message_reply_markup( chat_id = c.message.chat.id, message_id", "chat_id)) keyboard.add(btn) return keyboard def generate_leave_kb(msg): chat_id = msg.chat.id keyboard = types.InlineKeyboardMarkup(row_width=1) btn", "'В главное меню', callback_data = 'to_main_menu')) bot.edit_message_reply_markup( chat_id = c.message.chat.id, message_id = c.message.message_id,", "= msg.text msg_text_low = msg_text.lower() if utils.is_restricted(msg) and not utils.check_status(msg.from_user.id, msg.chat.id): bot.delete_message( msg.chat.id,", "time.time()-start_time) @bot.message_handler(commands = ['get_id']) def bot_get_id(msg): bot.send_message( msg.chat.id, msg.chat.id ) # @bot.message_handler(commands =", "return kb def unban_new_user_kb(msg): kb = types.InlineKeyboardMarkup(row_width=1) btn = types.InlineKeyboardButton(text = 
'Разблокировать', callback_data", "= utils.parse_arg(msg)[1] utils.unban_stickerpack(msg, stickerpack_name) utils.new_update(msg, time.time()-start_time) @bot.message_handler(commands=['sticker_ban'], func=lambda msg: msg.chat.type == 'supergroup') def", "!= 'private') def bot_new_warn(msg): start_time = time.time() if utils.check_status(msg.from_user.id, msg.chat.id) and msg.reply_to_message is", "= msg.new_chat_member.id, chat_id = msg.chat.id)) kb.add(btn) return kb def user_settings_main_menu(msg): keyboard = types.InlineKeyboardMarkup(row_width=1)", "chat_id = msg.chat.id)) kb.add(btn) return kb def user_settings_main_menu(msg): keyboard = types.InlineKeyboardMarkup(row_width=1) curr_settings =", ") @bot.callback_query_handler(func = lambda c: c.data == 'check_broadcast') def bot_admin_menu(c): bot.edit_message_text( chat_id =", "user_id) elif utils.check_status(msg.from_user.id, msg.chat.id) and msg.reply_to_message is not None: user_id = msg.reply_to_message.from_user.id utils.unban_user(msg,", "utils.change_state_main(chat_id, 'kick_bots') bot.edit_message_reply_markup( chat_id=c.message.chat.id, message_id=c.message.message_id, reply_markup=group_setting(utils.parse_chat_id(c)) ) bot.answer_callback_query( callback_query_id = c.id, text =", "api.get_group_params(msg.chat.id)['kick_bots']: bot.kick_chat_member( msg.chat.id, msg.new_chat_member.id ) bot.send_message( msg.chat.id, text.group_commands['ru']['restricted']['bot'], parse_mode = 'HTML', reply_markup =", ") @bot.callback_query_handler(func = lambda c: c.data.startswith('get_settings_json')) def bot_get_settings_json(c): chat_id = utils.parse_chat_id(c) bot.send_message( chat_id", "start_time = time.time() if utils.is_restricted(msg): bot.delete_message( msg.chat.id, msg.message_id ) utils.new_update(msg, time.time()-start_time) # Кнопки", "chat_id)) keyboard.add(btn) keyboard.add(types.InlineKeyboardButton(text = 'К списку групп', callback_data = 'to_groups_list')) return keyboard def", 
"parse_mode='HTML' ) utils.new_update(msg, time.time()-start_timee) @bot.message_handler(content_types=['new_chat_members']) def bot_users_new(msg): start_time = time.time() api.register_new_chat(msg.chat) chat_id =", "elif utils.check_status(msg.from_user.id, msg.chat.id) and msg.reply_to_message is not None: user_id = msg.reply_to_message.from_user.id utils.unban_user(msg, user_id)", "lambda c: c.data.startswith('time_ro_')) def ro_time_change(c): change_time = int(c.data.split('_')[2].split('::')[0]) chat_id = utils.parse_chat_id(c) if utils.check_status(c.from_user.id,", "if utils.have_args(msg) and utils.check_status(msg.from_user.id, msg.chat.id): sticker_id = utils.parse_arg(msg)[1] utils.unban_sticker(msg, sticker_id) elif utils.check_status(msg.from_user.id, msg.chat.id)", "(secret_config.token) start_time = int(time.time()) bot = telebot.TeleBot(token = secret_config.token) my_info = bot.get_me() telebot_logger", "= msg.chat.id keyboard = types.InlineKeyboardMarkup(row_width=1) btn = types.InlineKeyboardButton(text = 'Да, выйди из чата',", "= 'broadcast_message::input') btn2 = types.InlineKeyboardButton(text = 'Просмотреть сообщение', callback_data = 'broadcast_message::show') btn3 =", "c.message.chat.id, message_id = c.message.message_id ) utils.add_to_delete_queue(msg.chat.id, r.message_id, api.get_group_params(msg.chat.id)['greeting']['delete_timer']) else: bot.answer_callback_query( callback_query_id = c.id,", ") @bot.callback_query_handler(func = lambda c: c.data in ['my_chats', 'to_groups_list']) def my_chats_list(c): user_id =", "= True, text = 'У вас недостаточно прав для выполнения этого действия. 
Текущий", "msg.new_chat_member.id, until_date = int(time.time()+api.get_group_params(msg.chat.id)['restrictions']['for_time']*3600) ) r = bot.send_message( msg.chat.id, text.group_commands['ru']['restricted']['new_user']['read_only'].format( user_id = msg.new_chat_member.id,", "lambda c: c.data.startswith('kick_bots')) def kick_bots(c): chat_id = utils.parse_chat_id(c) if utils.check_status(c.from_user.id, utils.parse_chat_id(c)): utils.change_state_main(chat_id, 'kick_bots')", "), parse_mode = 'HTML' ) else: bot.reply_to( msg, text.group_commands[utils.get_group_lang(msg.chat.id)]['errors']['prefix'].format( reason = text.group_commands[utils.get_group_lang(msg.chat.id)]['errors']['reasons']['user_is_admin'] ),", "chat_id = c.message.chat.id, message_id = c.message.message_id, text = 'Админка' ) bot.edit_message_reply_markup( chat_id =", "логгеров создан') logging.basicConfig( format='%(filename)s [LINE:%(lineno)-3d]# %(levelname)-8s - %(name)-9s [%(asctime)s] - %(message)-50s ', datefmt='%m/%d/%Y", "= 'Переход выполнен' ) @bot.callback_query_handler(func = lambda c: c.data.startswith('settings::')) def chat_settings(c): chat_id =", "text.group_commands[utils.get_group_lang(c.message.chat.id)]['leave']['cancelled'] ) bot.delete_message( c.message.chat.id, c.message.message_id ) # @bot.callback_query_handler(func = lambda c: c.data.startswith('settings_captcha')) #", ") utils.new_update(msg, time.time()-start_time) @bot.message_handler(commands=['start'], func=lambda msg: msg.chat.type == 'private') def bot_user_start(msg): message =", "utils.change_state_deletions_files(chat_id, i) bot.edit_message_reply_markup( chat_id = c.message.chat.id, message_id = c.message.message_id, reply_markup = delete_settings(chat_id) )", ") utils.new_update(msg, time.time()-start_time) @bot.message_handler(commands = ['unban'], func = lambda msg: msg.chat.type == 'supergroup')", "@bot.callback_query_handler(func = lambda c: c.data.startswith('welcome_timer')) def 
welcome_timer_change(c): change_count = int(c.data.split('_')[2].split('::')[0]) chat_id = utils.parse_chat_id(c)", "= api.get_group_params(chat_id) settings['restrictions']['read_only'] = config.settings_states[settings['restrictions']['read_only']] api.change_group_params(chat_id, ujson.dumps(settings)) bot.edit_message_reply_markup( chat_id = c.message.chat.id, message_id =", "bot.answer_callback_query( callback_query_id = c.id, text = 'Изменения подтверждены. Статус настройки: {}'.format(config.settings_statuses[api.get_group_params(chat_id)['deletions']['files'][cont_type]]) ) else:", "change_time if settings['restrictions']['for_time'] < 1: settings['restrictions']['for_time'] = 1 api.change_group_params(chat_id, ujson.dumps(settings)) bot.edit_message_reply_markup( chat_id =", "random.randint(1, 10) ban_time = 60*t try: if not utils.check_status(msg.from_user.id, msg.chat.id): bot.restrict_chat_member( msg.chat.id, msg.from_user.id,", "cont_type, chat_id = chat_id)) keyboard.add(btn) btn = types.InlineKeyboardButton(text = 'Переключить все', callback_data =", "c.from_user.id, 'message_id': c.message.message_id } ) kb = types.InlineKeyboardMarkup() kb.add(types.InlineKeyboardButton(text = 'В главное меню',", "[9, 21]: new_greeting = msg.text[len(msg.text):msg.entities[0].length:-1][::-1] if utils.check_text(new_greeting): utils.set_greeting(msg, new_greeting) bot.send_message( msg.chat.id, 'Приветствие изменено'", "utils.parse_chat_id(c)): settings = api.get_group_params(chat_id) settings['warns']['action'] = new_mod api.change_group_params(chat_id, ujson.dumps(settings)) bot.edit_message_reply_markup( chat_id = c.message.chat.id,", "change_count = int(c.data.split('_')[2].split('::')[0]) chat_id = utils.parse_chat_id(c) if utils.check_status(c.from_user.id, utils.parse_chat_id(c)): settings = api.get_group_params(chat_id) settings['greeting']['delete_timer']", "администратором. 
Текущий статус настройки: {}'.format(config.settings_statuses[api.get_group_params(chat_id)['deletions']['system']]) ) @bot.callback_query_handler(func = lambda c: c.data.startswith('kick_bots')) def kick_bots(c):", "c.message.message_id, text = 'Рассылка начата' ) bot.edit_message_reply_markup( chat_id = c.message.chat.id, message_id = c.message.message_id,", "= c.id, text = 'Переход выполнен.' ) @bot.callback_query_handler(func = lambda c: c.data.startswith('delete::')) def", "'broadcast_message::start') kb.add(btn1, btn2, btn3) return kb def generate_broadcast_check_menu_kb(): kb = types.InlineKeyboardMarkup(row_width = 3)", "utils.check_status(c.from_user.id, utils.parse_chat_id(c)): settings = api.get_group_params(chat_id) settings['greeting']['delete_timer'] = settings['greeting']['delete_timer'] + change_count if settings['greeting']['delete_timer'] <", "curr_bot_settings = ujson.loads(api.get_bot_settings(secret_config.token)) if arg in ['users', 'chats', 'all']: curr_bot_settings['broadcast']['check']['recievers'] = arg api.change_bot_settings(secret_config.token,", "msg.reply_to_message is not None and not utils.check_status(msg.reply_to_message.from_user.id, msg.chat.id): utils.new_warn(msg) elif not utils.check_status(msg.from_user.id, msg.chat.id):", "= types.InlineKeyboardButton(text = 'Действие при максимальном кол-ве варнов: {}'.format(config.warns_states[curr_settings['warns']['action']]), callback_data='empty_callback::{chat_id}'.format(chat_id = chat_id)) keyboard.add(btn)", "api.get_group_params(msg.chat.id)['greeting']['delete_timer']) else: bot.answer_callback_query( callback_query_id = c.id, show_alert = True, text = 'У вас", "text = 'У вас недостаточно прав для выполнения этого действия.' 
) else: if", "c.data.startswith('settings_captcha')) # def change_captcha_settings(c): # chat_id = utils.parse_chat_id(c) # if utils.check_status(c.from_user.id, utils.parse_chat_id(c)): #", "uptime = datetime.timedelta(seconds = int(time.time()-start_time)) working_time = datetime.timedelta(seconds = int(time.time()-msg.date)) uptime_str = str(uptime).replace('day',", "= chat_id)) kb.add(btn) btn = types.InlineKeyboardButton(text = 'Задержка перед удалением приветствия: {} сек.'.format(curr_settings['greeting']['delete_timer']),", "= api.get_group_params(chat_id) btn = types.InlineKeyboardButton(text = 'Максимальное количество исключений: {}'.format(curr_settings['warns']['count']), callback_data = 'empty_callback::{chat_id}'.format(chat_id", "arg in ['users', 'chats', 'all']: curr_bot_settings['broadcast']['check']['recievers'] = arg api.change_bot_settings(secret_config.token, ujson.dumps(curr_bot_settings)) bot.edit_message_reply_markup( chat_id =", "'is_test': True, 'receivers': curr_bot_settings['broadcast']['check']['recievers'], 'cont_type': 'text', 'msg_text': '', 'file_id': '', 'user_id': c.from_user.id, 'message_id':", "на сутки', callback_data = 'warns_action_3::{chat_id}'.format(chat_id = chat_id)) keyboard.add(btn1, btn2, btn3, btn4) btn =", "types.InlineKeyboardButton(text = 'Выбор языка'.format(config.settings_statuses['get_notifications']), callback_data='open_lang_menu') keyboard.add(btn) return keyboard def delete_settings(chat_id): keyboard = types.InlineKeyboardMarkup(row_width=1)", "btn = types.InlineKeyboardButton(text = 'Действие при максимальном кол-ве варнов: {}'.format(config.warns_states[curr_settings['warns']['action']]), callback_data='empty_callback::{chat_id}'.format(chat_id = chat_id))", "reply_markup = types.ReplyKeyboardRemove() ) elif utils.check_global_ban(msg): bot.kick_chat_member( msg.chat.id, msg.new_chat_member.id ) bot.send_message( msg.chat.id, text.group_commands['ru']['restricted']['global_ban'].format(", 
"types.InlineKeyboardButton(text = 'Снятие ограничений разрешено для: {}'.format(config.new_users[curr_settings['restrictions']['admins_only']]), callback_data = 'new_restrictions_admins_only_{state}::{chat_id}'.format(state = config.settings_states[curr_settings['restrictions']['admins_only']], chat_id", "'broadcast_check::users') btn2 = types.InlineKeyboardButton(text = 'Только чаты', callback_data = 'broadcast_check::chats') btn3 = types.InlineKeyboardButton(text", "= utils.parse_chat_id(c) if utils.check_status(c.from_user.id, utils.parse_chat_id(c)): utils.change_state_main(chat_id, 'kick_bots') bot.edit_message_reply_markup( chat_id=c.message.chat.id, message_id=c.message.message_id, reply_markup=group_setting(utils.parse_chat_id(c)) ) bot.answer_callback_query(", "str(uptime).replace('day', 'days').replace('dayss', 'days') working_time_str = str(working_time).replace('day', 'days').replace('dayss', 'days') if uptime.days != 0: uptime_str", "= ['version']) def bot_version(msg): bot.send_message( msg.chat.id, text.user_messages[utils.get_user_lang(msg)]['commands']['version'].format(version = text.VERSION), parse_mode = 'HTML' )", "def group_settings_deletions_photo(c): chat_id = utils.parse_chat_id(c) bot.edit_message_reply_markup( chat_id = c.message.chat.id, message_id = c.message.message_id, reply_markup=group_setting(utils.parse_chat_id(c))", "def bot_admin_menu(c): bot.edit_message_text( chat_id = c.message.chat.id, message_id = c.message.message_id, text = 'Админка' )", "func = lambda msg: msg.chat.type in ['group', 'supergroup'] and msg.forward_from_chat is not None", "kb = types.InlineKeyboardMarkup() r = bot.reply_to( msg, 'Настройки отправлены вам в личные сообщения',", "time.time() kb = types.InlineKeyboardMarkup() kb.add(types.InlineKeyboardButton(text = 'Да, выполнить сброс', callback_data = 'reset_settings_confirmation::{chat_id}'.format(chat_id =", "counter = 0 for i in inviters: inviter_info = bot.get_chat_member(chat_id, i['inviter']) counter 
+=", "c.message.chat.id, message_id = c.message.message_id, text = 'Выберите тип рассылки' ) bot.edit_message_reply_markup( chat_id =", "def bot_donate(msg): start_time = time.time() bot.send_message( msg.chat.id, text.group_commands['ru']['donate'], parse_mode = 'HTML' ) utils.new_update(msg,", "'Переход выполнен' ) @bot.callback_query_handler(func = lambda c: c.data.startswith('get_settings_json')) def bot_get_settings_json(c): chat_id = utils.parse_chat_id(c)", "для выполнения этого действия.' ) @bot.callback_query_handler(func = lambda c: c.data.startswith('warns_settings')) def warns_count_change(c): chat_id", "c.message.chat.id, message_id = c.message.message_id, reply_markup = generate_broadcast_check_menu_kb() ) @bot.callback_query_handler(func = lambda c: c.data.startswith('broadcast_check'))", "for i in inviters: inviter_info = bot.get_chat_member(chat_id, i['inviter']) counter += 1 m +=", "= c.message.message_id, text = '<b>Настройки группы {}</b>'.format(bot.get_chat(chat_id).title), parse_mode = 'HTML' ) bot.edit_message_reply_markup( chat_id", "coding: utf8 import datetime import logging import random import re import ssl import", "2) btn1 = types.InlineKeyboardButton(text = 'Ввести сообщение', callback_data = 'broadcast_message::input') btn2 = types.InlineKeyboardButton(text", "request_body_dict = await request.json() update = telebot.types.Update.de_json(request_body_dict) bot.process_new_updates([update]) return web.Response() else: return web.Response(status=403)", "вас недостаточно прав для выполнения этого действия.' 
) @bot.callback_query_handler(func = lambda c: c.data.startswith('new_users_restrictions'))", "secret_config import text import ujson import utils WEBHOOK_HOST = utils.get_my_ip() WEBHOOK_PORT = 8443", "text.reports_messages['report']['to_admin']['no_username'] for i in admins: try: bot.send_message( i.user.id, txt.format( group_name = api.replacer(msg.chat.title), group_username", "types.InlineKeyboardMarkup(row_width = 1) btn1 = types.InlineKeyboardButton(text = 'Мои чаты', callback_data = 'my_chats') btn2", "= c.message.chat.id, message_id = c.message.message_id ) utils.add_to_delete_queue(chat_id, c.message.message_id, api.get_group_params(chat_id)['greeting']['delete_timer']) else: bot.answer_callback_query( callback_query_id =", "@bot.callback_query_handler(func=lambda c: c.data.startswith('lang::')) def change_language(c): words = re.split('::', c.data) lang = words[1] bot.edit_message_text(", "bot_broadcast_check(c): arg = c.data.split('::')[1] curr_bot_settings = ujson.loads(api.get_bot_settings(secret_config.token)) if arg in ['users', 'chats', 'all']:", "= msg.new_chat_member.first_name ), parse_mode = 'HTML' ) else: utils.new_user_in_chat(msg) if utils.need_greeting(msg): r =", "восстановления их, в случае сбоя:\\n'+ujson.dumps(api.get_group_params(chat_id)) ) bot.answer_callback_query( c.id, text = 'Настройки отправлены', show_alert", "= settings['greeting']['is_enabled'] new_state = config.settings_states[curr_state] settings['greeting']['is_enabled'] = new_state api.change_group_params(chat_id, ujson.dumps(settings)) bot.edit_message_reply_markup( chat_id =", "btn = types.InlineKeyboardButton(text = 'Задержка перед удалением приветствия: {} сек.'.format(curr_settings['greeting']['delete_timer']), callback_data = 'welcome_get::{chat_id}'.format(chat_id", "c.message.message_id, text = 'Список ваших групп' ) bot.edit_message_reply_markup( chat_id = c.message.chat.id, message_id =", "lambda msg: msg.chat.type != 'private' and 
utils.check_status(msg.from_user.id, msg.chat.id)) def bot_leave(msg): bot.send_message( msg.chat.id, text.group_commands[utils.get_group_lang(msg.chat.id)]['leave']['question'],", "btn = types.InlineKeyboardButton(text = 'Выбор языка'.format(config.settings_statuses['get_notifications']), callback_data='open_lang_menu') keyboard.add(btn) return keyboard def delete_settings(chat_id): keyboard", "True: bot.send_message( msg.chat.id, text = text.group_commands[utils.get_group_lang(chat_id)]['log_channel']['confirmation']['errors']['user_is_not_creator'] ) elif status2 is not True: bot.send_message(", "= c.message.chat.id, message_id = c.message.message_id, reply_markup=group_setting(utils.parse_chat_id(c)) ) bot.answer_callback_query( callback_query_id = c.id, text =", "msg: msg.chat.type == 'supergroup') def bot_stickerpack_ban(msg): start_time = time.time() if utils.check_status(msg.from_user.id, msg.chat.id): utils.ban_stickerpack(msg)", "= 'Ограничения новых пользователей', callback_data = 'new_users_restrictions::{chat_id}'.format(chat_id = chat_id)) keyboard.add(btn) btn = types.InlineKeyboardButton(text", "['group', 'supergroup']) def bot_info_log(msg): if utils.check_log(msg.chat.id): m = text.group_commands[utils.get_group_lang(msg.chat.id)]['log_channel']['info']['is_on'].format( chat_id = utils.get_log_id(msg.chat.id), chat_name", "главное меню', callback_data = 'to_main_menu')) return kb def generate_broadcast_vars_menu_kb(): kb = types.InlineKeyboardMarkup(row_width =", "i in config.languages: lang_keyboard.add(types.InlineKeyboardButton(text = i['title'], callback_data = 'lang::{lang_code}'.format(lang_code = i['code']))) return lang_keyboard", "chat.username, message_id = msg_id, user_id = msg.from_user.id, user_name = api.replacer(msg.from_user.first_name), ), parse_mode='HTML' )", "= lambda c: c.data.startswith('welcome_get')) def get_welcome_text(c): chat_id = utils.parse_chat_id(c) bot.send_message( c.message.chat.id, utils.get_greeting(chat_id), 
parse_mode", "types.InlineKeyboardButton(text = 'Рассылка', callback_data = 'broadcast_menu') btn2 = types.InlineKeyboardButton(text = 'Статистика', callback_data =", "% (secret_config.token) start_time = int(time.time()) bot = telebot.TeleBot(token = secret_config.token) my_info = bot.get_me()", ") utils.add_to_delete_queue(msg.chat.id, r.message_id, api.get_group_params(msg.chat.id)['greeting']['delete_timer']) utils.new_update(msg, time.time()-start_time) @bot.message_handler(content_types=[ 'new_chat_members', 'left_chat_member', 'new_chat_title', 'new_chat_photo', 'delete_chat_photo', 'group_chat_created',", "'warns_count_+1::{chat_id}'.format(chat_id = chat_id)) btn4 = types.InlineKeyboardButton(text = '➕2', callback_data = 'warns_count_+2::{chat_id}'.format(chat_id = chat_id))", "msg.chat.type == 'private') def bot_user_start(msg): message = msg start_time = time.time() if utils.is_user_new(msg):", "'➕2', callback_data = 'warns_count_+2::{chat_id}'.format(chat_id = chat_id)) keyboard.add(btn1, btn2, btn3, btn4) btn = types.InlineKeyboardButton(text", "'/ban_me_please': t = random.randint(1, 10) ban_time = 60*t try: if not utils.check_status(msg.from_user.id, msg.chat.id):", "msg_id, user_id = msg.from_user.id, user_name = api.replacer(msg.from_user.first_name), ), parse_mode='HTML' ) except Exception as", "text.user_messages[utils.get_user_lang(msg)]['about'], parse_mode='HTML' ) utils.new_update(msg, time.time()-start_time) @bot.message_handler(commands=['warn'], func=lambda msg: msg.chat.type != 'private') def bot_new_warn(msg):", "msg.chat.id, text.group_commands['ru']['restricted']['new_user']['read_only'].format( user_id = msg.new_chat_member.id, user_name = api.replacer(msg.new_chat_member.first_name), ban_time = api.get_group_params(msg.chat.id)['restrictions']['for_time'] ), reply_markup", "msg.chat.type != 'private') def bot_new_warn(msg): start_time = time.time() if utils.check_status(msg.from_user.id, msg.chat.id) and msg.reply_to_message", 
"chat_id = c.message.chat.id, message_id = c.message.message_id, reply_markup = new_users_restrictions_kb(chat_id) ) @bot.callback_query_handler(func = lambda", "c.message.message_id ) else: bot.delete_message( c.message.chat.id, c.message.message_id ) bot.send_message( c.message.chat.id, 'Сброс отменен' ) @bot.callback_query_handler(func", "api.replacer(msg.from_user.first_name) # ), # parse_mode='HTML' # ) utils.new_update(msg, time.time()-start_time) @bot.message_handler(content_types=['photo'], func = lambda", "types.InlineKeyboardButton(text = 'Да, выйди из чата', callback_data='leave_cancel::{chat_id}'.format(chat_id = chat_id)) keyboard.add(btn) btn = types.InlineKeyboardButton(text", "time.time() if utils.is_user_new(msg): if utils.have_args(msg): referrer = utils.parse_arg(msg)[1] bot.send_message( msg.chat.id, text.user_messages['start'], reply_markup=generate_user_menu_kb(msg.from_user.id) )", "utils.ban_sticker(msg, sticker_id) elif not utils.check_status(msg.from_user.id, msg.chat.id): utils.send_err_report(msg, 'not_enought_rights') utils.new_update(msg, time.time()-start_time) @bot.message_handler(commands=['sticker_unban'], func=lambda msg:", "= 'HTML' ) except Exception: bot.reply_to( msg, text = '' ) utils.new_update(msg, time.time()-start_time)", "text = 'Выберите тип рассылки' ) bot.edit_message_reply_markup( chat_id = c.message.chat.id, message_id = c.message.message_id,", "def handle(request): if request.match_info.get('token') == bot.token: request_body_dict = await request.json() update = telebot.types.Update.de_json(request_body_dict)", "change_count = int(c.data.split('_')[2].split('::')[0]) chat_id = utils.parse_chat_id(c) if utils.check_status(c.from_user.id, utils.parse_chat_id(c)): settings = api.get_group_params(chat_id) settings['warns']['count']", "логов создан') telebot_logger = logging.getLogger('telebot') mysql_info = logging.getLogger('mysql') main_info = logging.getLogger('main_info') report_info =", "= 'Действие при 
максимальном кол-ве варнов: {}'.format(config.warns_states[curr_settings['warns']['action']]), callback_data='empty_callback::{chat_id}'.format(chat_id = chat_id)) keyboard.add(btn) btn1 =", "= chat_id)) btn3 = types.InlineKeyboardButton(text = 'Бан', callback_data = 'warns_action_2::{chat_id}'.format(chat_id = chat_id)) btn4", "re.split('::', c.data) lang = words[1] bot.edit_message_text( chat_id = c.message.chat.id, message_id = c.message.message_id, text", "def bot_help(msg): start_time = time.time() bot.send_message( msg.from_user.id, text.user_messages[utils.get_user_lang(msg)]['help'], parse_mode='HTML' ) utils.new_update(msg, time.time()-start_time) @bot.message_handler(commands=['about'],", "if utils.check_status(c.from_user.id, utils.parse_chat_id(c)): settings = api.get_group_params(chat_id) curr_state = settings['greeting']['is_enabled'] new_state = config.settings_states[curr_state] settings['greeting']['is_enabled']", "bot.send_message( c.message.chat.id, 'Сброс отменен' ) @bot.callback_query_handler(func = lambda c: c.data.startswith('leave_')) def bot_leave_cb(c): if", "c: c.data.startswith('settings::')) def chat_settings(c): chat_id = utils.parse_chat_id(c) bot.edit_message_text( chat_id = c.message.chat.id, message_id =", ") @bot.callback_query_handler(func=lambda c: c.data.startswith('lang::')) def change_language(c): words = re.split('::', c.data) lang = words[1]", "= utils.make_broadcast, kwargs = { 'is_test': True, 'receivers': curr_bot_settings['broadcast']['check']['recievers'], 'cont_type': 'text', 'msg_text': '',", "= time.time() bot.send_message( msg.from_user.id, text.user_messages[utils.get_user_lang(msg)]['help'], parse_mode='HTML' ) utils.new_update(msg, time.time()-start_time) @bot.message_handler(commands=['about'], func=lambda msg: msg.chat.type", "chat_id = utils.parse_chat_id(c) if utils.check_status(c.from_user.id, utils.parse_chat_id(c)): api.zeroing_warns(user_id, chat_id) bot.edit_message_text( text = 'Предупреждения 
обнулены.',", "'Отправлять приветствие в чат: {}'.format(config.settings_statuses[curr_settings['greeting']['is_enabled']]), callback_data = 'welcome_state::{chat_id}'.format(chat_id = chat_id)) kb.add(btn) btn =", "utils.check_status(msg.from_user.id, msg.chat.id) and msg.forward_from_chat.id == utils.get_log_id(msg.chat.id) and utils.check_log(msg.chat.id) ) def bot_del_log(msg): print(1) user_id", "= c.message.message_id, reply_markup = group_setting(chat_id), ) @bot.callback_query_handler(func = lambda c: c.data == 'to_main_menu')", "utils.parse_chat_id(c) bot.send_message( c.message.chat.id, utils.get_greeting(chat_id), parse_mode = 'HTML' ) @bot.callback_query_handler(func = lambda c: c.data.startswith('reset_settings'))", "'➕2', callback_data = 'time_ro_+2::{chat_id}'.format(chat_id = chat_id)) btn5 = types.InlineKeyboardButton(text = 'Навсегда', callback_data =", ") bot.edit_message_reply_markup( chat_id = c.message.chat.id, message_id = c.message.message_id, reply_markup = group_setting(chat_id), ) @bot.callback_query_handler(func", "lambda c: c.data.startswith('welcome_timer')) def welcome_timer_change(c): change_count = int(c.data.split('_')[2].split('::')[0]) chat_id = utils.parse_chat_id(c) if utils.check_status(c.from_user.id,", ") kb.add(types.InlineKeyboardButton(text = 'Удалить', callback_data = 'settings_delete {} {}'.format(msg.message_id, r.message_id))) bot.edit_message_reply_markup( chat_id =", "f.close() print('Файл логов создан') telebot_logger = logging.getLogger('telebot') mysql_info = logging.getLogger('mysql') main_info = logging.getLogger('main_info')", "= 'settings_delete {} {}'.format(msg.message_id, r.message_id))) bot.edit_message_reply_markup( chat_id = msg.chat.id, message_id = r.message_id, reply_markup", "time.time() bot.reply_to(msg, \"<code>'{}': '{}',</code>\".format(msg.photo[0].file_id, msg.caption), parse_mode ='HTML') utils.new_update(msg, time.time()-start_time) @bot.message_handler(content_types = ['sticker'], func", ") 
else: bot.send_message( c.message.chat.id, text.group_commands[utils.get_group_lang(c.message.chat.id)]['leave']['cancelled'] ) bot.delete_message( c.message.chat.id, c.message.message_id ) # @bot.callback_query_handler(func =", "= int(c.data.split('_')[2].split('::')[0]) chat_id = utils.parse_chat_id(c) if utils.check_status(c.from_user.id, utils.parse_chat_id(c)): settings = api.get_group_params(chat_id) settings['warns']['count'] =", "reply_markup = generate_broadcast_check_menu_kb() ) @bot.callback_query_handler(func = lambda c: c.data.startswith('broadcast_check')) def bot_broadcast_check(c): arg =", "= time.time() bot.reply_to(msg, \"<code>'{}': '{}',</code>\".format(msg.photo[0].file_id, msg.caption), parse_mode ='HTML') utils.new_update(msg, time.time()-start_time) @bot.message_handler(content_types = ['sticker'],", "'➕1', callback_data = 'warns_count_+1::{chat_id}'.format(chat_id = chat_id)) btn4 = types.InlineKeyboardButton(text = '➕2', callback_data =", "bot_group_start(msg): start_time = time.time() api.register_new_chat(msg.chat) utils.new_update(msg, time.time()-start_time) @bot.message_handler(commands = ['get_logs'], func = lambda", "== 'channel': bot.send_message( msg.chat.id, text.promotion_message, parse_mode='HTML' ) bot.leave_chat( msg.chat.id ) if msg.new_chat_member.id ==", "config.available_attachments: utils.change_state_deletions_files(chat_id, i) bot.edit_message_reply_markup( chat_id = c.message.chat.id, message_id = c.message.message_id, reply_markup = delete_settings(chat_id)", "msg.chat.id, text = text.group_commands[utils.get_group_lang(chat_id)]['log_channel']['confirmation']['errors']['user_is_not_creator'] ) elif status2 is not True: bot.send_message( msg.chat.id, text", "kb def generate_broadcast_settings_menu_kb(): kb = types.InlineKeyboardMarkup(row_width = 2) btn1 = types.InlineKeyboardButton(text = 'Ввести", ") bot.answer_callback_query( c.id, text = 'Список отправлен', show_alert = True ) @bot.callback_query_handler(func =", 
"api.get_group_params(chat_id) settings['restrictions']['for_time'] = settings['restrictions']['for_time'] + change_time if settings['restrictions']['for_time'] < 1: settings['restrictions']['for_time'] = 1", "logging.getLogger('sqlite') main_info = logging.getLogger('main_info') report_info = logging.getLogger('reports') if __name__ == '__main__': log_name =", "открыт!) # На некоторых серверах придется указывать такой же IP, что и выше", "types.InlineKeyboardButton(text = '➕2', callback_data = 'warns_count_+2::{chat_id}'.format(chat_id = chat_id)) keyboard.add(btn1, btn2, btn3, btn4) btn", "api.change_group_params(chat_id, ujson.dumps(settings)) bot.edit_message_reply_markup( chat_id = c.message.chat.id, message_id = c.message.message_id, reply_markup = welcome_settings_kb(chat_id) )", "такой же IP, что и выше WEBHOOK_LISTEN = '0.0.0.0' WEBHOOK_SSL_CERT = './webhook_cert.pem' #", "= types.InlineKeyboardButton(text = 'Настройка приветствий', callback_data = 'welcome_settings::{chat_id}'.format(chat_id = chat_id)) keyboard.add(btn) btn =", "группы {}</b>'.format(msg.chat.title), reply_markup=group_setting(msg.chat.id), parse_mode='HTML' ) utils.new_update(msg, time.time()-start_time) @bot.message_handler(commands=['start'], func=lambda msg: msg.chat.type == 'private')", "msg: msg.chat.type == 'supergroup') def bot_reregister(msg): start_time = time.time() if utils.check_status(msg.from_user.id, msg.chat.id): api.register_new_chat(msg.chat)", "@bot.callback_query_handler(func = lambda c: c.data.startswith('read_only')) def new_users_ro(c): chat_id = utils.parse_chat_id(c) if utils.check_status(c.from_user.id, utils.parse_chat_id(c)):", "= new_state api.change_group_params(chat_id, ujson.dumps(settings)) bot.edit_message_reply_markup( chat_id = c.message.chat.id, message_id = c.message.message_id, reply_markup =", "языка'.format(config.settings_statuses['get_notifications']), callback_data='open_lang_menu') keyboard.add(btn) return keyboard def 
delete_settings(chat_id): keyboard = types.InlineKeyboardMarkup(row_width=1) curr_settings = api.get_group_params(chat_id)", "bot.leave_chat( c.message.chat.id ) else: bot.send_message( c.message.chat.id, text.group_commands[utils.get_group_lang(c.message.chat.id)]['leave']['cancelled'] ) bot.delete_message( c.message.chat.id, c.message.message_id ) #", "= logging.getLogger('telebot') sqlite_info = logging.getLogger('sqlite') main_info = logging.getLogger('main_info') report_info = logging.getLogger('reports') if __name__", "settings = api.get_group_params(chat_id) # settings[''] # api.change_group_params(chat_id, ) # Вебхук bot.remove_webhook() bot.set_webhook( url=WEBHOOK_URL_BASE", "def group_setting(chat_id): keyboard = types.InlineKeyboardMarkup(row_width=1) curr_settings = api.get_group_params(chat_id) btn = types.InlineKeyboardButton(text = 'Принимать", "= 'Навсегда', callback_data = 'time_ro_+10000::{chat_id}'.format(chat_id = chat_id)) btn6 = types.InlineKeyboardButton(text = 'Сброс', callback_data", "'supergroup') def bot_sticker_unban(msg): start_time = time.time() if utils.have_args(msg) and utils.check_status(msg.from_user.id, msg.chat.id): sticker_id =", "для выполнения этого действия.' 
) @bot.callback_query_handler(func = lambda c: c.data.startswith('welcome_settings')) def welcome_settings(c): chat_id", "@bot.callback_query_handler(func = lambda c: c.data.startswith('get_settings_json')) def bot_get_settings_json(c): chat_id = utils.parse_chat_id(c) bot.send_message( chat_id =", "= 'warns_action_3::{chat_id}'.format(chat_id = chat_id)) keyboard.add(btn1, btn2, btn3, btn4) btn = types.InlineKeyboardButton(text = 'Назад',", "time.time()-start_time) @bot.message_handler(commands = ['ban', 'ban_me_please'], func = lambda msg: msg.chat.type == 'supergroup') def", ") utils.new_update(msg, time.time()-start_time) @bot.message_handler(commands = ['rules'], func = lambda msg: msg.chat.type != 'private')", "= msg if len(msg.text) not in [9, 21]: new_greeting = msg.text[len(msg.text):msg.entities[0].length:-1][::-1] if utils.check_text(new_greeting):", "'no_args_provided') utils.new_update(msg, time.time()-start_time) @bot.message_handler(commands=['help']) def bot_help(msg): start_time = time.time() bot.send_message( msg.from_user.id, text.user_messages[utils.get_user_lang(msg)]['help'], parse_mode='HTML'", "callback_data = 'reset_settings_abort::{chat_id}'.format(chat_id = msg.chat.id))) if utils.check_status(msg.from_user.id, msg.chat.id): bot.send_message( msg.chat.id, 'Вы действительно хотите", ") bot.send_message( msg.chat.id, text.group_commands['ru']['restricted']['global_ban'].format( user_id = msg.new_chat_member.id, user_name = msg.new_chat_member.first_name ), parse_mode =", "msg.chat.id) and utils.have_args(msg): stickerpack_name = utils.parse_arg(msg)[1] utils.unban_stickerpack(msg, stickerpack_name) utils.new_update(msg, time.time()-start_time) @bot.message_handler(commands=['sticker_ban'], func=lambda msg:", "['sticker'], func = lambda msg: not utils.check_status(msg.from_user.id, msg.chat.id)) def bot_check_sticker(msg): start_time = time.time()", "= 60*t try: if not utils.check_status(msg.from_user.id, msg.chat.id): 
bot.restrict_chat_member( msg.chat.id, msg.from_user.id, until_date=str(time.time() + ban_time))", "настройки: {}'.format(config.settings_statuses[api.get_group_params(chat_id)['deletions']['system']]) ) @bot.callback_query_handler(func = lambda c: c.data.startswith('kick_bots')) def kick_bots(c): chat_id = utils.parse_chat_id(c)", ") utils.add_to_delete_queue(chat_id, c.message.message_id, api.get_group_params(chat_id)['greeting']['delete_timer']) else: bot.answer_callback_query( callback_query_id = c.id, show_alert = True, text", "= chat_id)) keyboard.add(btn) keyboard.add(types.InlineKeyboardButton(text = 'К списку групп', callback_data = 'to_groups_list')) return keyboard", "= bot.get_chat_member( chat_id, user_id ) if user.status in ['restricted']: bot.restrict_chat_member( chat_id, user_id, can_send_media_messages=True,", "lambda c: c.data.startswith('warns_count_')) def ro_time_change(c): change_count = int(c.data.split('_')[2].split('::')[0]) chat_id = utils.parse_chat_id(c) if utils.check_status(c.from_user.id,", "utils.new_update(msg, time.time()-start_timee) @bot.message_handler(content_types=['new_chat_members']) def bot_users_new(msg): start_time = time.time() api.register_new_chat(msg.chat) chat_id = msg.chat.id utils.new_member_logs(msg)", "msg.chat.id): api.register_new_chat(msg.chat) api.change_group_params(msg.chat.id, ujson.dumps(config.default_group_settings)) bot.send_message( msg.chat.id, text.group_commands[utils.get_group_lang(msg.chat.id)]['registration'], parse_mode = 'HTML' ) @bot.message_handler(commands=['ro'], func=lambda", "+ change_count if settings['greeting']['delete_timer'] < 0: settings['greeting']['delete_timer'] = 0 api.change_group_params(chat_id, ujson.dumps(settings)) bot.edit_message_reply_markup( chat_id", "types import api import cherrypy import config import secret_config import text import ujson", "'welcome_timer_-10::{chat_id}'.format(chat_id = chat_id)) btn2 = types.InlineKeyboardButton(text = '➖5', callback_data = 
'welcome_timer_-5::{chat_id}'.format(chat_id = chat_id))", "= logging.getLogger('telebot') mysql_info = logging.getLogger('mysql') main_info = logging.getLogger('main_info') report_info = logging.getLogger('reports') print('Список логгеров", "btn2, btn3, btn4) btn = types.InlineKeyboardButton(text = 'Назад', callback_data='to_group_settings_menu::{chat_id}'.format(chat_id = chat_id)) keyboard.add(btn) return", "btn2 = types.InlineKeyboardButton(text = 'Просмотреть сообщение', callback_data = 'broadcast_message::show') btn3 = types.InlineKeyboardButton(text =", "api.get_group_params(chat_id)['restrictions']['admins_only']: if utils.check_status(c.from_user.id, utils.parse_chat_id(c)): utils.unban_user_button(c) user = bot.get_chat_member( chat_id, user_id ) bot.edit_message_text( text", "msg.chat.id, text.group_commands[utils.get_group_lang(msg.chat.id)]['registration'], parse_mode = 'HTML' ) @bot.message_handler(commands=['ro'], func=lambda msg: msg.chat.type == 'supergroup') def", "21]: new_rules = msg.text[len(msg.text):msg.entities[0].length:-1][::-1] if utils.check_text(new_rules): utils.set_rules(msg, new_rules) bot.send_message( msg.chat.id, 'Правила изменены' )", "msg.chat.id, 'Вы действительно хотите сбросить настройки?', reply_markup = kb ) @bot.message_handler(commands = ['update_time'],", "'private') def bot_about(msg): start_time = time.time() bot.send_message( msg.chat.id, text.user_messages[utils.get_user_lang(msg)]['about'], parse_mode='HTML' ) utils.new_update(msg, time.time()-start_time)", "c: c.data.startswith('warns_action_')) def warns_count_change(c): new_mod = int(c.data.split('_')[2].split('::')[0]) chat_id = utils.parse_chat_id(c) if utils.check_status(c.from_user.id, utils.parse_chat_id(c)):", "kb ) t.start() t.join() @bot.callback_query_handler(func = lambda c: c.data == 'admin_menu') def bot_admin_menu(c):", "= 'Настройка предупреждений', callback_data = 'warns_settings::{chat_id}'.format(chat_id = chat_id)) keyboard.add(btn) btn = 
types.InlineKeyboardButton(text =", "callback_data='to_group_settings_menu::{chat_id}'.format(chat_id = chat_id)) keyboard.add(btn) return keyboard def remove_warns_kb(user_id): kb = types.InlineKeyboardMarkup(row_width=1) btn =", "utils.have_args(msg) and not check_status(msg.from_user.id): utils.send_err_report(msg, 'no_args_provided') utils.new_update(msg, time.time()-start_time) @bot.message_handler(commands=['help']) def bot_help(msg): start_time =", "def bot_get_rules(msg): start_time = time.time() try: bot.send_message( msg.from_user.id, utils.generate_rules_text(msg), parse_mode = 'HTML' )", "c: c.data in ['my_chats', 'to_groups_list']) def my_chats_list(c): user_id = c.from_user.id user_settings = api.get_user_param(user_id,", "import telebot from aiohttp import web from telebot import types import api import", "int(words) utils.unban_user(msg, user_id) elif utils.check_status(msg.from_user.id, msg.chat.id) and msg.reply_to_message is not None: user_id =", "= 'У вас недостаточно прав для выполнения этого действия.' 
) else: if c.from_user.id", "utils.make_broadcast, kwargs = { 'is_test': True, 'receivers': curr_bot_settings['broadcast']['check']['recievers'], 'cont_type': 'text', 'msg_text': '', 'file_id':", "datetime.datetime.fromtimestamp(int(time.time())), working_time = working_time_str, uptime_sec = uptime ), reply_to_message_id=msg.message_id, parse_mode='HTML' ) utils.new_update(msg, time.time()-start_timee)", "types.InlineKeyboardButton(text = 'Только чаты', callback_data = 'broadcast_check::chats') btn3 = types.InlineKeyboardButton(text = 'Все', callback_data", "types.InlineKeyboardMarkup(row_width=2) user_settings = ujson.loads(api.get_user_param(user_id, 'settings')) btns = [] for i in user_settings['admined_groups']: btn", "c: c.data.startswith('to_group_settings_menu')) def group_settings_deletions_photo(c): chat_id = utils.parse_chat_id(c) bot.edit_message_reply_markup( chat_id = c.message.chat.id, message_id =", "lang_keyboard.add(types.InlineKeyboardButton(text = i['title'], callback_data = 'lang::{lang_code}'.format(lang_code = i['code']))) return lang_keyboard def group_setting(chat_id): keyboard", "= chat_id)) keyboard.add(btn) btn = types.InlineKeyboardButton(text = 'Исключать ботов{}'.format(config.settings_statuses[curr_settings['kick_bots']]), callback_data='kick_bots::{chat_id}'.format(chat_id = chat_id)) keyboard.add(btn)", "msg.chat.type in ['group', 'supergroup']) def bot_remove_kb(msg): kb = types.ReplyKeyboardMarkup(one_time_keyboard=True) kb.add(types.KeyboardButton(text='/rmkb')) r = bot.send_message(", "# settings[''] # api.change_group_params(chat_id, ) # Вебхук bot.remove_webhook() bot.set_webhook( url=WEBHOOK_URL_BASE + WEBHOOK_URL_PATH, certificate=open(WEBHOOK_SSL_CERT,", "{}'.format(config.new_users[curr_settings['restrictions']['admins_only']]), callback_data = 'new_restrictions_admins_only_{state}::{chat_id}'.format(state = config.settings_states[curr_settings['restrictions']['admins_only']], chat_id = chat_id)) keyboard.add(btn) btn = 
types.InlineKeyboardButton(text", "chat_id = utils.parse_chat_id(c) if utils.check_status(c.from_user.id, utils.parse_chat_id(c)): utils.change_state_deletions_main(chat_id, 'system') bot.edit_message_reply_markup( chat_id=c.message.chat.id, message_id=c.message.message_id, reply_markup=group_setting(utils.parse_chat_id(c)) )", "действия.' ) @bot.callback_query_handler(func = lambda c: c.data.startswith('welcome_settings')) def welcome_settings(c): chat_id = utils.parse_chat_id(c) bot.edit_message_reply_markup(", "msg.chat.id)) def testt(msg): start_time = time.time() if utils.is_restricted(msg): bot.delete_message( msg.chat.id, msg.message_id ) utils.new_update(msg,", "text = 'У вас недостаточно прав для выполнения этого действия.' ) @bot.callback_query_handler(func =", "True, 'receivers': curr_bot_settings['broadcast']['check']['recievers'], 'cont_type': 'text', 'msg_text': '', 'file_id': '', 'user_id': c.from_user.id, 'message_id': c.message.message_id", "kb.add(btn) return kb def new_users_restrictions_kb(chat_id): keyboard = types.InlineKeyboardMarkup(row_width = 4) curr_settings = api.get_group_params(chat_id)", "1) btn1 = types.InlineKeyboardButton(text = 'Рассылка-проверка', callback_data = 'check_broadcast') btn2 = types.InlineKeyboardButton(text =", "bot.send_message( msg.chat.id, utils.generate_welcome_text(msg), parse_mode='HTML' ) utils.add_to_delete_queue(msg.chat.id, r.message_id, api.get_group_params(msg.chat.id)['greeting']['delete_timer']) utils.new_update(msg, time.time()-start_time) @bot.message_handler(content_types=[ 'new_chat_members', 'left_chat_member',", "'ru')) bot.send_message( msg.chat.id, text.user_messages['ru']['commands']['ping'].format( unix_time = datetime.datetime.fromtimestamp(int(time.time())), working_time = working_time_str, uptime_sec = uptime", "= '<b>Настройки группы {}</b>'.format(bot.get_chat(chat_id).title), parse_mode = 'HTML' ) bot.edit_message_reply_markup( chat_id = c.message.chat.id, message_id", "этого действия.' 
) @bot.callback_query_handler(func = lambda c: c.data.startswith('welcome_settings')) def welcome_settings(c): chat_id = utils.parse_chat_id(c)", "msg.chat.id, text = text.group_commands[utils.get_group_lang(chat_id)]['log_channel']['confirmation']['errors']['bot_is_not_admin'] ) except Exception as e: print(e) @bot.message_handler(commands = ['dellog'],", "types.InlineKeyboardMarkup(row_width=1) btn = types.InlineKeyboardButton(text = 'Разблокировать', callback_data = 'unban_new_user::{chat_id}::{user_id}'.format(user_id = msg.new_chat_member.id, chat_id =", "в случае сбоя:\\n'+ujson.dumps(api.get_group_params(chat_id)) ) bot.answer_callback_query( c.id, text = 'Настройки отправлены', show_alert = True", "= chat_id)) btn4 = types.InlineKeyboardButton(text = 'Read-only на сутки', callback_data = 'warns_action_3::{chat_id}'.format(chat_id =", "@bot.callback_query_handler(func = lambda c: c.data.startswith('settings::')) def chat_settings(c): chat_id = utils.parse_chat_id(c) bot.edit_message_text( chat_id =", "def bot_users_ro(msg): start_time = time.time() if utils.check_status(msg.from_user.id, msg.chat.id): utils.read_only(msg) else: utils.send_err_report(msg, 'not_enought_rights') utils.new_update(msg,", "bot.send_message( msg.chat.id, text.group_commands['ru']['restricted']['global_ban'].format( user_id = msg.new_chat_member.id, user_name = msg.new_chat_member.first_name ), parse_mode = 'HTML'", "datefmt='%m/%d/%Y %I:%M:%S %p', level = logging.INFO ) app = web.Application() t = Thread(target", "def generate_broadcast_vars_menu_kb(): kb = types.InlineKeyboardMarkup(row_width = 1) btn1 = types.InlineKeyboardButton(text = 'Рассылка-проверка', callback_data", "88 или 8443 (порт должен быть открыт!) 
# На некоторых серверах придется указывать", "sticker_id) elif not utils.check_status(msg.from_user.id, msg.chat.id): utils.send_err_report(msg, 'not_enought_rights') utils.new_update(msg, time.time()-start_time) @bot.message_handler(commands=['sticker_unban'], func=lambda msg: msg.chat.type", "if c.from_user.id == user_id or utils.check_status(c.from_user.id, utils.parse_chat_id(c)): user = bot.get_chat_member( chat_id, user_id )", "def bot_get_chat_refs(c): chat_id = utils.parse_chat_id(c) user_id = c.from_user.id inviters = utils.get_top_inviters(chat_id) m =", "t.join() @bot.callback_query_handler(func = lambda c: c.data == 'admin_menu') def bot_admin_menu(c): bot.edit_message_text( chat_id =", "chat_id)) keyboard.add(btn) btn = types.InlineKeyboardButton(text = 'Переключить все', callback_data = 'change_all::{chat_id}'.format(chat_id = chat_id))", "message_id = c.message.message_id, text = 'Админка' ) bot.edit_message_reply_markup( chat_id = c.message.chat.id, message_id =", "bot_update_time(msg): bot_ping(msg) subprocess.run(\"timedatectl set-time '{time}'\".format(time = datetime.datetime.fromtimestamp(msg.date+1).strftime(\"%Y-%m-%d %H:%M:%S\")), shell=True) bot_ping(msg) @bot.message_handler(content_types=['text'], func =", "msg.new_chat_member.id, user_name = msg.new_chat_member.first_name ), parse_mode = 'HTML' ) else: utils.new_user_in_chat(msg) if utils.need_greeting(msg):", "bot_user_menu(msg): bot.send_message( msg.from_user.id, 'Ваше меню', reply_markup = generate_user_menu_kb(msg.from_user.id) ) @bot.message_handler(commands=['set_text'], func = lambda", "not in [9, 21]: new_greeting = msg.text[len(msg.text):msg.entities[0].length:-1][::-1] if utils.check_text(new_greeting): utils.set_greeting(msg, new_greeting) bot.send_message( msg.chat.id,", "['leave'], func = lambda msg: msg.chat.type != 'private' and utils.check_status(msg.from_user.id, msg.chat.id)) def bot_leave(msg):", "!= 'private') def bot_reset_settings(msg): start_time = time.time() kb 
= types.InlineKeyboardMarkup() kb.add(types.InlineKeyboardButton(text = 'Да,", "in config.available_attachments: utils.change_state_deletions_files(chat_id, i) bot.edit_message_reply_markup( chat_id = c.message.chat.id, message_id = c.message.message_id, reply_markup =", "def welcome_timer_change(c): change_count = int(c.data.split('_')[2].split('::')[0]) chat_id = utils.parse_chat_id(c) if utils.check_status(c.from_user.id, utils.parse_chat_id(c)): settings =", "from telebot import types import api import cherrypy import config import secret_config import", "bot.reply_to( msg, text.reports_messages['report']['to_user'], parse_mode = 'HTML' ) utils.new_update(msg, time.time()-start_time) @bot.message_handler(commands = ['unban'], func", "keyboard.add(btn1, btn2, btn3, btn4) btn = types.InlineKeyboardButton(text = 'Действие при максимальном кол-ве варнов:", ") except Exception as e: print(e) bot.reply_to( msg, text.reports_messages['report']['to_user'], parse_mode = 'HTML' )", "curr_settings = api.get_group_params(chat_id) btn = types.InlineKeyboardButton(text = 'Максимальное количество исключений: {}'.format(curr_settings['warns']['count']), callback_data =", "msg.forward_from_chat.id) elif status1 is not True: bot.send_message( msg.chat.id, text = text.group_commands[utils.get_group_lang(chat_id)]['log_channel']['confirmation']['errors']['user_is_not_creator'] ) elif", ") utils.new_update(msg, time.time()-start_time) @bot.message_handler(commands=['about'], func=lambda msg: msg.chat.type == 'private') def bot_about(msg): start_time =", "= api.replacer(msg.from_user.first_name) # ), # parse_mode='HTML' # ) utils.new_update(msg, time.time()-start_time) @bot.message_handler(content_types=['photo'], func =", "parse_mode = 'HTML' ) bot.edit_message_reply_markup( chat_id = c.message.chat.id, message_id = c.message.message_id, reply_markup =", "utils.parse_chat_id(c)): utils.unban_user_button(c) user = bot.get_chat_member( chat_id, user_id ) bot.edit_message_text( text = 
text.group_commands[utils.get_group_lang(c.message.chat.id)]['restricted']['new_user']['button_pressed'].format( user_id", "= ['voteban']) # def bot_voteban(msg): # utils.new_voteban(msg) # bot.send_message( # msg.chat.id, # text.", "else: bot.answer_callback_query( callback_query_id = c.id, show_alert = True, text = 'У вас недостаточно", "subprocess.run(\"timedatectl set-time '{time}'\".format(time = datetime.datetime.fromtimestamp(msg.date+1).strftime(\"%Y-%m-%d %H:%M:%S\")), shell=True) bot_ping(msg) @bot.message_handler(content_types=['text'], func = lambda msg:", "= 'Только диалоги', callback_data = 'broadcast_check::users') btn2 = types.InlineKeyboardButton(text = 'Только чаты', callback_data", "utils.need_greeting(msg): r = bot.send_message( msg.chat.id, utils.generate_welcome_text(msg), parse_mode='HTML' ) utils.add_to_delete_queue(msg.chat.id, r.message_id, api.get_group_params(msg.chat.id)['greeting']['delete_timer']) utils.new_update(msg, time.time()-start_time)", "'➕5', callback_data = 'welcome_timer_+5::{chat_id}'.format(chat_id = chat_id)) btn4 = types.InlineKeyboardButton(text = '➕10', callback_data =", "в личные сообщения', ) kb.add(types.InlineKeyboardButton(text = 'Удалить', callback_data = 'settings_delete {} {}'.format(msg.message_id, r.message_id)))", "c.message.chat.id, message_id = c.message.message_id, reply_markup = generate_user_groups(user_id) ) bot.answer_callback_query( callback_query_id = c.id, text", "new_users_restrictions(c): chat_id = utils.parse_chat_id(c) bot.edit_message_reply_markup( chat_id = c.message.chat.id, message_id = c.message.message_id, reply_markup =", "r.message_id, reply_markup = kb ) bot.send_message( msg.from_user.id, '<b>Настройки группы {}</b>'.format(msg.chat.title), reply_markup=group_setting(msg.chat.id), parse_mode='HTML' )", "for i in config.available_attachments: utils.change_state_deletions_files(chat_id, i) bot.edit_message_reply_markup( chat_id = c.message.chat.id, message_id = c.message.message_id,", 
"= 'Изменения подтверждены.' ) @bot.callback_query_handler(func = lambda c: c.data.startswith('warns_del')) def del_warns(c): user_id =", "= telebot.TeleBot(token = secret_config.token) my_info = bot.get_me() telebot_logger = logging.getLogger('telebot') sqlite_info = logging.getLogger('sqlite')", "def create_user_language_keyboard(): lang_keyboard = types.InlineKeyboardMarkup() for i in config.languages: lang_keyboard.add(types.InlineKeyboardButton(text = i['title'], callback_data", "= lambda c: c.data.startswith('del_system')) def del_system(c): chat_id = utils.parse_chat_id(c) if utils.check_status(c.from_user.id, utils.parse_chat_id(c)): utils.change_state_deletions_main(chat_id,", "logging.getLogger('mysql') main_info = logging.getLogger('main_info') report_info = logging.getLogger('reports') print('Список логгеров создан') logging.basicConfig( format='%(filename)s [LINE:%(lineno)-3d]#", "= utils.parse_chat_id(c) bot.edit_message_reply_markup( chat_id = c.message.chat.id, message_id = c.message.message_id, reply_markup=group_setting(utils.parse_chat_id(c)) ) bot.answer_callback_query( callback_query_id", "kb.add(types.InlineKeyboardButton(text = 'В главное меню', callback_data = 'to_main_menu')) return kb def generate_broadcast_vars_menu_kb(): kb", "types.ReplyKeyboardMarkup(one_time_keyboard=True) kb.add(types.KeyboardButton(text='/rmkb')) r = bot.send_message( msg.chat.id, text = text.group_commands[utils.get_group_lang(msg.chat.id)]['remove_keyboard'], reply_markup = kb )", "start_time = time.time() if api.get_group_params(msg.chat.id)['deletions']['system']: bot.delete_message( msg.chat.id, msg.message_id ) utils.new_update(msg, time.time()-start_time) @bot.message_handler(commands=['report']) def", "parse_mode = 'HTML' ) @bot.message_handler(commands = ['set_rules'], func = lambda msg: utils.check_status(msg.from_user.id, msg.chat.id))", "utils.check_status(c.from_user.id, utils.parse_chat_id(c)): settings = api.get_group_params(chat_id) 
settings['warns']['action'] = new_mod api.change_group_params(chat_id, ujson.dumps(settings)) bot.edit_message_reply_markup( chat_id =", "= 'change_all::{chat_id}'.format(chat_id = chat_id)) keyboard.add(btn) btn = types.InlineKeyboardButton(text = 'Назад', callback_data='to_group_settings_menu::{chat_id}'.format(chat_id = chat_id))", "and api.get_group_params(msg.chat.id)['restrict_new'] == '1': if utils.check_for_urls(msg) and api.get_group_params(msg.chat.id)['deletions']['url']: bot.delete_message( msg.chat.id, msg.message_id ) bot.send_message(", "чаты', callback_data = 'broadcast_check::chats') btn3 = types.InlineKeyboardButton(text = 'Все', callback_data = 'broadcast_check::all') btn4", "utils.have_args(msg): referrer = utils.parse_arg(msg)[1] bot.send_message( msg.chat.id, text.user_messages['start'], reply_markup=generate_user_menu_kb(msg.from_user.id) ) api.register_new_user(msg.from_user, 'ru') else: bot.send_message(", "def change_captcha_settings(c): # chat_id = utils.parse_chat_id(c) # if utils.check_status(c.from_user.id, utils.parse_chat_id(c)): # settings =", "user.status in ['restricted']: bot.restrict_chat_member( chat_id, user_id, can_send_media_messages=True, can_add_web_page_previews=True, can_send_messages=True, can_send_other_messages=True ) bot.edit_message_text( text", "= int(time.time()-msg.date)) uptime_str = str(uptime).replace('day', 'days').replace('dayss', 'days') working_time_str = str(working_time).replace('day', 'days').replace('dayss', 'days') if", "'creator': status1 = True if i.user.id == my_info.id: status2 = True if status1", ") utils.add_to_delete_queue(msg.chat.id, r.message_id, api.get_group_params(msg.chat.id)['greeting']['delete_timer']) else: bot.answer_callback_query( callback_query_id = c.id, show_alert = True, text", "= types.InlineKeyboardButton(text = 'Ничего', callback_data = 'warns_action_0::{chat_id}'.format(chat_id = chat_id)) btn2 = types.InlineKeyboardButton(text =", "= 'Настройка приветствий', callback_data 
= 'welcome_settings::{chat_id}'.format(chat_id = chat_id)) keyboard.add(btn) btn = types.InlineKeyboardButton(text =", "= types.InlineKeyboardMarkup(row_width = 2) btn1 = types.InlineKeyboardButton(text = 'Ввести сообщение', callback_data = 'broadcast_message::input')", "# msg.chat.id, # text. # ) @bot.message_handler(commands = ['version']) def bot_version(msg): bot.send_message( msg.chat.id,", "keyboard.add(btn) btn = types.InlineKeyboardButton(text = 'Переключить все', callback_data = 'change_all::{chat_id}'.format(chat_id = chat_id)) keyboard.add(btn)", "= api.replacer(msg.chat.title), group_username = chat.username, message_id = msg_id, user_id = msg.from_user.id, user_name =", "= 'lang::{lang_code}'.format(lang_code = i['code']))) return lang_keyboard def group_setting(chat_id): keyboard = types.InlineKeyboardMarkup(row_width=1) curr_settings =", "utils.new_update(msg, time.time()-start_time) @bot.message_handler(commands=['donate']) def bot_donate(msg): start_time = time.time() bot.send_message( msg.chat.id, text.group_commands['ru']['donate'], parse_mode =", "btn2 = types.InlineKeyboardButton(text = '➖5', callback_data = 'welcome_timer_-5::{chat_id}'.format(chat_id = chat_id)) btn3 = types.InlineKeyboardButton(text", "== 'change_lang') def bot_change_lang(c): user_id = c.from_user.id bot.edit_message_text( chat_id = c.message.chat.id, message_id =", "lambda c: c.data.startswith('settings_delete')) def del_settings(c): words = c.data.split() bot.delete_message( c.message.chat.id, words[2] ) bot.delete_message(", "'➖10', callback_data = 'welcome_timer_-10::{chat_id}'.format(chat_id = chat_id)) btn2 = types.InlineKeyboardButton(text = '➖5', callback_data =", "начата' ) bot.edit_message_reply_markup( chat_id = c.message.chat.id, message_id = c.message.message_id, reply_markup = generate_broadcast_check_menu_kb() )", "chat_id = utils.parse_chat_id(c) if utils.check_status(c.from_user.id, utils.parse_chat_id(c)): settings = api.get_group_params(chat_id) 
settings['warns']['count'] = settings['warns']['count'] +", "= settings['greeting']['delete_timer'] + change_count if settings['greeting']['delete_timer'] < 0: settings['greeting']['delete_timer'] = 0 api.change_group_params(chat_id, ujson.dumps(settings))", "kb.add(btn) return kb def unban_new_user_kb(msg): kb = types.InlineKeyboardMarkup(row_width=1) btn = types.InlineKeyboardButton(text = 'Разблокировать',", "c.id, text = 'Изменения подтверждены. Статус настройки: {}'.format(config.settings_statuses[api.get_group_params(chat_id)['deletions']['files'][cont_type]]) ) else: bot.answer_callback_query( callback_query_id =", "статус настройки: {}'.format(config.settings_statuses[api.get_group_params(chat_id)['deletions']['files'][cont_type]]) ) @bot.callback_query_handler(func = lambda c: c.data.startswith('change_all')) def group_settings_deletions_all(c): chat_id =", "= 4) curr_settings = api.get_group_params(chat_id) btn = types.InlineKeyboardButton(text = 'Автоматический read-only на {}", "на {} час - {}'.format(curr_settings['restrictions']['for_time'], config.settings_statuses[curr_settings['restrictions']['read_only']]), callback_data = 'read_only::{chat_id}'.format(chat_id = chat_id)) keyboard.add(btn) btn1", "= text.group_commands[utils.get_group_lang(msg.chat.id)]['log_channel']['info']['is_off'] bot.send_message( msg.chat.id, m, parse_mode = 'HTML' ) @bot.message_handler(commands = ['leave'], func", "@bot.message_handler(commands = ['get_logs'], func = lambda msg: msg.chat.id == -1001236256304 and utils.check_super_user(msg.from_user.id)) def", "not None: user_id = msg.reply_to_message.from_user.id utils.unban_user(msg, user_id) elif utils.check_status(msg.from_user.id, msg.chat.id) and not utils.have_args(msg):", "else: return web.Response(status=403) app.router.add_post('/{token}/', handle) def create_user_language_keyboard(): lang_keyboard = types.InlineKeyboardMarkup() for i in", "reply_markup=generate_user_menu_kb(msg.from_user.id) ) 
utils.new_update(msg, time.time()-start_time) @bot.message_handler(commands=['start'], func=lambda msg: msg.chat.type != 'private') def bot_group_start(msg): start_time", "= types.InlineKeyboardButton(text = 'Удалять системные сообщения{}'.format(config.settings_statuses[curr_settings['deletions']['system']]), callback_data = 'del_system::{chat_id}'.format(chat_id = chat_id)) keyboard.add(btn) btn", "= lambda c: c.data.startswith('broadcast_check')) def bot_broadcast_check(c): arg = c.data.split('::')[1] curr_bot_settings = ujson.loads(api.get_bot_settings(secret_config.token)) if", ") @bot.callback_query_handler(func = lambda c: c.data.startswith('del_url')) def del_url(c): chat_id = utils.parse_chat_id(c) if utils.check_status(c.from_user.id,", "администратором. Текущий статус настройки: {}'.format(config.settings_statuses[api.get_group_params(chat_id)['deletions']['url']]) ) @bot.callback_query_handler(func = lambda c: c.data.startswith('del_system')) def del_system(c):", "= chat_id)) btn6 = types.InlineKeyboardButton(text = 'Сброс', callback_data = 'time_ro_-10000000000::{chat_id}'.format(chat_id = chat_id)) keyboard.add(btn1,", "certificate=open(WEBHOOK_SSL_CERT, 'r')) context = ssl.SSLContext(ssl.PROTOCOL_TLSv1_2) context.load_cert_chain(WEBHOOK_SSL_CERT, WEBHOOK_SSL_PRIV) # Start aiohttp server web.run_app( app,", "utils.new_update(msg, time.time()-start_time) @bot.message_handler(commands = ['unban'], func = lambda msg: msg.chat.type == 'supergroup') def", "c: c.data.startswith('get_notifications')) def notify_change(c): chat_id = utils.parse_chat_id(c) if utils.check_status(c.from_user.id, utils.parse_chat_id(c)): utils.change_state_main(chat_id, 'get_notifications') bot.edit_message_reply_markup(", "сообщения', callback_data = 'broadcast_settings') kb.add(btn1, btn2) kb.add(types.InlineKeyboardButton(text = 'В главное меню', callback_data =", "text = 'Изменения подтверждены. 
Текущий статус настройки: {}'.format(config.settings_statuses[api.get_group_params(chat_id)['deletions']['system']]) ) else: bot.answer_callback_query( callback_query_id =", "types.InlineKeyboardMarkup() r = bot.reply_to( msg, 'Настройки отправлены вам в личные сообщения', ) kb.add(types.InlineKeyboardButton(text", "= lambda c: c.data.startswith('new_users_restrictions')) def new_users_restrictions(c): chat_id = utils.parse_chat_id(c) bot.edit_message_reply_markup( chat_id = c.message.chat.id,", "__name__ == '__main__': log_name = 'logs.txt' f = open(log_name,'w') f.close() print('Файл логов создан')", "is True: utils.remove_log_channel(msg.chat.id) elif status1 is not True: bot.send_message( msg.chat.id, text = text.group_commands[utils.get_group_lang(chat_id)]['log_channel']['confirmation']['errors']['user_is_not_creator']", "btn2, btn3, btn4) keyboard.add(btn5, btn6) btn = types.InlineKeyboardButton(text = 'Снятие ограничений разрешено для:", "utils.check_status(c.from_user.id, utils.parse_chat_id(c)): utils.change_state_main(chat_id, 'get_notifications') bot.edit_message_reply_markup( chat_id=c.message.chat.id, message_id=c.message.message_id, reply_markup=group_setting(utils.parse_chat_id(c)) ) bot.answer_callback_query( callback_query_id = c.id,", "keyboard.add(btn5, btn6) btn = types.InlineKeyboardButton(text = 'Снятие ограничений разрешено для: {}'.format(config.new_users[curr_settings['restrictions']['admins_only']]), callback_data =", "== 'to_main_menu') def bot_to_main_menu(c): bot.edit_message_text( chat_id = c.message.chat.id, message_id = c.message.message_id, text =", "недостаточно прав для выполнения этого действия.' 
) @bot.callback_query_handler(func = lambda c: c.data.startswith('welcome_settings')) def", "settings = api.get_group_params(chat_id) curr_state = settings['greeting']['is_enabled'] new_state = config.settings_states[curr_state] settings['greeting']['is_enabled'] = new_state api.change_group_params(chat_id,", "'not_enought_rights') utils.new_update(msg, time.time()-start_time) @bot.message_handler(commands = ['reregister'], func = lambda msg: msg.chat.type == 'supergroup')", "'HTML' ) utils.new_update(msg, time.time()-start_time) @bot.message_handler(commands = ['get_id']) def bot_get_id(msg): bot.send_message( msg.chat.id, msg.chat.id )", "'time_ro_-10000000000::{chat_id}'.format(chat_id = chat_id)) keyboard.add(btn1, btn2, btn3, btn4) keyboard.add(btn5, btn6) btn = types.InlineKeyboardButton(text =", "= 'unban_new_user::{chat_id}::{user_id}'.format(user_id = msg.new_chat_member.id, chat_id = msg.chat.id)) kb.add(btn) return kb def user_settings_main_menu(msg): keyboard", "types.InlineKeyboardButton(text = 'Только диалоги', callback_data = 'broadcast_check::users') btn2 = types.InlineKeyboardButton(text = 'Только чаты',", "if utils.check_super_user(msg.from_user.id): utils.global_unban(msg) elif msg_text.lower() in ['глобал бан']: if utils.check_super_user(msg.from_user.id): utils.global_ban(msg) elif not", "import random import re import ssl import subprocess import threading import time from", "['unban'], func = lambda msg: msg.chat.type == 'supergroup') def bot_user_unban(msg): start_time = time.time()", "# settings = api.get_group_params(chat_id) # settings[''] # api.change_group_params(chat_id, ) # Вебхук bot.remove_webhook() bot.set_webhook(", "is True and status2 is True: utils.remove_log_channel(msg.chat.id) elif status1 is not True: bot.send_message(", "c.message.message_id, text = '<b>Настройки группы {}</b>'.format(bot.get_chat(chat_id).title), parse_mode = 'HTML' ) bot.edit_message_reply_markup( chat_id =", "admins = 
bot.get_chat_administrators(msg.forward_from_chat.id) status1 = False status2 = False for i in admins:", "являетесь администратором. Текущий статус настройки: {}'.format(config.settings_statuses[api.get_group_params(chat_id)[c.data.split('::')[0]]]) ) @bot.callback_query_handler(func = lambda c: c.data.startswith('del_url')) def", "прав для выполнения этого действия.' ) else: if c.from_user.id == user_id or utils.check_status(c.from_user.id,", "curr_settings = api.get_group_params(chat_id) btn = types.InlineKeyboardButton(text = 'Отправлять приветствие в чат: {}'.format(config.settings_statuses[curr_settings['greeting']['is_enabled']]), callback_data", "'read_only::{chat_id}'.format(chat_id = chat_id)) keyboard.add(btn) btn1 = types.InlineKeyboardButton(text = '➖2', callback_data = 'time_ro_-2::{chat_id}'.format(chat_id =", "func = lambda msg: msg.chat.id == -1001236256304 and utils.check_super_user(msg.from_user.id)) def bot_logs(msg): bot.send_document(msg.chat.id, open('logs.txt',", "@bot.message_handler(commands = ['version']) def bot_version(msg): bot.send_message( msg.chat.id, text.user_messages[utils.get_user_lang(msg)]['commands']['version'].format(version = text.VERSION), parse_mode = 'HTML'", "and not utils.check_status(msg.reply_to_message.from_user.id, msg.chat.id): utils.new_warn(msg) elif not utils.check_status(msg.from_user.id, msg.chat.id): utils.send_err_report(msg, 'not_enought_rights') elif utils.check_status(msg.reply_to_message.from_user.id,", "bot.restrict_chat_member( msg.chat.id, msg.from_user.id, until_date=str(time.time() + ban_time)) bot.reply_to( msg, text.group_commands[utils.get_group_lang(msg.chat.id)]['ban_me_please'].format( t = t ),", "utils.parse_chat_id(c) if utils.check_status(c.from_user.id, utils.parse_chat_id(c)): if c.data.startswith('reset_settings_confirmation'): api.register_new_chat(c.message.chat) api.change_group_params(chat_id, ujson.dumps(config.default_group_settings)) bot.send_message( c.message.chat.id, 'Настройки 
сброшены.'", "'Рассылка начата' ) bot.edit_message_reply_markup( chat_id = c.message.chat.id, message_id = c.message.message_id, reply_markup = generate_broadcast_check_menu_kb()", "keyboard def generate_user_menu_kb(user_id): kb = types.InlineKeyboardMarkup(row_width = 1) btn1 = types.InlineKeyboardButton(text = 'Мои", "def del_url(c): chat_id = utils.parse_chat_id(c) if utils.check_status(c.from_user.id, utils.parse_chat_id(c)): utils.change_state_deletions_main(chat_id, 'url') bot.edit_message_reply_markup( chat_id=c.message.chat.id, message_id=c.message.message_id,", "msg: msg.chat.type != 'private') def bot_stickerpack_unban(msg): start_time = time.time() if utils.check_status(msg.from_user.id, msg.chat.id) and", "start_time = time.time() if utils.check_status(msg.from_user.id, msg.chat.id): sticker_id = msg.reply_to_message.sticker.file_id utils.ban_sticker(msg, sticker_id) elif not", "bot.send_message( msg.chat.id, msg.chat.id ) # @bot.message_handler(commands = ['voteban']) # def bot_voteban(msg): # utils.new_voteban(msg)", "настройки?', reply_markup = kb ) @bot.message_handler(commands = ['update_time'], func = lambda msg: utils.check_super_user(msg.from_user.id))", "== 'supergroup') def bot_user_unban(msg): start_time = time.time() if utils.check_status(msg.from_user.id, msg.chat.id) and utils.have_args(msg): words", "types.InlineKeyboardButton(text = 'Начать рассылку', callback_data = 'broadcast_check::start') kb.add(btn1, btn2, btn3) kb.add(btn4, btn5) return", "keyboard.add(btn) return keyboard def remove_warns_kb(user_id): kb = types.InlineKeyboardMarkup(row_width=1) btn = types.InlineKeyboardButton(text = 'Удалить", "chat_id = c.message.chat.id, message_id = c.message.message_id, reply_markup = generate_broadcast_check_menu_kb() ) @bot.callback_query_handler(func = lambda", "utils.check_status(c.from_user.id, utils.parse_chat_id(c)): utils.unban_user_button(c) user = bot.get_chat_member( chat_id, user_id ) bot.edit_message_text( text = 
text.group_commands[utils.get_group_lang(c.message.chat.id)]['restricted']['new_user']['button_pressed'].format(", "= lambda c: c.data.startswith('leave_')) def bot_leave_cb(c): if utils.check_status(c.from_user.id, utils.parse_chat_id(c)): if c.data.endswith('confirm'): bot.delete_message( c.message.chat.id,", "bot_set_text(msg): start_time = time.time() message = msg if len(msg.text) not in [9, 21]:", "= types.InlineKeyboardButton(text=config.available_attachments_str[cont_type].format(config.settings_statuses[curr_settings['deletions']['files'][cont_type]]), callback_data='delete::{content_type}::{chat_id}'.format(content_type = cont_type, chat_id = chat_id)) keyboard.add(btn) btn = types.InlineKeyboardButton(text =", "chat_id = c.message.chat.id, message_id = c.message.message_id, reply_markup = generate_broadcast_vars_menu_kb() ) @bot.callback_query_handler(func = lambda", "= text.VERSION), parse_mode = 'HTML' ) @bot.message_handler(commands = ['set_rules'], func = lambda msg:", "{}'.format(config.settings_statuses[api.get_group_params(chat_id)[c.data.split('::')[0]]]) ) @bot.callback_query_handler(func = lambda c: c.data.startswith('deletions_settings')) def to_deletions(c): chat_id = utils.parse_chat_id(c) bot.edit_message_reply_markup(", "недостаточно прав для выполнения этого действия.' 
) @bot.callback_query_handler(func = lambda c: c.data.startswith('warns_count_')) def", "Thread import telebot from aiohttp import web from telebot import types import api", "in config.languages: lang_keyboard.add(types.InlineKeyboardButton(text = i['title'], callback_data = 'lang::{lang_code}'.format(lang_code = i['code']))) return lang_keyboard def", "callback_data='delete::{content_type}::{chat_id}'.format(content_type = cont_type, chat_id = chat_id)) keyboard.add(btn) btn = types.InlineKeyboardButton(text = 'Переключить все',", "bot.delete_message( # msg.chat.id, # msg.message_id # ) # bot.send_message( # msg.chat.id, # text.group_commands[utils.get_group_lang(msg.chat.id)]['restricted']['url'].format(", "def warns_count_change(c): chat_id = utils.parse_chat_id(c) state = c.data.split('_')[4].split('::')[0] if utils.check_status(c.from_user.id, utils.parse_chat_id(c)): settings =", "chat_id)) kb.add(btn1, btn2, btn3, btn4) btn = types.InlineKeyboardButton(text = 'Показать приветствие', callback_data =", "{}'.format(curr_settings['warns']['count']), callback_data = 'empty_callback::{chat_id}'.format(chat_id = chat_id)) keyboard.add(btn) btn1 = types.InlineKeyboardButton(text = '➖2', callback_data", "msg: msg.chat.type != 'private') def bot_set_text(msg): start_time = time.time() message = msg if", "= time.time() if utils.check_status(msg.from_user.id, msg.chat.id) and utils.have_args(msg): words = utils.parse_arg(msg)[1] user_id = int(words)", "msg.chat.id): utils.read_only(msg) else: utils.send_err_report(msg, 'not_enought_rights') utils.new_update(msg, time.time()-start_time) @bot.message_handler(commands=['stickerpack_ban'],func=lambda msg: msg.chat.type == 'supergroup') def", "utils.have_args(msg) and utils.check_status(msg.from_user.id, msg.chat.id): sticker_id = utils.parse_arg(msg)[1] utils.unban_sticker(msg, sticker_id) elif utils.check_status(msg.from_user.id, msg.chat.id) and", "def bot_get_id(msg): bot.send_message( msg.chat.id, msg.chat.id ) # 
@bot.message_handler(commands = ['voteban']) # def bot_voteban(msg):", "= msg.chat.id))) if utils.check_status(msg.from_user.id, msg.chat.id): bot.send_message( msg.chat.id, 'Вы действительно хотите сбросить настройки?', reply_markup", "utils.parse_chat_id(c) if utils.check_status(c.from_user.id, utils.parse_chat_id(c)): api.zeroing_warns(user_id, chat_id) bot.edit_message_text( text = 'Предупреждения обнулены.', chat_id =", "import utils WEBHOOK_HOST = utils.get_my_ip() WEBHOOK_PORT = 8443 # 443, 80, 88 или", "msg.chat.id, msg.chat.id ) # @bot.message_handler(commands = ['voteban']) # def bot_voteban(msg): # utils.new_voteban(msg) #", "= chat_id)) keyboard.add(btn) btn1 = types.InlineKeyboardButton(text = '➖2', callback_data = 'time_ro_-2::{chat_id}'.format(chat_id = chat_id))", "c.message.message_id, reply_markup = delete_settings(utils.parse_chat_id(c)) ) bot.answer_callback_query( callback_query_id = c.id, text = 'Переход выполнен.'", "settings = api.get_group_params(chat_id) settings['warns']['count'] = settings['warns']['count'] + change_count if settings['warns']['count'] < 1: settings['warns']['count']", "= 'Максимальное количество исключений: {}'.format(curr_settings['warns']['count']), callback_data = 'empty_callback::{chat_id}'.format(chat_id = chat_id)) keyboard.add(btn) btn1 =", "msg.from_user.id, '<b>Настройки группы {}</b>'.format(msg.chat.title), reply_markup=group_setting(msg.chat.id), parse_mode='HTML' ) utils.new_update(msg, time.time()-start_time) @bot.message_handler(commands=['start'], func=lambda msg: msg.chat.type", "= utils.parse_chat_id(c) if utils.check_status(c.from_user.id, utils.parse_chat_id(c)): utils.change_state_main(chat_id, 'get_notifications') bot.edit_message_reply_markup( chat_id=c.message.chat.id, message_id=c.message.message_id, reply_markup=group_setting(utils.parse_chat_id(c)) ) bot.answer_callback_query(", "callback_data = 'broadcast_message::show') btn3 = types.InlineKeyboardButton(text = 'Начать рассылку', callback_data 
= 'broadcast_message::start') kb.add(btn1,", "= 'Начать рассылку', callback_data = 'broadcast_message::start') kb.add(btn1, btn2, btn3) return kb def generate_broadcast_check_menu_kb():", "bot_sticker_ban(msg): start_time = time.time() if utils.check_status(msg.from_user.id, msg.chat.id): sticker_id = msg.reply_to_message.sticker.file_id utils.ban_sticker(msg, sticker_id) elif", "start_time = time.time() bot.send_message( msg.chat.id, text.group_commands['ru']['donate'], parse_mode = 'HTML' ) utils.new_update(msg, time.time()-start_time) @bot.message_handler(commands", "'time_ro_+1::{chat_id}'.format(chat_id = chat_id)) btn4 = types.InlineKeyboardButton(text = '➕2', callback_data = 'time_ro_+2::{chat_id}'.format(chat_id = chat_id))", "lambda msg: msg.chat.type == 'supergroup') def bot_user_unban(msg): start_time = time.time() if utils.check_status(msg.from_user.id, msg.chat.id)", "= c.message.message_id ) else: bot.answer_callback_query( callback_query_id = c.id, show_alert = True, text =", "'stats_menu') def bot_stats_menu(c): bot.edit_message_text( chat_id = c.message.chat.id, message_id = c.message.message_id, text = text.service_messages['stats'].format(", "вас недостаточно прав для выполнения этого действия.' ) @bot.callback_query_handler(func = lambda c: c.data.startswith('warns_count_'))", "c.message.chat.id, message_id = c.message.message_id, reply_markup = delete_settings(chat_id) ) bot.answer_callback_query( callback_query_id = c.id, text", "keyboard.add(btn) return keyboard def generate_leave_kb(msg): chat_id = msg.chat.id keyboard = types.InlineKeyboardMarkup(row_width=1) btn =", "'Переход выполнен.' 
) @bot.callback_query_handler(func = lambda c: c.data.startswith('delete::')) def group_settings_deletions(c): chat_id = utils.parse_chat_id(c)", "is not None and utils.check_status(msg.from_user.id, msg.chat.id) and msg.forward_from_chat.id == utils.get_log_id(msg.chat.id) and utils.check_log(msg.chat.id) )", "Вебхук bot.remove_webhook() bot.set_webhook( url=WEBHOOK_URL_BASE + WEBHOOK_URL_PATH, certificate=open(WEBHOOK_SSL_CERT, 'r')) context = ssl.SSLContext(ssl.PROTOCOL_TLSv1_2) context.load_cert_chain(WEBHOOK_SSL_CERT, WEBHOOK_SSL_PRIV)", "msg.from_user.id, user_name = api.replacer(msg.from_user.first_name) ), parse_mode='HTML' ) # elif utils.check_for_forward(msg) and api.get_group_params(msg.chat.id)['deletions']['forward']: #", "not utils.have_args(msg): utils.send_err_report(msg, 'not_enought_rights') elif utils.have_args(msg) and not check_status(msg.from_user.id): utils.send_err_report(msg, 'no_args_provided') utils.new_update(msg, time.time()-start_time)", "= time.time() if utils.is_restricted(msg): bot.delete_message( msg.chat.id, msg.message_id ) utils.new_update(msg, time.time()-start_time) # Кнопки @bot.callback_query_handler(func", ") bot.delete_message( c.message.chat.id, c.message.message_id ) # @bot.callback_query_handler(func = lambda c: c.data.startswith('settings_captcha')) # def", "+ change_count if settings['warns']['count'] < 1: settings['warns']['count'] = 1 api.change_group_params(chat_id, ujson.dumps(settings)) bot.edit_message_reply_markup( chat_id", "message = msg if len(msg.text) not in [9, 21]: new_rules = msg.text[len(msg.text):msg.entities[0].length:-1][::-1] if", "msg.from_user.id, 'Ваше меню', reply_markup = generate_user_menu_kb(msg.from_user.id) ) @bot.message_handler(commands=['set_text'], func = lambda msg: msg.chat.type", "ujson.dumps(settings)) bot.edit_message_reply_markup( chat_id = c.message.chat.id, message_id = c.message.message_id, reply_markup = new_users_restrictions_kb(chat_id) ) bot.answer_callback_query(", "= 
c.message.chat.id, message_id = c.message.message_id, reply_markup = delete_settings(utils.parse_chat_id(c)) ) bot.answer_callback_query( callback_query_id = c.id,", "bot.edit_message_text( chat_id = c.message.chat.id, message_id = c.message.message_id, text = 'Список ваших групп' )", "lambda c: c.data == 'to_main_menu') def bot_to_main_menu(c): bot.edit_message_text( chat_id = c.message.chat.id, message_id =", "= 'reset_settings_abort::{chat_id}'.format(chat_id = msg.chat.id))) if utils.check_status(msg.from_user.id, msg.chat.id): bot.send_message( msg.chat.id, 'Вы действительно хотите сбросить", "user_id = utils.parse_user_id(c) chat_id = utils.parse_chat_id(c) if utils.check_status(c.from_user.id, utils.parse_chat_id(c)): api.zeroing_warns(user_id, chat_id) bot.edit_message_text( text", "btn = types.InlineKeyboardButton(text = 'Назад', callback_data='to_group_settings_menu::{chat_id}'.format(chat_id = chat_id)) kb.add(btn) return kb def new_users_restrictions_kb(chat_id):", "что и выше WEBHOOK_LISTEN = '0.0.0.0' WEBHOOK_SSL_CERT = './webhook_cert.pem' # Путь к сертификату", "'Предупреждения обнулены.', chat_id = c.message.chat.id, message_id = c.message.message_id ) else: bot.answer_callback_query( callback_query_id =", "unban_new_user(c): chat_id = utils.parse_chat_id(c) user_id = utils.parse_user_id(c) if api.get_group_params(chat_id)['restrictions']['admins_only']: if utils.check_status(c.from_user.id, utils.parse_chat_id(c)): utils.unban_user_button(c)", "статус настройки: {}'.format(config.settings_statuses[api.get_group_params(chat_id)['deletions']['system']]) ) else: bot.answer_callback_query( callback_query_id = c.id, show_alert = True, text", "bot.get_chat_administrators(msg.forward_from_chat.id) status1 = False status2 = False for i in admins: if i.user.id", "= ['reregister'], func = lambda msg: msg.chat.type == 'supergroup') def bot_reregister(msg): start_time =", "== 'admin_menu') def bot_admin_menu(c): bot.edit_message_text( chat_id = 
c.message.chat.id, message_id = c.message.message_id, text =", "'Изменения подтверждены. Статус настройки: {}'.format(config.settings_statuses[api.get_group_params(chat_id)['deletions']['files'][cont_type]]) ) else: bot.answer_callback_query( callback_query_id = c.id, show_alert =", "= counter, inviter_id = inviter_info.user.id, inviter_firstname = inviter_info.user.first_name, invited_count = int(i['COUNT(`inviter`)']) ) bot.send_message(", ") @bot.callback_query_handler(func = lambda c: c.data.startswith('welcome_get')) def get_welcome_text(c): chat_id = utils.parse_chat_id(c) bot.send_message( c.message.chat.id,", "чаты', callback_data = 'my_chats') btn2 = types.InlineKeyboardButton(text = 'Изменить язык', callback_data = 'change_lang')", "'supergroup') def bot_stickerpack_ban(msg): start_time = time.time() if utils.check_status(msg.from_user.id, msg.chat.id): utils.ban_stickerpack(msg) else: utils.send_err_report(msg, 'not_enought_rights')", "bot.edit_message_text( chat_id = c.message.chat.id, message_id = c.message.message_id, text = 'Рассылка начата' ) bot.edit_message_reply_markup(", "reply_markup = generate_admin_menu_kb() ) @bot.callback_query_handler(func=lambda c: c.data.startswith('lang::')) def change_language(c): words = re.split('::', c.data)", "else: utils.send_err_report(msg, 'not_enought_rights') utils.new_update(msg, time.time()-start_time) @bot.message_handler(commands=['stickerpack_ban'],func=lambda msg: msg.chat.type == 'supergroup') def bot_stickerpack_ban(msg): start_time", "if c.data.endswith('confirm'): bot.delete_message( c.message.chat.id, c.message.message_id ) bot.send_message( c.message.chat.id, text.group_commands[utils.get_group_lang(c.message.chat.id)]['leave']['accepted'] ) bot.leave_chat( c.message.chat.id )", "in user_settings['admined_groups']: btn = types.InlineKeyboardButton(text = i['title'], callback_data = 'settings::{chat_id}'.format(chat_id = i['chat_id'])) btns.append(btn)", "sticker_id) elif 
utils.check_status(msg.from_user.id, msg.chat.id) and not utils.have_args(msg): utils.send_err_report(msg, 'not_enought_rights') elif utils.have_args(msg) and not", "'days').replace('dayss', 'days') if uptime.days != 0: uptime_str = uptime_str.replace(uptime_str.split(',')[0], utils.get_text_translation(uptime_str.split(',')[0]), 'ru') if working_time.days", "msg: utils.check_super_user(msg.from_user.id)) def bot_update_time(msg): bot_ping(msg) subprocess.run(\"timedatectl set-time '{time}'\".format(time = datetime.datetime.fromtimestamp(msg.date+1).strftime(\"%Y-%m-%d %H:%M:%S\")), shell=True) bot_ping(msg)", "lambda msg: not utils.check_status(msg.from_user.id, msg.chat.id)) def bot_check_sticker(msg): start_time = time.time() if utils.is_restricted(msg) or", "= time.time() if utils.have_args(msg) and utils.check_status(msg.from_user.id, msg.chat.id): sticker_id = utils.parse_arg(msg)[1] utils.unban_sticker(msg, sticker_id) elif", "!= 'private') def bot_set_text(msg): start_time = time.time() message = msg if len(msg.text) not", "text.group_commands['ru']['restricted']['bot'], parse_mode = 'HTML', reply_markup = types.ReplyKeyboardRemove() ) elif utils.check_global_ban(msg): bot.kick_chat_member( msg.chat.id, msg.new_chat_member.id", "bot.token: request_body_dict = await request.json() update = telebot.types.Update.de_json(request_body_dict) bot.process_new_updates([update]) return web.Response() else: return", "['rules'], func = lambda msg: msg.chat.type != 'private') def bot_get_rules(msg): start_time = time.time()", "= True ) @bot.callback_query_handler(func = lambda c: c.data == 'stats_menu') def bot_stats_menu(c): bot.edit_message_text(", "'time_ro_-2::{chat_id}'.format(chat_id = chat_id)) btn2 = types.InlineKeyboardButton(text = '➖1', callback_data = 'time_ro_-1::{chat_id}'.format(chat_id = chat_id))", "True, text = 'Вы не являетесь администратором. 
Текущий статус настройки: {}'.format(config.settings_statuses[api.get_group_params(chat_id)[c.data.split('::')[0]]]) ) @bot.callback_query_handler(func", "c.message.chat.id, message_id = c.message.message_id, reply_markup=group_setting(utils.parse_chat_id(c)) ) bot.answer_callback_query( callback_query_id = c.id, text = 'Изменения", "= lambda c: c.data == 'stats_menu') def bot_stats_menu(c): bot.edit_message_text( chat_id = c.message.chat.id, message_id", "in ['my_chats', 'to_groups_list']) def my_chats_list(c): user_id = c.from_user.id user_settings = api.get_user_param(user_id, 'settings') bot.edit_message_text(", "= c.message.message_id, text = 'Рассылка начата' ) bot.edit_message_reply_markup( chat_id = c.message.chat.id, message_id =", "api.change_bot_settings(secret_config.token, ujson.dumps(curr_bot_settings)) bot.edit_message_reply_markup( chat_id = c.message.chat.id, message_id = c.message.message_id, reply_markup = generate_broadcast_check_menu_kb() )", "'Изменения подтверждены.' ) @bot.callback_query_handler(func = lambda c: c.data.startswith('warns_del')) def del_warns(c): user_id = utils.parse_user_id(c)", "msg_text_low.startswith('разбан'): if utils.check_super_user(msg.from_user.id): utils.global_unban(msg) elif msg_text.lower() in ['глобал бан']: if utils.check_super_user(msg.from_user.id): utils.global_ban(msg) elif", "c.id, text = 'Изменения подтверждены.' 
) @bot.callback_query_handler(func = lambda c: c.data.startswith('welcome_state')) def welcome_settings_state(c):", "= config.settings_states[curr_state] settings['greeting']['is_enabled'] = new_state api.change_group_params(chat_id, ujson.dumps(settings)) bot.edit_message_reply_markup( chat_id = c.message.chat.id, message_id =", "= 'welcome_timer_+10::{chat_id}'.format(chat_id = chat_id)) kb.add(btn1, btn2, btn3, btn4) btn = types.InlineKeyboardButton(text = 'Показать", "t.start() async def handle(request): if request.match_info.get('token') == bot.token: request_body_dict = await request.json() update", "utils.parse_chat_id(c) bot.edit_message_reply_markup( chat_id = c.message.chat.id, message_id = c.message.message_id, reply_markup = warns_settings_kb(chat_id) ) bot.answer_callback_query(", "r.message_id ) @bot.message_handler(commands =['setlog'], func = lambda msg: msg.chat.type in ['group', 'supergroup'] and", "= chat_id)) kb.add(btn1, btn2, btn3, btn4) btn = types.InlineKeyboardButton(text = 'Показать приветствие', callback_data", "time.time()-start_time) @bot.message_handler(content_types=[ 'new_chat_members', 'left_chat_member', 'new_chat_title', 'new_chat_photo', 'delete_chat_photo', 'group_chat_created', 'supergroup_chat_created', 'channel_chat_created', 'migrate_to_chat_id', 'migrate_from_chat_id', 'pinned_message'", "and not utils.have_args(msg): utils.send_err_report(msg, 'no_args_provided') else: utils.send_err_report(msg, 'not_enought_rights') utils.new_update(msg, time.time()-start_time) @bot.message_handler(commands = ['reregister'],", "= datetime.timedelta(seconds = int(time.time()-start_time)) working_time = datetime.timedelta(seconds = int(time.time()-msg.date)) uptime_str = str(uptime).replace('day', 'days').replace('dayss',", "callback_data = 'warns_count_-1::{chat_id}'.format(chat_id = chat_id)) btn3 = types.InlineKeyboardButton(text = '➕1', callback_data = 'warns_count_+1::{chat_id}'.format(chat_id", "import text import ujson import utils 
WEBHOOK_HOST = utils.get_my_ip() WEBHOOK_PORT = 8443 #", "WEBHOOK_URL_PATH = \"/%s/\" % (secret_config.token) start_time = int(time.time()) bot = telebot.TeleBot(token = secret_config.token)", "настройки: {}'.format(config.settings_statuses[api.get_group_params(chat_id)[c.data.split('::')[0]]]) ) else: bot.answer_callback_query( callback_query_id = c.id, show_alert = True, text =", "msg.chat.id): utils.send_err_report(msg, 'user_is_admin') utils.new_update(msg, time.time()-start_time) @bot.message_handler(commands=['donate']) def bot_donate(msg): start_time = time.time() bot.send_message( msg.chat.id,", "'Удалять системные сообщения{}'.format(config.settings_statuses[curr_settings['deletions']['system']]), callback_data = 'del_system::{chat_id}'.format(chat_id = chat_id)) keyboard.add(btn) btn = types.InlineKeyboardButton(text =", "%H:%M:%S\")), shell=True) bot_ping(msg) @bot.message_handler(content_types=['text'], func = lambda msg: msg.chat.type != 'private') def bot_check_text(msg):", "ujson import utils WEBHOOK_HOST = utils.get_my_ip() WEBHOOK_PORT = 8443 # 443, 80, 88", "types.InlineKeyboardButton(text = 'Принимать рассылки{}'.format(config.settings_statuses['get_notifications']), callback_data='get_notifications') keyboard.add(btn) btn = types.InlineKeyboardButton(text = 'Выбор языка'.format(config.settings_statuses['get_notifications']), callback_data='open_lang_menu')", "api import cherrypy import config import secret_config import text import ujson import utils", "= c.from_user.id, text = 'Эти настройки можно получить в любое время и отправить", "inviters = utils.get_top_inviters(chat_id) m = text.group_commands[utils.get_group_lang(chat_id)]['refs_stats']['header'] counter = 0 for i in inviters:", "i['inviter']) counter += 1 m += text.group_commands[utils.get_group_lang(chat_id)]['refs_stats']['body'].format( inviter_pos = counter, inviter_id = inviter_info.user.id,", "utils.add_to_delete_queue(chat_id, c.message.message_id, 
api.get_group_params(chat_id)['greeting']['delete_timer']) else: bot.answer_callback_query( callback_query_id = c.id, show_alert = True, text =", "message_id = c.message.message_id, reply_markup = welcome_settings_kb(chat_id) ) bot.answer_callback_query( callback_query_id = c.id, text =", "= chat_id)) keyboard.add(btn) btn1 = types.InlineKeyboardButton(text = 'Ничего', callback_data = 'warns_action_0::{chat_id}'.format(chat_id = chat_id))", "= utils.parse_chat_id(c) cont_type = re.split('::', c.data)[1] if utils.check_status(c.from_user.id, utils.parse_chat_id(c)): utils.change_state_deletions_files(chat_id, cont_type) bot.edit_message_reply_markup( chat_id", "chat_id = utils.parse_chat_id(c) if utils.check_status(c.from_user.id, utils.parse_chat_id(c)): settings = api.get_group_params(chat_id) settings['restrictions']['for_time'] = settings['restrictions']['for_time'] +", "c.data == 'check_broadcast') def bot_admin_menu(c): bot.edit_message_text( chat_id = c.message.chat.id, message_id = c.message.message_id, text", "btn1 = types.InlineKeyboardButton(text = 'Ввести сообщение', callback_data = 'broadcast_message::input') btn2 = types.InlineKeyboardButton(text =", "utils.new_update(msg, time.time()-start_time) @bot.message_handler(commands = ['reset_settings'], func = lambda msg: msg.chat.type != 'private') def", "0: working_time_str = working_time_str.replace(working_time_str.split(',')[0], utils.get_text_translation(working_time_str.split(',')[0], 'ru')) bot.send_message( msg.chat.id, text.user_messages['ru']['commands']['ping'].format( unix_time = datetime.datetime.fromtimestamp(int(time.time())), working_time", "bot_donate(msg): start_time = time.time() bot.send_message( msg.chat.id, text.group_commands['ru']['donate'], parse_mode = 'HTML' ) utils.new_update(msg, time.time()-start_time)", "= utils.parse_chat_id(c) if utils.check_status(c.from_user.id, utils.parse_chat_id(c)): for i in config.available_attachments: utils.change_state_deletions_files(chat_id, i) 
bot.edit_message_reply_markup( chat_id", "= int(time.time()+api.get_group_params(msg.chat.id)['restrictions']['for_time']*3600) ) r = bot.send_message( msg.chat.id, text.group_commands['ru']['restricted']['new_user']['read_only'].format( user_id = msg.new_chat_member.id, user_name =", "r.chat.id, r.message_id ) @bot.message_handler(commands =['setlog'], func = lambda msg: msg.chat.type in ['group', 'supergroup']", "callback_data = 'to_main_menu')) bot.edit_message_reply_markup( chat_id = c.message.chat.id, message_id = c.message.message_id, reply_markup = kb", ") bot.send_message( user_id, m, parse_mode = 'HTML' ) bot.answer_callback_query( c.id, text = 'Список", "прав для выполнения этого действия. Текущий статус настройки: {}'.format(config.settings_statuses[api.get_group_params(chat_id)['deletions']['files'][cont_type]]) ) @bot.callback_query_handler(func = lambda", "import subprocess import threading import time from multiprocessing import Process as Thread import", "utils.new_update(msg, time.time()-start_time) @bot.message_handler(commands=['report']) def bot_report(msg): start_time = time.time() admins = bot.get_chat_administrators(msg.chat.id) chat =", "c.message.message_id } ) kb = types.InlineKeyboardMarkup() kb.add(types.InlineKeyboardButton(text = 'В главное меню', callback_data =", "303986717) def bot_text(msg): start_time = time.time() bot.reply_to(msg, \"<code>'{}': '{}',</code>\".format(msg.photo[0].file_id, msg.caption), parse_mode ='HTML') utils.new_update(msg,", "def bot_change_lang(c): user_id = c.from_user.id bot.edit_message_text( chat_id = c.message.chat.id, message_id = c.message.message_id, text", "bot.answer_callback_query( callback_query_id = c.id, text = 'Изменения подтверждены.' 
) else: t = Thread(target", "bot_answ(msg): start_time = time.time() message = msg kb = types.InlineKeyboardMarkup() r = bot.reply_to(", "except Exception as e: logging.error(e) else: utils.ban_user(msg) utils.new_update(msg, time.time()-start_time) @bot.message_handler(commands=['ping']) def bot_ping(msg): start_timee", "msg.chat.id, msg.new_chat_member.id, until_date = int(time.time()+api.get_group_params(msg.chat.id)['restrictions']['for_time']*3600) ) r = bot.send_message( msg.chat.id, text.group_commands['ru']['restricted']['new_user']['read_only'].format( user_id =", "func = lambda msg: utils.check_status(msg.from_user.id, msg.chat.id)) def bot_set_rules(msg): start_time = time.time() message =", "@bot.callback_query_handler(func = lambda c: c.data.startswith('del_url')) def del_url(c): chat_id = utils.parse_chat_id(c) if utils.check_status(c.from_user.id, utils.parse_chat_id(c)):", "if user.status in ['restricted']: bot.restrict_chat_member( chat_id, user_id, can_send_media_messages=True, can_add_web_page_previews=True, can_send_messages=True, can_send_other_messages=True ) bot.edit_message_text(", ") bot.delete_message( c.message.chat.id, words[1] ) @bot.callback_query_handler(func = lambda c: c.data.startswith('welcome_get')) def get_welcome_text(c): chat_id", "show_alert = True ) @bot.callback_query_handler(func = lambda c: c.data in ['my_chats', 'to_groups_list']) def", "chat_id = c.message.chat.id, message_id = c.message.message_id, reply_markup = delete_settings(utils.parse_chat_id(c)) ) bot.answer_callback_query( callback_query_id =", "chat_id = utils.parse_chat_id(c) user_id = utils.parse_user_id(c) if api.get_group_params(chat_id)['restrictions']['admins_only']: if utils.check_status(c.from_user.id, utils.parse_chat_id(c)): utils.unban_user_button(c) user", "in admins: if i.user.id == user_id: if i.status == 'creator': status1 = True", "не работает' ) utils.new_update(msg, time.time()-start_time) @bot.message_handler(commands=['kick'], func=lambda 
msg: msg.chat.type != 'private') def bot_kick(msg):", "callback_data = 'reset_settings_confirmation::{chat_id}'.format(chat_id = msg.chat.id))) kb.add(types.InlineKeyboardButton(text = 'Нет, не стоит', callback_data = 'reset_settings_abort::{chat_id}'.format(chat_id", "btn = types.InlineKeyboardButton(text = 'Назад', callback_data='to_group_settings_menu::{chat_id}'.format(chat_id = chat_id)) keyboard.add(btn) return keyboard def remove_warns_kb(user_id):", "# msg.chat.id, # msg.message_id # ) # bot.send_message( # msg.chat.id, # text.group_commands[utils.get_group_lang(msg.chat.id)]['restricted']['url'].format( #", "= api.get_group_params(chat_id) btn = types.InlineKeyboardButton(text = 'Принимать рассылки{}'.format(config.settings_statuses[curr_settings['get_notifications']]), callback_data = 'get_notifications::{chat_id}'.format(chat_id = chat_id))", "msg.message_id ) utils.new_update(msg, time.time()-start_time) @bot.message_handler(content_types = ['audio', 'document', 'photo', 'sticker', 'video', 'video_note', 'voice',", "if utils.check_status(c.from_user.id, utils.parse_chat_id(c)): utils.change_state_deletions_files(chat_id, cont_type) bot.edit_message_reply_markup( chat_id = c.message.chat.id, message_id = c.message.message_id, reply_markup", "callback_query_id = c.id, text = 'Изменения подтверждены. 
Текущий статус настройки: {}'.format(config.settings_statuses[api.get_group_params(chat_id)['deletions']['system']]) ) else:", "i.user.id, txt.format( group_name = api.replacer(msg.chat.title), group_username = chat.username, message_id = msg_id, user_id =", "['reregister'], func = lambda msg: msg.chat.type == 'supergroup') def bot_reregister(msg): start_time = time.time()", "admins: try: bot.send_message( i.user.id, txt.format( group_name = api.replacer(msg.chat.title), group_username = chat.username, message_id =", "msg.chat.id, text.group_commands['ru']['donate'], parse_mode = 'HTML' ) utils.new_update(msg, time.time()-start_time) @bot.message_handler(commands = ['get_id']) def bot_get_id(msg):", "if utils.is_restricted(msg) or utils.is_sticker_restricted(msg): bot.delete_message( msg.chat.id, msg.message_id ) utils.new_update(msg, time.time()-start_time) @bot.message_handler(content_types = ['audio',", "меню', callback_data = 'to_main_menu')) return kb def generate_broadcast_vars_menu_kb(): kb = types.InlineKeyboardMarkup(row_width = 1)", "bot.reply_to( msg, text = '' ) utils.new_update(msg, time.time()-start_time) @bot.message_handler(commands = ['reset_settings'], func =", "user_id or utils.check_status(c.from_user.id, utils.parse_chat_id(c)): user = bot.get_chat_member( chat_id, user_id ) if user.status in", "bot.get_chat_member(chat_id, i['inviter']) counter += 1 m += text.group_commands[utils.get_group_lang(chat_id)]['refs_stats']['body'].format( inviter_pos = counter, inviter_id =", "'Переход выполнен' ) @bot.callback_query_handler(func = lambda c: c.data.startswith('settings::')) def chat_settings(c): chat_id = utils.parse_chat_id(c)", "для: {}'.format(config.new_users[curr_settings['restrictions']['admins_only']]), callback_data = 'new_restrictions_admins_only_{state}::{chat_id}'.format(state = config.settings_states[curr_settings['restrictions']['admins_only']], chat_id = chat_id)) keyboard.add(btn) btn =", "= time.time() if 
utils.check_status(msg.from_user.id, msg.chat.id) and utils.have_args(msg): stickerpack_name = utils.parse_arg(msg)[1] utils.unban_stickerpack(msg, stickerpack_name) utils.new_update(msg,", "btn3 = types.InlineKeyboardButton(text = 'Бан', callback_data = 'warns_action_2::{chat_id}'.format(chat_id = chat_id)) btn4 = types.InlineKeyboardButton(text", "if msg.new_chat_member.is_bot and api.get_group_params(msg.chat.id)['kick_bots']: bot.kick_chat_member( msg.chat.id, msg.new_chat_member.id ) bot.send_message( msg.chat.id, text.group_commands['ru']['restricted']['bot'], parse_mode =", "start_time = int(time.time()) bot = telebot.TeleBot(token = secret_config.token) my_info = bot.get_me() telebot_logger =", "= logging.getLogger('reports') print('Список логгеров создан') logging.basicConfig( format='%(filename)s [LINE:%(lineno)-3d]# %(levelname)-8s - %(name)-9s [%(asctime)s] -", "'delete_warns::{user_id}'.format(user_id = user_id)) kb.add(btn) return kb def unban_new_user_kb(msg): kb = types.InlineKeyboardMarkup(row_width=1) btn =", "= lambda msg: msg.chat.id == -1001236256304 and utils.check_super_user(msg.from_user.id)) def bot_logs(msg): bot.send_document(msg.chat.id, open('logs.txt', 'rb'))", "msg.from_user.id, # user_name = api.replacer(msg.from_user.first_name) # ), # parse_mode='HTML' # ) utils.new_update(msg, time.time()-start_time)", "else: m = text.group_commands[utils.get_group_lang(msg.chat.id)]['log_channel']['info']['is_off'] bot.send_message( msg.chat.id, m, parse_mode = 'HTML' ) @bot.message_handler(commands =", "'no_args_provided') else: utils.send_err_report(msg, 'not_enought_rights') utils.new_update(msg, time.time()-start_time) @bot.message_handler(commands = ['reregister'], func = lambda msg:", "types.InlineKeyboardButton(text = 'Все', callback_data = 'broadcast_check::all') btn4 = types.InlineKeyboardButton(text = 'Сейчас: {}'.format(s[curr_settings['broadcast']['check']['receivers']]), callback_data", "@bot.message_handler(content_types=['photo'], 
func = lambda msg: msg.chat.id == 303986717) def bot_text(msg): start_time = time.time()", "к приватному ключу WEBHOOK_URL_BASE = \"https://%s:%s\" % (WEBHOOK_HOST, WEBHOOK_PORT) WEBHOOK_URL_PATH = \"/%s/\" %", "re.split('::', c.data)[1] if utils.check_status(c.from_user.id, utils.parse_chat_id(c)): utils.change_state_deletions_files(chat_id, cont_type) bot.edit_message_reply_markup( chat_id = c.message.chat.id, message_id =", "вас недостаточно прав для выполнения этого действия.' ) @bot.callback_query_handler(func = lambda c: c.data.startswith('time_ro_'))", "start_time = time.time() message = msg if len(msg.text) not in [9, 21]: new_greeting", "msg.chat.id): utils.send_err_report(msg, 'not_enought_rights') utils.new_update(msg, time.time()-start_time) @bot.message_handler(commands=['sticker_unban'], func=lambda msg: msg.chat.type == 'supergroup') def bot_sticker_unban(msg):", "curr_state = settings['greeting']['is_enabled'] new_state = config.settings_states[curr_state] settings['greeting']['is_enabled'] = new_state api.change_group_params(chat_id, ujson.dumps(settings)) bot.edit_message_reply_markup( chat_id", "!= 'private') def bot_group_start(msg): start_time = time.time() api.register_new_chat(msg.chat) utils.new_update(msg, time.time()-start_time) @bot.message_handler(commands = ['get_logs'],", "@bot.message_handler(commands=['sticker_unban'], func=lambda msg: msg.chat.type == 'supergroup') def bot_sticker_unban(msg): start_time = time.time() if utils.have_args(msg)", "utils.unban_sticker(msg, sticker_id) elif utils.check_status(msg.from_user.id, msg.chat.id) and not utils.have_args(msg): utils.send_err_report(msg, 'not_enought_rights') elif utils.have_args(msg) and", "== '1': if utils.check_for_urls(msg) and api.get_group_params(msg.chat.id)['deletions']['url']: bot.delete_message( msg.chat.id, msg.message_id ) bot.send_message( msg.chat.id, text.group_commands[utils.get_group_lang(msg.chat.id)]['restricted']['url'].format(", "'Получить топ инвайтеров', 
callback_data = 'get_chat_refs::{chat_id}'.format(chat_id = chat_id)) keyboard.add(btn) keyboard.add(types.InlineKeyboardButton(text = 'К списку", "in ['group', 'supergroup']) def bot_info_log(msg): if utils.check_log(msg.chat.id): m = text.group_commands[utils.get_group_lang(msg.chat.id)]['log_channel']['info']['is_on'].format( chat_id = utils.get_log_id(msg.chat.id),", "bot.edit_message_text( chat_id = c.message.chat.id, message_id = c.message.message_id, text = '<b>Настройки группы {}</b>'.format(bot.get_chat(chat_id).title), parse_mode", "# msg.chat.id, # text.group_commands[utils.get_group_lang(msg.chat.id)]['restricted']['url'].format( # user_id = msg.from_user.id, # user_name = api.replacer(msg.from_user.first_name) #", "= api.get_users_count(), all_chats = api.get_chats_count(), unblocked_users = api.get_unblocked_users_count(), unblocked_chats = api.get_unblocked_chats_count() ) )", "== user_id or utils.check_status(c.from_user.id, utils.parse_chat_id(c)): user = bot.get_chat_member( chat_id, user_id ) if user.status", "return keyboard def remove_warns_kb(user_id): kb = types.InlineKeyboardMarkup(row_width=1) btn = types.InlineKeyboardButton(text = 'Удалить предупреждения',", "def bot_stickerpack_ban(msg): start_time = time.time() if utils.check_status(msg.from_user.id, msg.chat.id): utils.ban_stickerpack(msg) else: utils.send_err_report(msg, 'not_enought_rights') utils.new_update(msg,", "is True: utils.set_log_channel(msg.chat.id, msg.forward_from_chat.id) elif status1 is not True: bot.send_message( msg.chat.id, text =", "'supergroup') def bot_user_unban(msg): start_time = time.time() if utils.check_status(msg.from_user.id, msg.chat.id) and utils.have_args(msg): words =", "warns_count_change(c): chat_id = utils.parse_chat_id(c) bot.edit_message_reply_markup( chat_id = c.message.chat.id, message_id = c.message.message_id, reply_markup =", "settings = api.get_group_params(chat_id) settings['warns']['action'] = new_mod api.change_group_params(chat_id, 
ujson.dumps(settings)) bot.edit_message_reply_markup( chat_id = c.message.chat.id, message_id", "utils.check_status(c.from_user.id, utils.parse_chat_id(c)): settings = api.get_group_params(chat_id) settings['restrictions']['admins_only'] = utils.to_bool(state) api.change_group_params(chat_id, ujson.dumps(settings)) bot.edit_message_reply_markup( chat_id =", "new_rules) bot.send_message( msg.chat.id, 'Правила изменены' ) else: bot.send_message( msg.chat.id, text = 'Правила составлены", "keyboard.add(btn) keyboard.add(types.InlineKeyboardButton(text = 'К списку групп', callback_data = 'to_groups_list')) return keyboard def welcome_settings_kb(chat_id):", "c: c.data.startswith('read_only')) def new_users_ro(c): chat_id = utils.parse_chat_id(c) if utils.check_status(c.from_user.id, utils.parse_chat_id(c)): settings = api.get_group_params(chat_id)", "c.id, text = 'Изменения подтверждены.' ) @bot.callback_query_handler(func = lambda c: c.data.startswith('warns_del')) def del_warns(c):", "utils.parse_chat_id(c) if utils.check_status(c.from_user.id, utils.parse_chat_id(c)): settings = api.get_group_params(chat_id) settings['warns']['count'] = settings['warns']['count'] + change_count if", "None: user_id = msg.reply_to_message.from_user.id utils.unban_user(msg, user_id) elif utils.check_status(msg.from_user.id, msg.chat.id) and not utils.have_args(msg): utils.send_err_report(msg,", "IP, что и выше WEBHOOK_LISTEN = '0.0.0.0' WEBHOOK_SSL_CERT = './webhook_cert.pem' # Путь к", "= 'Статистика', callback_data = 'stats_menu') kb.add(btn1, btn2) kb.add(types.InlineKeyboardButton(text = 'В главное меню', callback_data", "curr_settings = api.get_group_params(chat_id) for cont_type in config.available_attachments: btn = types.InlineKeyboardButton(text=config.available_attachments_str[cont_type].format(config.settings_statuses[curr_settings['deletions']['files'][cont_type]]), callback_data='delete::{content_type}::{chat_id}'.format(content_type = cont_type,", "'not_enought_rights') 
elif utils.have_args(msg) and not check_status(msg.from_user.id): utils.send_err_report(msg, 'no_args_provided') utils.new_update(msg, time.time()-start_time) @bot.message_handler(commands=['help']) def bot_help(msg):", "config.languages: lang_keyboard.add(types.InlineKeyboardButton(text = i['title'], callback_data = 'lang::{lang_code}'.format(lang_code = i['code']))) return lang_keyboard def group_setting(chat_id):", "c.message.message_id, reply_markup = warns_settings_kb(chat_id) ) bot.answer_callback_query( callback_query_id = c.id, text = 'Изменения подтверждены.'", "чата', callback_data='leave_cancel::{chat_id}'.format(chat_id = chat_id)) keyboard.add(btn) btn = types.InlineKeyboardButton(text = 'Нет, останься', callback_data='leave_confirm::{chat_id}'.format(chat_id =", "msg.chat.id, 'Приветствие изменено' ) else: bot.send_message( msg.chat.id, text = 'Данное приветствие не работает'", "= 'Бан', callback_data = 'warns_action_2::{chat_id}'.format(chat_id = chat_id)) btn4 = types.InlineKeyboardButton(text = 'Read-only на", "time.time()-start_time) @bot.message_handler(commands = ['rules'], func = lambda msg: msg.chat.type != 'private') def bot_get_rules(msg):", "c.message.chat.id, message_id = c.message.message_id, reply_markup = group_setting(chat_id), ) @bot.callback_query_handler(func = lambda c: c.data", "@bot.callback_query_handler(func = lambda c: c.data.startswith('warns_count_')) def ro_time_change(c): change_count = int(c.data.split('_')[2].split('::')[0]) chat_id = utils.parse_chat_id(c)", "callback_query_id = c.id, text = 'Изменения подтверждены.' 
) @bot.callback_query_handler(func = lambda c: c.data.startswith('welcome_state'))", "= lambda c: c.data.startswith('to_group_settings_menu')) def group_settings_deletions_photo(c): chat_id = utils.parse_chat_id(c) bot.edit_message_reply_markup( chat_id = c.message.chat.id,", "lambda msg: utils.check_super_user(msg.from_user.id)) def bot_update_time(msg): bot_ping(msg) subprocess.run(\"timedatectl set-time '{time}'\".format(time = datetime.datetime.fromtimestamp(msg.date+1).strftime(\"%Y-%m-%d %H:%M:%S\")), shell=True)", "'change_lang') def bot_change_lang(c): user_id = c.from_user.id bot.edit_message_text( chat_id = c.message.chat.id, message_id = c.message.message_id,", "types.InlineKeyboardMarkup(row_width = 4) curr_settings = api.get_group_params(chat_id) btn = types.InlineKeyboardButton(text = 'Отправлять приветствие в", "if utils.check_super_user(user_id): kb.add(types.InlineKeyboardButton(text = 'Админка бота', callback_data = 'admin_menu')) return kb def generate_admin_menu_kb():", "= '' ) utils.new_update(msg, time.time()-start_time) @bot.message_handler(commands = ['reset_settings'], func = lambda msg: msg.chat.type", "['get_logs'], func = lambda msg: msg.chat.id == -1001236256304 and utils.check_super_user(msg.from_user.id)) def bot_logs(msg): bot.send_document(msg.chat.id,", "not utils.have_args(msg): utils.send_err_report(msg, 'no_args_provided') else: utils.send_err_report(msg, 'not_enought_rights') utils.new_update(msg, time.time()-start_time) @bot.message_handler(commands = ['reregister'], func", "# ), # parse_mode='HTML' # ) utils.new_update(msg, time.time()-start_time) @bot.message_handler(content_types=['photo'], func = lambda msg:", "utils.check_status(c.from_user.id, utils.parse_chat_id(c)): settings = api.get_group_params(chat_id) settings['restrictions']['for_time'] = settings['restrictions']['for_time'] + change_time if settings['restrictions']['for_time'] <", "= uptime_str.replace(uptime_str.split(',')[0], 
utils.get_text_translation(uptime_str.split(',')[0]), 'ru') if working_time.days != 0: working_time_str = working_time_str.replace(working_time_str.split(',')[0], utils.get_text_translation(working_time_str.split(',')[0], 'ru'))", "msg.chat.type != 'private') def bot_kick(msg): start_time = time.time() utils.kick_user(msg) utils.new_update(msg, time.time()-start_time) @bot.message_handler(commands =", "'all']: curr_bot_settings['broadcast']['check']['recievers'] = arg api.change_bot_settings(secret_config.token, ujson.dumps(curr_bot_settings)) bot.edit_message_reply_markup( chat_id = c.message.chat.id, message_id = c.message.message_id,", "msg.chat.type in ['group', 'supergroup']) def bot_info_log(msg): if utils.check_log(msg.chat.id): m = text.group_commands[utils.get_group_lang(msg.chat.id)]['log_channel']['info']['is_on'].format( chat_id =", "bot.send_message( msg.chat.id, text.user_messages['ru']['commands']['ping'].format( unix_time = datetime.datetime.fromtimestamp(int(time.time())), working_time = working_time_str, uptime_sec = uptime ),", "'users': 'пользователи', 'chats': 'диалоги', 'all': 'все' } btn1 = types.InlineKeyboardButton(text = 'Только диалоги',", "= types.InlineKeyboardButton(text = 'Снятие ограничений разрешено для: {}'.format(config.new_users[curr_settings['restrictions']['admins_only']]), callback_data = 'new_restrictions_admins_only_{state}::{chat_id}'.format(state = config.settings_states[curr_settings['restrictions']['admins_only']],", "(порт должен быть открыт!) # На некоторых серверах придется указывать такой же IP,", "def bot_users_new(msg): start_time = time.time() api.register_new_chat(msg.chat) chat_id = msg.chat.id utils.new_member_logs(msg) if api.get_group_params(msg.chat.id)['deletions']['system']: bot.delete_message(", "недостаточно прав для выполнения этого действия.' 
) @bot.callback_query_handler(func = lambda c: c.data.startswith('unban_new_user')) def", "parse_mode='HTML' ) utils.new_update(msg, time.time()-start_time) @bot.message_handler(commands=['start'], func=lambda msg: msg.chat.type == 'private') def bot_user_start(msg): message", "'group_chat_created', 'supergroup_chat_created', 'channel_chat_created', 'migrate_to_chat_id', 'migrate_from_chat_id', 'pinned_message' ]) def bot_check_system(msg): start_time = time.time() if", "'days').replace('dayss', 'days') working_time_str = str(working_time).replace('day', 'days').replace('dayss', 'days') if uptime.days != 0: uptime_str =", "c.data == 'stats_menu') def bot_stats_menu(c): bot.edit_message_text( chat_id = c.message.chat.id, message_id = c.message.message_id, text", "else: bot.answer_callback_query( callback_query_id = c.id, show_alert = True, text = 'Вы не являетесь", "bot.send_message( msg.chat.id, text.group_commands[utils.get_group_lang(msg.chat.id)]['registration'], parse_mode = 'HTML' ) @bot.message_handler(commands=['ro'], func=lambda msg: msg.chat.type == 'supergroup')", "c: c.data.startswith('unban_new_user')) def unban_new_user(c): chat_id = utils.parse_chat_id(c) user_id = utils.parse_user_id(c) if api.get_group_params(chat_id)['restrictions']['admins_only']: if", "types.InlineKeyboardButton(text = 'Рассылка сообщения', callback_data = 'broadcast_settings') kb.add(btn1, btn2) kb.add(types.InlineKeyboardButton(text = 'В главное", "parse_mode = 'HTML' ) @bot.callback_query_handler(func = lambda c: c.data.startswith('reset_settings')) def reset_settings_button(c): chat_id =", "кол-ве варнов: {}'.format(config.warns_states[curr_settings['warns']['action']]), callback_data='empty_callback::{chat_id}'.format(chat_id = chat_id)) keyboard.add(btn) btn1 = types.InlineKeyboardButton(text = 'Ничего', callback_data", "msg.chat.type == 'supergroup') def bot_answ(msg): start_time = time.time() message = msg kb =", "= chat_id)) keyboard.add(btn) return keyboard def 
generate_leave_kb(msg): chat_id = msg.chat.id keyboard = types.InlineKeyboardMarkup(row_width=1)", "if utils.check_status(c.from_user.id, utils.parse_chat_id(c)): settings = api.get_group_params(chat_id) settings['greeting']['delete_timer'] = settings['greeting']['delete_timer'] + change_count if settings['greeting']['delete_timer']", "= 'Ваше меню' ) bot.edit_message_reply_markup( chat_id = c.message.chat.id, message_id = c.message.message_id, reply_markup =", "callback_data = 'warns_settings::{chat_id}'.format(chat_id = chat_id)) keyboard.add(btn) btn = types.InlineKeyboardButton(text = 'Настройка приветствий', callback_data", "же IP, что и выше WEBHOOK_LISTEN = '0.0.0.0' WEBHOOK_SSL_CERT = './webhook_cert.pem' # Путь", ") @bot.message_handler(commands = ['update_time'], func = lambda msg: utils.check_super_user(msg.from_user.id)) def bot_update_time(msg): bot_ping(msg) subprocess.run(\"timedatectl", "chat_id)) keyboard.add(btn) return keyboard def warns_settings_kb(chat_id): keyboard = types.InlineKeyboardMarkup(row_width = 4) curr_settings =", "= types.InlineKeyboardButton(text = 'Рассылка сообщения', callback_data = 'broadcast_settings') kb.add(btn1, btn2) kb.add(types.InlineKeyboardButton(text = 'В", "= str(working_time).replace('day', 'days').replace('dayss', 'days') if uptime.days != 0: uptime_str = uptime_str.replace(uptime_str.split(',')[0], utils.get_text_translation(uptime_str.split(',')[0]), 'ru')", "\"https://%s:%s\" % (WEBHOOK_HOST, WEBHOOK_PORT) WEBHOOK_URL_PATH = \"/%s/\" % (secret_config.token) start_time = int(time.time()) bot", "= lambda msg: msg.chat.type != 'private') def bot_get_rules(msg): start_time = time.time() try: bot.send_message(", "= utils.parse_user_id(c) chat_id = utils.parse_chat_id(c) if utils.check_status(c.from_user.id, utils.parse_chat_id(c)): api.zeroing_warns(user_id, chat_id) bot.edit_message_text( text =", "WEBHOOK_SSL_CERT = './webhook_cert.pem' # Путь к сертификату WEBHOOK_SSL_PRIV = './webhook_pkey.pem' # Путь к", "= 
int(words) utils.unban_user(msg, user_id) elif utils.check_status(msg.from_user.id, msg.chat.id) and msg.reply_to_message is not None: user_id", "bot.send_message( c.message.chat.id, text.group_commands[utils.get_group_lang(c.message.chat.id)]['leave']['cancelled'] ) bot.delete_message( c.message.chat.id, c.message.message_id ) # @bot.callback_query_handler(func = lambda c:", "= time.time() if utils.check_status(msg.from_user.id, msg.chat.id) and msg.reply_to_message is not None and not utils.check_status(msg.reply_to_message.from_user.id,", "настройки можно получить в любое время и отправить @f0rden для восстановления их, в", "utils.parse_chat_id(c)): if c.data.startswith('reset_settings_confirmation'): api.register_new_chat(c.message.chat) api.change_group_params(chat_id, ujson.dumps(config.default_group_settings)) bot.send_message( c.message.chat.id, 'Настройки сброшены.' ) bot.delete_message( c.message.chat.id,", "msg.chat.type == 'private') def bot_about(msg): start_time = time.time() bot.send_message( msg.chat.id, text.user_messages[utils.get_user_lang(msg)]['about'], parse_mode='HTML' )", "log_name = 'logs.txt' f = open(log_name,'w') f.close() print('Файл логов создан') telebot_logger = logging.getLogger('telebot')", "= generate_leave_kb(msg), parse_mode = 'HTML' ) @bot.message_handler(commands = ['rmkb'], func = lambda msg:", "'Ничего', callback_data = 'warns_action_0::{chat_id}'.format(chat_id = chat_id)) btn2 = types.InlineKeyboardButton(text = 'Кик', callback_data =", "bot.send_message( chat_id = c.from_user.id, text = 'Эти настройки можно получить в любое время", "= lambda c: c.data.startswith('read_only')) def new_users_ro(c): chat_id = utils.parse_chat_id(c) if utils.check_status(c.from_user.id, utils.parse_chat_id(c)): settings", "chat_id) bot.edit_message_text( text = 'Предупреждения обнулены.', chat_id = c.message.chat.id, message_id = c.message.message_id )", "= lambda msg: msg.chat.id == secret_config.channel_ID) def bot_broadcast(msg): r = 
bot.forward_message(secret_config.official_chat, msg.chat.id, msg.message_id)", "api.get_user_param(user_id, 'settings') bot.edit_message_text( chat_id = c.message.chat.id, message_id = c.message.message_id, text = 'Список ваших", "'Вы не являетесь администратором. Текущий статус настройки: {}'.format(config.settings_statuses[api.get_group_params(chat_id)['deletions']['system']]) ) @bot.callback_query_handler(func = lambda c:", "выполнения этого действия.' ) @bot.callback_query_handler(func = lambda c: c.data.startswith('settings_delete')) def del_settings(c): words =", "chat_name = bot.get_chat(utils.get_log_id(msg.chat.id)).title ) else: m = text.group_commands[utils.get_group_lang(msg.chat.id)]['log_channel']['info']['is_off'] bot.send_message( msg.chat.id, m, parse_mode =", "c.message.chat.id, c.message.message_id ) else: bot.delete_message( c.message.chat.id, c.message.message_id ) bot.send_message( c.message.chat.id, 'Сброс отменен' )", "bot.send_document(msg.chat.id, open('logs.txt', 'rb')) @bot.message_handler(commands = ['menu']) def bot_user_menu(msg): bot.send_message( msg.from_user.id, 'Ваше меню', reply_markup", "c.message.message_id, reply_markup = group_setting(chat_id), ) @bot.callback_query_handler(func = lambda c: c.data == 'to_main_menu') def", "secret_config.token) my_info = bot.get_me() telebot_logger = logging.getLogger('telebot') sqlite_info = logging.getLogger('sqlite') main_info = logging.getLogger('main_info')", "callback_query_id = c.id, text = 'Изменения подтверждены. 
Текущий статус настройки: {}'.format(config.settings_statuses[api.get_group_params(chat_id)['deletions']['url']]) ) else:", "= types.InlineKeyboardButton(text = '➕1', callback_data = 'time_ro_+1::{chat_id}'.format(chat_id = chat_id)) btn4 = types.InlineKeyboardButton(text =", "'warns_action_3::{chat_id}'.format(chat_id = chat_id)) keyboard.add(btn1, btn2, btn3, btn4) btn = types.InlineKeyboardButton(text = 'Назад', callback_data='to_group_settings_menu::{chat_id}'.format(chat_id", "'Вы не являетесь администратором. Текущий статус настройки: {}'.format(config.settings_statuses[api.get_group_params(chat_id)['deletions']['url']]) ) @bot.callback_query_handler(func = lambda c:", "btn = types.InlineKeyboardButton(text = 'Фильтры', callback_data='deletions_settings::{chat_id}'.format(chat_id = chat_id)) keyboard.add(btn) btn = types.InlineKeyboardButton(text =", "Текущий статус настройки: {}'.format(config.settings_statuses[api.get_group_params(chat_id)['deletions']['system']]) ) else: bot.answer_callback_query( callback_query_id = c.id, show_alert = True,", "elif utils.check_status(msg.reply_to_message.from_user.id, msg.chat.id): utils.send_err_report(msg, 'user_is_admin') utils.new_update(msg, time.time()-start_time) @bot.message_handler(commands=['donate']) def bot_donate(msg): start_time = time.time()", "c.message.message_id, reply_markup = generate_broadcast_check_menu_kb() ) @bot.callback_query_handler(func = lambda c: c.data.startswith('broadcast_check')) def bot_broadcast_check(c): arg", "bot.edit_message_text( chat_id = c.message.chat.id, message_id = c.message.message_id, text = text.service_messages['stats'].format( all_users = api.get_users_count(),", "msg, text = '' ) utils.new_update(msg, time.time()-start_time) @bot.message_handler(commands = ['reset_settings'], func = lambda", "t = Thread(target = utils.make_broadcast, kwargs = { 'is_test': True, 'receivers': curr_bot_settings['broadcast']['check']['recievers'], 'cont_type':", 
"'welcome_timer_-5::{chat_id}'.format(chat_id = chat_id)) btn3 = types.InlineKeyboardButton(text = '➕5', callback_data = 'welcome_timer_+5::{chat_id}'.format(chat_id = chat_id))", "c.data.startswith('read_only')) def new_users_ro(c): chat_id = utils.parse_chat_id(c) if utils.check_status(c.from_user.id, utils.parse_chat_id(c)): settings = api.get_group_params(chat_id) settings['restrictions']['read_only']", "bot.delete_message( msg.chat.id, r.message_id ) bot.delete_message( msg.chat.id, msg.message_id ) @bot.message_handler(commands = ['settings'], func =", "= utils.parse_chat_id(c) if utils.check_status(c.from_user.id, utils.parse_chat_id(c)): if c.data.startswith('reset_settings_confirmation'): api.register_new_chat(c.message.chat) api.change_group_params(chat_id, ujson.dumps(config.default_group_settings)) bot.send_message( c.message.chat.id, 'Настройки", "True if status1 is True and status2 is True: utils.set_log_channel(msg.chat.id, msg.forward_from_chat.id) elif status1", "= 2) btn1 = types.InlineKeyboardButton(text = 'Ввести сообщение', callback_data = 'broadcast_message::input') btn2 =", "!= 'private') def bot_kick(msg): start_time = time.time() utils.kick_user(msg) utils.new_update(msg, time.time()-start_time) @bot.message_handler(commands = ['ban',", "msg.message_id ) @bot.message_handler(commands = ['settings'], func = lambda msg: msg.chat.type == 'supergroup') def", "api.get_user_param(msg.chat.id, 'settings') btn = types.InlineKeyboardButton(text = 'Принимать рассылки{}'.format(config.settings_statuses['get_notifications']), callback_data='get_notifications') keyboard.add(btn) btn = types.InlineKeyboardButton(text", "elif utils.have_args(msg) and not check_status(msg.from_user.id): utils.send_err_report(msg, 'no_args_provided') utils.new_update(msg, time.time()-start_time) @bot.message_handler(commands=['help']) def bot_help(msg): start_time", "report_info = logging.getLogger('reports') print('Список логгеров создан') logging.basicConfig( 
format='%(filename)s [LINE:%(lineno)-3d]# %(levelname)-8s - %(name)-9s [%(asctime)s]", "выполнен.' ) @bot.callback_query_handler(func = lambda c: c.data.startswith('delete::')) def group_settings_deletions(c): chat_id = utils.parse_chat_id(c) cont_type", "def welcome_settings(c): chat_id = utils.parse_chat_id(c) bot.edit_message_reply_markup( chat_id = c.message.chat.id, message_id = c.message.message_id, reply_markup", "'document', 'photo', 'sticker', 'video', 'video_note', 'voice', 'location', 'contact'], func = lambda msg: not", "bot.answer_callback_query( callback_query_id = c.id, text = 'Изменения подтверждены.' ) else: bot.answer_callback_query( callback_query_id =", "types.InlineKeyboardButton(text = 'Статистика', callback_data = 'stats_menu') kb.add(btn1, btn2) kb.add(types.InlineKeyboardButton(text = 'В главное меню',", "types.InlineKeyboardButton(text = 'Автоматический read-only на {} час - {}'.format(curr_settings['restrictions']['for_time'], config.settings_statuses[curr_settings['restrictions']['read_only']]), callback_data = 'read_only::{chat_id}'.format(chat_id", "= utils.parse_chat_id(c) bot.edit_message_text( chat_id = c.message.chat.id, message_id = c.message.message_id, text = '<b>Настройки группы", "отправлены вам в личные сообщения', ) kb.add(types.InlineKeyboardButton(text = 'Удалить', callback_data = 'settings_delete {}", "utils.to_bool(state) api.change_group_params(chat_id, ujson.dumps(settings)) bot.edit_message_reply_markup( chat_id = c.message.chat.id, message_id = c.message.message_id, reply_markup = new_users_restrictions_kb(chat_id)", "= datetime.timedelta(seconds = int(time.time()-msg.date)) uptime_str = str(uptime).replace('day', 'days').replace('dayss', 'days') working_time_str = str(working_time).replace('day', 'days').replace('dayss',", "msg.message_id ) if msg_text_low.startswith('разбан'): if utils.check_super_user(msg.from_user.id): utils.global_unban(msg) elif msg_text.lower() in ['глобал бан']: if", "lambda c: 
c.data.startswith('welcome_state')) def welcome_settings_state(c): chat_id = utils.parse_chat_id(c) if utils.check_status(c.from_user.id, utils.parse_chat_id(c)): settings =", "kb.add(types.InlineKeyboardButton(text = 'Админка бота', callback_data = 'admin_menu')) return kb def generate_admin_menu_kb(): kb =", "= utils.parse_chat_id(c) if utils.check_status(c.from_user.id, utils.parse_chat_id(c)): settings = api.get_group_params(chat_id) settings['warns']['count'] = settings['warns']['count'] + change_count", "сертификату WEBHOOK_SSL_PRIV = './webhook_pkey.pem' # Путь к приватному ключу WEBHOOK_URL_BASE = \"https://%s:%s\" %", "'warns_count_-1::{chat_id}'.format(chat_id = chat_id)) btn3 = types.InlineKeyboardButton(text = '➕1', callback_data = 'warns_count_+1::{chat_id}'.format(chat_id = chat_id))", "WEBHOOK_HOST = utils.get_my_ip() WEBHOOK_PORT = 8443 # 443, 80, 88 или 8443 (порт", "= types.InlineKeyboardMarkup(row_width = 1) btn1 = types.InlineKeyboardButton(text = 'Мои чаты', callback_data = 'my_chats')", "and utils.check_log(msg.chat.id) ) def bot_del_log(msg): print(1) user_id = msg.from_user.id try: admins = bot.get_chat_administrators(msg.forward_from_chat.id)", "bot.kick_chat_member( msg.chat.id, msg.new_chat_member.id ) bot.send_message( msg.chat.id, text.group_commands['ru']['restricted']['global_ban'].format( user_id = msg.new_chat_member.id, user_name = msg.new_chat_member.first_name", "callback_query_id = c.id, text = 'Переход выполнен' ) @bot.callback_query_handler(func = lambda c: c.data.startswith('settings::'))", "msg kb = types.InlineKeyboardMarkup() r = bot.reply_to( msg, 'Настройки отправлены вам в личные", "btn4) btn = types.InlineKeyboardButton(text = 'Назад', callback_data='to_group_settings_menu::{chat_id}'.format(chat_id = chat_id)) keyboard.add(btn) return keyboard def", "bot.edit_message_text( chat_id = c.message.chat.id, message_id = c.message.message_id, text = text.user_messages['start'], parse_mode = 'HTML'", "message_id = 
c.message.message_id, reply_markup = group_setting(chat_id), ) @bot.callback_query_handler(func = lambda c: c.data ==", "keyboard def warns_settings_kb(chat_id): keyboard = types.InlineKeyboardMarkup(row_width = 4) curr_settings = api.get_group_params(chat_id) btn =", "Exception as e: print(e) bot.reply_to( msg, text.reports_messages['report']['to_user'], parse_mode = 'HTML' ) utils.new_update(msg, time.time()-start_time)", "return web.Response() else: return web.Response(status=403) app.router.add_post('/{token}/', handle) def create_user_language_keyboard(): lang_keyboard = types.InlineKeyboardMarkup() for", "дамп настроек', callback_data = 'get_settings_json::{chat_id}'.format(chat_id = chat_id)) keyboard.add(btn) btn = types.InlineKeyboardButton(text = 'Получить", "%(levelname)-8s - %(name)-9s [%(asctime)s] - %(message)-50s ', datefmt='%m/%d/%Y %I:%M:%S %p', level = logging.INFO", "callback_data='deletions_settings::{chat_id}'.format(chat_id = chat_id)) keyboard.add(btn) btn = types.InlineKeyboardButton(text = 'Ограничения новых пользователей', callback_data =", "keyboard.add(btn) btn1 = types.InlineKeyboardButton(text = '➖2', callback_data = 'time_ro_-2::{chat_id}'.format(chat_id = chat_id)) btn2 =", "bot.send_message( i.user.id, txt.format( group_name = api.replacer(msg.chat.title), group_username = chat.username, message_id = msg_id, user_id", "chat_id = c.message.chat.id, message_id = c.message.message_id, text = '<b>Настройки группы {}</b>'.format(bot.get_chat(chat_id).title), parse_mode =", "if utils.check_status(c.from_user.id, utils.parse_chat_id(c)): utils.unban_user_button(c) user = bot.get_chat_member( chat_id, user_id ) bot.edit_message_text( text =", ") bot.send_message( c.message.chat.id, 'Сброс отменен' ) @bot.callback_query_handler(func = lambda c: c.data.startswith('leave_')) def bot_leave_cb(c):", "= False for i in admins: if i.user.id == user_id: if i.status ==", "c.data.startswith('to_group_settings_menu')) def 
group_settings_deletions_photo(c): chat_id = utils.parse_chat_id(c) bot.edit_message_reply_markup( chat_id = c.message.chat.id, message_id = c.message.message_id,", "msg.chat.type != 'private') def bot_get_rules(msg): start_time = time.time() try: bot.send_message( msg.from_user.id, utils.generate_rules_text(msg), parse_mode", "def bot_to_main_menu(c): bot.edit_message_text( chat_id = c.message.chat.id, message_id = c.message.message_id, text = 'Ваше меню'", "= 'HTML' ) @bot.callback_query_handler(func = lambda c: c.data.startswith('reset_settings')) def reset_settings_button(c): chat_id = utils.parse_chat_id(c)", "types.InlineKeyboardMarkup(row_width=1) curr_settings = api.get_group_params(chat_id) for cont_type in config.available_attachments: btn = types.InlineKeyboardButton(text=config.available_attachments_str[cont_type].format(config.settings_statuses[curr_settings['deletions']['files'][cont_type]]), callback_data='delete::{content_type}::{chat_id}'.format(content_type =", "logging.getLogger('telebot') mysql_info = logging.getLogger('mysql') main_info = logging.getLogger('main_info') report_info = logging.getLogger('reports') print('Список логгеров создан')", "час - {}'.format(curr_settings['restrictions']['for_time'], config.settings_statuses[curr_settings['restrictions']['read_only']]), callback_data = 'read_only::{chat_id}'.format(chat_id = chat_id)) keyboard.add(btn) btn1 = types.InlineKeyboardButton(text", "'unban_new_user::{chat_id}::{user_id}'.format(user_id = msg.new_chat_member.id, chat_id = msg.chat.id)) kb.add(btn) return kb def user_settings_main_menu(msg): keyboard =", "types.InlineKeyboardMarkup(row_width = 2) btn1 = types.InlineKeyboardButton(text = 'Ввести сообщение', callback_data = 'broadcast_message::input') btn2", "== 'supergroup') def bot_ban_me_please(msg): start_time = time.time() if msg.text == '/ban_me_please': t =", "time.time() if msg.text == '/ban_me_please': t = random.randint(1, 10) ban_time = 60*t try:", "and 
utils.have_args(msg): words = utils.parse_arg(msg)[1] user_id = int(words) utils.unban_user(msg, user_id) elif utils.check_status(msg.from_user.id, msg.chat.id)", "is not True: bot.send_message( msg.chat.id, text = text.group_commands[utils.get_group_lang(chat_id)]['log_channel']['confirmation']['errors']['user_is_not_creator'] ) elif status2 is not", "app.router.add_post('/{token}/', handle) def create_user_language_keyboard(): lang_keyboard = types.InlineKeyboardMarkup() for i in config.languages: lang_keyboard.add(types.InlineKeyboardButton(text =", "# def bot_voteban(msg): # utils.new_voteban(msg) # bot.send_message( # msg.chat.id, # text. # )", "utils.have_args(msg): stickerpack_name = utils.parse_arg(msg)[1] utils.unban_stickerpack(msg, stickerpack_name) utils.new_update(msg, time.time()-start_time) @bot.message_handler(commands=['sticker_ban'], func=lambda msg: msg.chat.type ==", "i in user_settings['admined_groups']: btn = types.InlineKeyboardButton(text = i['title'], callback_data = 'settings::{chat_id}'.format(chat_id = i['chat_id']))", "c.data.startswith('change_all')) def group_settings_deletions_all(c): chat_id = utils.parse_chat_id(c) if utils.check_status(c.from_user.id, utils.parse_chat_id(c)): for i in config.available_attachments:", "c: c.data.startswith('change_all')) def group_settings_deletions_all(c): chat_id = utils.parse_chat_id(c) if utils.check_status(c.from_user.id, utils.parse_chat_id(c)): for i in", "c.message.chat.id, words[1] ) @bot.callback_query_handler(func = lambda c: c.data.startswith('welcome_get')) def get_welcome_text(c): chat_id = utils.parse_chat_id(c)", "# Start aiohttp server web.run_app( app, host=WEBHOOK_LISTEN, port=WEBHOOK_PORT, ssl_context=context, ) # bot.remove_webhook() #", "chat_id)) keyboard.add(btn) btn = types.InlineKeyboardButton(text = 'Нет, останься', callback_data='leave_confirm::{chat_id}'.format(chat_id = chat_id)) keyboard.add(btn) return", "func = lambda msg: msg.chat.type in ['group', 'supergroup']) 
def bot_remove_kb(msg): kb = types.ReplyKeyboardMarkup(one_time_keyboard=True)", "= 'time_ro_+10000::{chat_id}'.format(chat_id = chat_id)) btn6 = types.InlineKeyboardButton(text = 'Сброс', callback_data = 'time_ro_-10000000000::{chat_id}'.format(chat_id =", "ujson.dumps(settings)) bot.edit_message_reply_markup( chat_id = c.message.chat.id, message_id = c.message.message_id, reply_markup = welcome_settings_kb(chat_id) ) bot.answer_callback_query(", "types.InlineKeyboardMarkup(row_width = 2) btn1 = types.InlineKeyboardButton(text = 'Рассылка', callback_data = 'broadcast_menu') btn2 =", "= types.InlineKeyboardButton(text = 'Мои чаты', callback_data = 'my_chats') btn2 = types.InlineKeyboardButton(text = 'Изменить", "msg.chat.id): bot.restrict_chat_member( msg.chat.id, msg.from_user.id, until_date=str(time.time() + ban_time)) bot.reply_to( msg, text.group_commands[utils.get_group_lang(msg.chat.id)]['ban_me_please'].format( t = t", "= group_setting(chat_id), ) @bot.callback_query_handler(func = lambda c: c.data == 'to_main_menu') def bot_to_main_menu(c): bot.edit_message_text(", "'Да, выйди из чата', callback_data='leave_cancel::{chat_id}'.format(chat_id = chat_id)) keyboard.add(btn) btn = types.InlineKeyboardButton(text = 'Нет,", "callback_data = 'welcome_get::{chat_id}'.format(chat_id = chat_id)) kb.add(btn) btn1 = types.InlineKeyboardButton(text = '➖10', callback_data =", "keyboard.add(btn) return keyboard def generate_user_menu_kb(user_id): kb = types.InlineKeyboardMarkup(row_width = 1) btn1 = types.InlineKeyboardButton(text", "msg.chat.id) and msg.reply_to_message is not None and not utils.check_status(msg.reply_to_message.from_user.id, msg.chat.id): utils.new_warn(msg) elif not", "= text.service_messages['stats'].format( all_users = api.get_users_count(), all_chats = api.get_chats_count(), unblocked_users = api.get_unblocked_users_count(), unblocked_chats =", "c.data == 'broadcast_menu') def bot_admin_menu(c): bot.edit_message_text( chat_id = c.message.chat.id, 
message_id = c.message.message_id, text", "msg.chat.id) and msg.forward_from_chat.id == utils.get_log_id(msg.chat.id) and utils.check_log(msg.chat.id) ) def bot_del_log(msg): print(1) user_id =", "вас недостаточно прав для выполнения этого действия. Текущий статус настройки: {}'.format(config.settings_statuses[api.get_group_params(chat_id)['deletions']['files'][cont_type]]) ) @bot.callback_query_handler(func", "Thread(target = utils.check_deleting_queue) t.start() async def handle(request): if request.match_info.get('token') == bot.token: request_body_dict =", "m = text.group_commands[utils.get_group_lang(msg.chat.id)]['log_channel']['info']['is_off'] bot.send_message( msg.chat.id, m, parse_mode = 'HTML' ) @bot.message_handler(commands = ['leave'],", "ro_time_change(c): change_count = int(c.data.split('_')[2].split('::')[0]) chat_id = utils.parse_chat_id(c) if utils.check_status(c.from_user.id, utils.parse_chat_id(c)): settings = api.get_group_params(chat_id)", "parse_mode ='HTML') utils.new_update(msg, time.time()-start_time) @bot.message_handler(content_types = ['sticker'], func = lambda msg: not utils.check_status(msg.from_user.id,", "ssl.SSLContext(ssl.PROTOCOL_TLSv1_2) context.load_cert_chain(WEBHOOK_SSL_CERT, WEBHOOK_SSL_PRIV) # Start aiohttp server web.run_app( app, host=WEBHOOK_LISTEN, port=WEBHOOK_PORT, ssl_context=context, )", "keyboard.add(btn) btn = types.InlineKeyboardButton(text = 'Исключать ботов{}'.format(config.settings_statuses[curr_settings['kick_bots']]), callback_data='kick_bots::{chat_id}'.format(chat_id = chat_id)) keyboard.add(btn) btn =", "'stats_menu') kb.add(btn1, btn2) kb.add(types.InlineKeyboardButton(text = 'В главное меню', callback_data = 'to_main_menu')) return kb", "'video_note', 'voice', 'location', 'contact'], func = lambda msg: not utils.check_status(msg.from_user.id, msg.chat.id)) def testt(msg):", "int(time.time()+api.get_group_params(msg.chat.id)['restrictions']['for_time']*3600) ) r = bot.send_message( msg.chat.id, 
text.group_commands['ru']['restricted']['new_user']['read_only'].format( user_id = msg.new_chat_member.id, user_name = api.replacer(msg.new_chat_member.first_name),", ") bot.answer_callback_query( callback_query_id = c.id, text = 'Изменения подтверждены.' ) else: bot.answer_callback_query( callback_query_id", "= types.InlineKeyboardButton(text = 'Сейчас: {}'.format(s[curr_settings['broadcast']['check']['receivers']]), callback_data = 'empty_callback') btn5 = types.InlineKeyboardButton(text = 'Начать", "in admins: try: bot.send_message( i.user.id, txt.format( group_name = api.replacer(msg.chat.title), group_username = chat.username, message_id", "= 'Только чаты', callback_data = 'broadcast_check::chats') btn3 = types.InlineKeyboardButton(text = 'Все', callback_data =", "utils.get_my_ip() WEBHOOK_PORT = 8443 # 443, 80, 88 или 8443 (порт должен быть", "callback_data='leave_confirm::{chat_id}'.format(chat_id = chat_id)) keyboard.add(btn) return keyboard def generate_user_menu_kb(user_id): kb = types.InlineKeyboardMarkup(row_width = 1)", "start_time = time.time() api.register_new_chat(msg.chat) utils.new_update(msg, time.time()-start_time) @bot.message_handler(commands = ['get_logs'], func = lambda msg:", "shell=True) bot_ping(msg) @bot.message_handler(content_types=['text'], func = lambda msg: msg.chat.type != 'private') def bot_check_text(msg): start_time", "= msg.from_user.id, user_name = api.replacer(msg.from_user.first_name) ), parse_mode='HTML' ) # elif utils.check_for_forward(msg) and api.get_group_params(msg.chat.id)['deletions']['forward']:", "c.message.chat.id, utils.get_greeting(chat_id), parse_mode = 'HTML' ) @bot.callback_query_handler(func = lambda c: c.data.startswith('reset_settings')) def reset_settings_button(c):", "выполнен' ) @bot.callback_query_handler(func = lambda c: c.data.startswith('settings::')) def chat_settings(c): chat_id = utils.parse_chat_id(c) bot.edit_message_text(", 
"ботов{}'.format(config.settings_statuses[curr_settings['kick_bots']]), callback_data='kick_bots::{chat_id}'.format(chat_id = chat_id)) keyboard.add(btn) btn = types.InlineKeyboardButton(text = 'Фильтры', callback_data='deletions_settings::{chat_id}'.format(chat_id = chat_id))", "callback_data = 'settings::{chat_id}'.format(chat_id = i['chat_id'])) btns.append(btn) kb.add(*btns) kb.add(types.InlineKeyboardButton(text = 'В главное меню', callback_data", "callback_data='to_group_settings_menu::{chat_id}'.format(chat_id = chat_id)) kb.add(btn) return kb def new_users_restrictions_kb(chat_id): keyboard = types.InlineKeyboardMarkup(row_width = 4)", "'HTML' ) bot.edit_message_reply_markup( chat_id = c.message.chat.id, message_id = c.message.message_id, reply_markup = group_setting(chat_id), )", "chat_id)) btn4 = types.InlineKeyboardButton(text = '➕2', callback_data = 'warns_count_+2::{chat_id}'.format(chat_id = chat_id)) keyboard.add(btn1, btn2,", "callback_data = 'my_chats') btn2 = types.InlineKeyboardButton(text = 'Изменить язык', callback_data = 'change_lang') kb.add(btn1,", "not True: bot.send_message( msg.chat.id, text = text.group_commands[utils.get_group_lang(chat_id)]['log_channel']['confirmation']['errors']['bot_is_not_admin'] ) except Exception as e: print(e)", "callback_data = 'to_main_menu')) return kb def generate_broadcast_vars_menu_kb(): kb = types.InlineKeyboardMarkup(row_width = 1) btn1", "else: utils.ban_user(msg) utils.new_update(msg, time.time()-start_time) @bot.message_handler(commands=['ping']) def bot_ping(msg): start_timee = time.time() uptime = datetime.timedelta(seconds", "bot_admin_menu(c): bot.edit_message_text( chat_id = c.message.chat.id, message_id = c.message.message_id, text = 'Рассылка начата' )", "c.message.message_id, reply_markup = delete_settings(utils.parse_chat_id(c)) ) bot.answer_callback_query( callback_query_id = c.id, text = 'Изменения подтверждены.", "i.user.id == user_id: if i.status == 'creator': status1 = True if i.user.id 
==", "'del_system::{chat_id}'.format(chat_id = chat_id)) keyboard.add(btn) btn = types.InlineKeyboardButton(text = 'Исключать ботов{}'.format(config.settings_statuses[curr_settings['kick_bots']]), callback_data='kick_bots::{chat_id}'.format(chat_id = chat_id))", "text.VERSION), parse_mode = 'HTML' ) @bot.message_handler(commands = ['set_rules'], func = lambda msg: utils.check_status(msg.from_user.id,", "status1 = True if i.user.id == my_info.id: status2 = True if status1 is", "'left_chat_member', 'new_chat_title', 'new_chat_photo', 'delete_chat_photo', 'group_chat_created', 'supergroup_chat_created', 'channel_chat_created', 'migrate_to_chat_id', 'migrate_from_chat_id', 'pinned_message' ]) def bot_check_system(msg):", "== 'private') def bot_about(msg): start_time = time.time() bot.send_message( msg.chat.id, text.user_messages[utils.get_user_lang(msg)]['about'], parse_mode='HTML' ) utils.new_update(msg,", "utils.parse_chat_id(c) bot.send_message( chat_id = c.from_user.id, text = 'Эти настройки можно получить в любое", "user_id = msg.from_user.id, user_name = api.replacer(msg.from_user.first_name) ), parse_mode='HTML' ) # elif utils.check_for_forward(msg) and", "c: c.data == 'to_main_menu') def bot_to_main_menu(c): bot.edit_message_text( chat_id = c.message.chat.id, message_id = c.message.message_id,", "вас недостаточно прав для выполнения этого действия.' 
) @bot.callback_query_handler(func = lambda c: c.data.startswith('settings_delete'))", "utils.get_greeting(chat_id), parse_mode = 'HTML' ) @bot.callback_query_handler(func = lambda c: c.data.startswith('reset_settings')) def reset_settings_button(c): chat_id", "str(working_time).replace('day', 'days').replace('dayss', 'days') if uptime.days != 0: uptime_str = uptime_str.replace(uptime_str.split(',')[0], utils.get_text_translation(uptime_str.split(',')[0]), 'ru') if", "bot.send_message( msg.chat.id, text.group_commands[utils.get_group_lang(msg.chat.id)]['restricted']['url'].format( user_id = msg.from_user.id, user_name = api.replacer(msg.from_user.first_name) ), parse_mode='HTML' ) #", "= 'warns_action_2::{chat_id}'.format(chat_id = chat_id)) btn4 = types.InlineKeyboardButton(text = 'Read-only на сутки', callback_data =", "= kb ) t.start() t.join() @bot.callback_query_handler(func = lambda c: c.data == 'admin_menu') def", "= c.message.chat.id, message_id = c.message.message_id ) else: bot.answer_callback_query( callback_query_id = c.id, show_alert =", "return lang_keyboard def group_setting(chat_id): keyboard = types.InlineKeyboardMarkup(row_width=1) curr_settings = api.get_group_params(chat_id) btn = types.InlineKeyboardButton(text", "= 'Мои чаты', callback_data = 'my_chats') btn2 = types.InlineKeyboardButton(text = 'Изменить язык', callback_data", "), parse_mode = 'HTML', chat_id = c.message.chat.id, message_id = c.message.message_id ) utils.add_to_delete_queue(msg.chat.id, r.message_id,", "@bot.message_handler(commands=['ro'], func=lambda msg: msg.chat.type == 'supergroup') def bot_users_ro(msg): start_time = time.time() if utils.check_status(msg.from_user.id,", ") @bot.callback_query_handler(func = lambda c: c.data.startswith('warns_action_')) def warns_count_change(c): new_mod = int(c.data.split('_')[2].split('::')[0]) chat_id =", "= text.group_commands[utils.get_group_lang(c.message.chat.id)]['restricted']['new_user']['button_pressed'].format( user_id = 
user.user.id, user_name = api.replacer(user.user.first_name) ), parse_mode = 'HTML', chat_id", "i in admins: if i.user.id == user_id: if i.status == 'creator': status1 =", "utils.get_log_id(msg.chat.id), chat_name = bot.get_chat(utils.get_log_id(msg.chat.id)).title ) else: m = text.group_commands[utils.get_group_lang(msg.chat.id)]['log_channel']['info']['is_off'] bot.send_message( msg.chat.id, m, parse_mode", "utils.generate_welcome_text(msg), parse_mode='HTML' ) utils.add_to_delete_queue(msg.chat.id, r.message_id, api.get_group_params(msg.chat.id)['greeting']['delete_timer']) utils.new_update(msg, time.time()-start_time) @bot.message_handler(content_types=[ 'new_chat_members', 'left_chat_member', 'new_chat_title', 'new_chat_photo',", "utils.parse_chat_id(c)): settings = api.get_group_params(chat_id) curr_state = settings['greeting']['is_enabled'] new_state = config.settings_states[curr_state] settings['greeting']['is_enabled'] = new_state", "списку групп', callback_data = 'to_groups_list')) return keyboard def welcome_settings_kb(chat_id): kb = types.InlineKeyboardMarkup(row_width =", "msg, text.group_commands[utils.get_group_lang(msg.chat.id)]['errors']['prefix'].format( reason = text.group_commands[utils.get_group_lang(msg.chat.id)]['errors']['reasons']['user_is_admin'] ), parse_mode='HTML' ) except Exception as e: logging.error(e)", "message_id = c.message.message_id, reply_markup = delete_settings(chat_id) ) bot.answer_callback_query( callback_query_id = c.id, text =", "= ['unban'], func = lambda msg: msg.chat.type == 'supergroup') def bot_user_unban(msg): start_time =", "if __name__ == '__main__': log_name = 'logs.txt' f = open(log_name,'w') f.close() print('Файл логов", "sticker_id = msg.reply_to_message.sticker.file_id utils.ban_sticker(msg, sticker_id) elif not utils.check_status(msg.from_user.id, msg.chat.id): utils.send_err_report(msg, 'not_enought_rights') utils.new_update(msg, time.time()-start_time)", "len(msg.text) not in [9, 21]: new_greeting = 
msg.text[len(msg.text):msg.entities[0].length:-1][::-1] if utils.check_text(new_greeting): utils.set_greeting(msg, new_greeting) bot.send_message(", "def testt(msg): start_time = time.time() if utils.is_restricted(msg): bot.delete_message( msg.chat.id, msg.message_id ) utils.new_update(msg, time.time()-start_time)", "ssl import subprocess import threading import time from multiprocessing import Process as Thread", "btn2 = types.InlineKeyboardButton(text = '➖1', callback_data = 'time_ro_-1::{chat_id}'.format(chat_id = chat_id)) btn3 = types.InlineKeyboardButton(text", "utils.check_status(msg.from_user.id, msg.chat.id) and not utils.have_args(msg): utils.send_err_report(msg, 'no_args_provided') else: utils.send_err_report(msg, 'not_enought_rights') utils.new_update(msg, time.time()-start_time) @bot.message_handler(commands", "time.time()-start_time) @bot.message_handler(commands=['stickerpack_unban'], func=lambda msg: msg.chat.type != 'private') def bot_stickerpack_unban(msg): start_time = time.time() if", "Кнопки @bot.callback_query_handler(func = lambda c: c.data.startswith('get_chat_refs::')) def bot_get_chat_refs(c): chat_id = utils.parse_chat_id(c) user_id =", "welcome_settings_state(c): chat_id = utils.parse_chat_id(c) if utils.check_status(c.from_user.id, utils.parse_chat_id(c)): settings = api.get_group_params(chat_id) curr_state = settings['greeting']['is_enabled']", "= lambda c: c.data.startswith('warns_count_')) def ro_time_change(c): change_count = int(c.data.split('_')[2].split('::')[0]) chat_id = utils.parse_chat_id(c) if", "== 'supergroup') def bot_answ(msg): start_time = time.time() message = msg kb = types.InlineKeyboardMarkup()", "= 'broadcast_check::users') btn2 = types.InlineKeyboardButton(text = 'Только чаты', callback_data = 'broadcast_check::chats') btn3 =", "callback_data='to_group_settings_menu::{chat_id}'.format(chat_id = chat_id)) keyboard.add(btn) return keyboard def warns_settings_kb(chat_id): keyboard = 
types.InlineKeyboardMarkup(row_width = 4)", "types.InlineKeyboardButton(text = 'Получить дамп настроек', callback_data = 'get_settings_json::{chat_id}'.format(chat_id = chat_id)) keyboard.add(btn) btn =", "types.InlineKeyboardButton(text = 'Навсегда', callback_data = 'time_ro_+10000::{chat_id}'.format(chat_id = chat_id)) btn6 = types.InlineKeyboardButton(text = 'Сброс',", "= time.time() if utils.check_status(msg.from_user.id, msg.chat.id): api.register_new_chat(msg.chat) api.change_group_params(msg.chat.id, ujson.dumps(config.default_group_settings)) bot.send_message( msg.chat.id, text.group_commands[utils.get_group_lang(msg.chat.id)]['registration'], parse_mode =", ") @bot.callback_query_handler(func = lambda c: c.data == 'to_main_menu') def bot_to_main_menu(c): bot.edit_message_text( chat_id =", "@bot.message_handler(commands = ['set_rules'], func = lambda msg: utils.check_status(msg.from_user.id, msg.chat.id)) def bot_set_rules(msg): start_time =", "bot.edit_message_reply_markup( chat_id = c.message.chat.id, message_id = c.message.message_id, reply_markup = generate_broadcast_check_menu_kb() ) @bot.callback_query_handler(func =", ") t.start() t.join() @bot.callback_query_handler(func = lambda c: c.data == 'admin_menu') def bot_admin_menu(c): bot.edit_message_text(", "if status1 is True and status2 is True: utils.set_log_channel(msg.chat.id, msg.forward_from_chat.id) elif status1 is", "# Путь к сертификату WEBHOOK_SSL_PRIV = './webhook_pkey.pem' # Путь к приватному ключу WEBHOOK_URL_BASE", "def kick_bots(c): chat_id = utils.parse_chat_id(c) if utils.check_status(c.from_user.id, utils.parse_chat_id(c)): utils.change_state_main(chat_id, 'kick_bots') bot.edit_message_reply_markup( chat_id=c.message.chat.id, message_id=c.message.message_id,", "= ujson.loads(api.get_bot_settings(secret_config.token)) if arg in ['users', 'chats', 'all']: curr_bot_settings['broadcast']['check']['recievers'] = arg api.change_bot_settings(secret_config.token, 
ujson.dumps(curr_bot_settings))", "'text', 'msg_text': '', 'file_id': '', 'user_id': c.from_user.id, 'message_id': c.message.message_id } ) kb =", "= api.get_group_params(chat_id) settings['restrictions']['for_time'] = settings['restrictions']['for_time'] + change_time if settings['restrictions']['for_time'] < 1: settings['restrictions']['for_time'] =", "['voteban']) # def bot_voteban(msg): # utils.new_voteban(msg) # bot.send_message( # msg.chat.id, # text. #", "types.InlineKeyboardButton(text = 'Назад', callback_data='to_group_settings_menu::{chat_id}'.format(chat_id = chat_id)) kb.add(btn) return kb def new_users_restrictions_kb(chat_id): keyboard =", "= utils.parse_chat_id(c) if utils.check_status(c.from_user.id, utils.parse_chat_id(c)): settings = api.get_group_params(chat_id) settings['restrictions']['read_only'] = config.settings_states[settings['restrictions']['read_only']] api.change_group_params(chat_id, ujson.dumps(settings))", "# if utils.is_new_in_chat(msg) and api.get_group_params(msg.chat.id)['restrict_new'] == '1': if utils.check_for_urls(msg) and api.get_group_params(msg.chat.id)['deletions']['url']: bot.delete_message( msg.chat.id,", "bot.delete_message( c.message.chat.id, c.message.message_id ) else: bot.delete_message( c.message.chat.id, c.message.message_id ) bot.send_message( c.message.chat.id, 'Сброс отменен'", "{ 'is_test': True, 'receivers': curr_bot_settings['broadcast']['check']['recievers'], 'cont_type': 'text', 'msg_text': '', 'file_id': '', 'user_id': c.from_user.id,", "= int(c.data.split('_')[2].split('::')[0]) chat_id = utils.parse_chat_id(c) if utils.check_status(c.from_user.id, utils.parse_chat_id(c)): settings = api.get_group_params(chat_id) settings['restrictions']['for_time'] =", "time.time() msg_text = msg.text msg_text_low = msg_text.lower() if utils.is_restricted(msg) and not utils.check_status(msg.from_user.id, msg.chat.id):", "WEBHOOK_SSL_PRIV) # Start aiohttp server web.run_app( app, host=WEBHOOK_LISTEN, port=WEBHOOK_PORT, 
ssl_context=context, ) # bot.remove_webhook()", "reply_markup = create_user_language_keyboard() ) bot.answer_callback_query( callback_query_id = c.id, text = 'Переход выполнен' )", "= time.time() if utils.is_user_new(msg): if utils.have_args(msg): referrer = utils.parse_arg(msg)[1] bot.send_message( msg.chat.id, text.user_messages['start'], reply_markup=generate_user_menu_kb(msg.from_user.id)", "= chat_id)) keyboard.add(btn) btn = types.InlineKeyboardButton(text = 'Нет, останься', callback_data='leave_confirm::{chat_id}'.format(chat_id = chat_id)) keyboard.add(btn)", "{} час - {}'.format(curr_settings['restrictions']['for_time'], config.settings_statuses[curr_settings['restrictions']['read_only']]), callback_data = 'read_only::{chat_id}'.format(chat_id = chat_id)) keyboard.add(btn) btn1 =", "'broadcast_menu') btn2 = types.InlineKeyboardButton(text = 'Статистика', callback_data = 'stats_menu') kb.add(btn1, btn2) kb.add(types.InlineKeyboardButton(text =", ") utils.add_to_delete_queue(msg.chat.id, r.message_id, api.get_group_params(msg.chat.id)['restrictions']['for_time']*3600) if msg.new_chat_member.is_bot and api.get_group_params(msg.chat.id)['kick_bots']: bot.kick_chat_member( msg.chat.id, msg.new_chat_member.id ) bot.send_message(", "can_send_messages=True, can_send_other_messages=True ) bot.edit_message_text( text = text.group_commands[utils.get_group_lang(c.message.chat.id)]['restricted']['new_user']['button_pressed'].format( user_id = user.user.id, user_name = api.replacer(user.user.first_name)", "количество исключений: {}'.format(curr_settings['warns']['count']), callback_data = 'empty_callback::{chat_id}'.format(chat_id = chat_id)) keyboard.add(btn) btn1 = types.InlineKeyboardButton(text =", "reply_markup = delete_settings(utils.parse_chat_id(c)) ) bot.answer_callback_query( callback_query_id = c.id, text = 'Изменения подтверждены. 
Статус", "{}'.format(config.warns_states[curr_settings['warns']['action']]), callback_data='empty_callback::{chat_id}'.format(chat_id = chat_id)) keyboard.add(btn) btn1 = types.InlineKeyboardButton(text = 'Ничего', callback_data = 'warns_action_0::{chat_id}'.format(chat_id", "utils.check_status(c.from_user.id, utils.parse_chat_id(c)): utils.change_state_deletions_main(chat_id, 'system') bot.edit_message_reply_markup( chat_id=c.message.chat.id, message_id=c.message.message_id, reply_markup=group_setting(utils.parse_chat_id(c)) ) bot.answer_callback_query( callback_query_id = c.id,", ") bot.send_message( msg.chat.id, text.group_commands['ru']['restricted']['bot'], parse_mode = 'HTML', reply_markup = types.ReplyKeyboardRemove() ) elif utils.check_global_ban(msg):", "msg.reply_to_message: msg_id = msg.reply_to_message.message_id txt = text.reports_messages['report']['to_admin']['have_username']['reply'] else: msg_id = msg.message_id txt =", "utils.change_state_deletions_files(chat_id, cont_type) bot.edit_message_reply_markup( chat_id = c.message.chat.id, message_id = c.message.message_id, reply_markup = delete_settings(utils.parse_chat_id(c)) )", "utils.ban_user(msg) utils.new_update(msg, time.time()-start_time) @bot.message_handler(commands=['ping']) def bot_ping(msg): start_timee = time.time() uptime = datetime.timedelta(seconds =", "c.message.chat.id, message_id = c.message.message_id ) else: bot.answer_callback_query( callback_query_id = c.id, show_alert = True,", "keyboard = types.InlineKeyboardMarkup(row_width=1) curr_settings = api.get_group_params(chat_id) btn = types.InlineKeyboardButton(text = 'Принимать рассылки{}'.format(config.settings_statuses[curr_settings['get_notifications']]), callback_data", "utils.check_log(msg.chat.id): m = text.group_commands[utils.get_group_lang(msg.chat.id)]['log_channel']['info']['is_on'].format( chat_id = utils.get_log_id(msg.chat.id), chat_name = bot.get_chat(utils.get_log_id(msg.chat.id)).title ) else: m", "msg.chat.id, m, 
parse_mode = 'HTML' ) @bot.message_handler(commands = ['leave'], func = lambda msg:", "in inviters: inviter_info = bot.get_chat_member(chat_id, i['inviter']) counter += 1 m += text.group_commands[utils.get_group_lang(chat_id)]['refs_stats']['body'].format( inviter_pos", "utils.check_status(c.from_user.id, utils.parse_chat_id(c)): # settings = api.get_group_params(chat_id) # settings[''] # api.change_group_params(chat_id, ) # Вебхук", "unblocked_chats = api.get_unblocked_chats_count() ) ) @bot.callback_query_handler(func = lambda c: c.data == 'change_lang') def", "= types.InlineKeyboardMarkup(row_width = 4) curr_settings = api.get_group_params(chat_id) btn = types.InlineKeyboardButton(text = 'Максимальное количество", "notify_change(c): chat_id = utils.parse_chat_id(c) if utils.check_status(c.from_user.id, utils.parse_chat_id(c)): utils.change_state_main(chat_id, 'get_notifications') bot.edit_message_reply_markup( chat_id=c.message.chat.id, message_id=c.message.message_id, reply_markup=group_setting(utils.parse_chat_id(c))", ") @bot.message_handler(commands = ['set_rules'], func = lambda msg: utils.check_status(msg.from_user.id, msg.chat.id)) def bot_set_rules(msg): start_time", "= chat_id)) btn2 = types.InlineKeyboardButton(text = 'Кик', callback_data = 'warns_action_1::{chat_id}'.format(chat_id = chat_id)) btn3", "быть открыт!) 
# На некоторых серверах придется указывать такой же IP, что и", "= api.get_group_params(chat_id) for cont_type in config.available_attachments: btn = types.InlineKeyboardButton(text=config.available_attachments_str[cont_type].format(config.settings_statuses[curr_settings['deletions']['files'][cont_type]]), callback_data='delete::{content_type}::{chat_id}'.format(content_type = cont_type, chat_id", "сбоя:\\n'+ujson.dumps(api.get_group_params(chat_id)) ) bot.answer_callback_query( c.id, text = 'Настройки отправлены', show_alert = True ) @bot.callback_query_handler(func", "def group_settings_deletions_all(c): chat_id = utils.parse_chat_id(c) if utils.check_status(c.from_user.id, utils.parse_chat_id(c)): for i in config.available_attachments: utils.change_state_deletions_files(chat_id,", "1 api.change_group_params(chat_id, ujson.dumps(settings)) bot.edit_message_reply_markup( chat_id = c.message.chat.id, message_id = c.message.message_id, reply_markup = new_users_restrictions_kb(chat_id)", "f = open(log_name,'w') f.close() print('Файл логов создан') telebot_logger = logging.getLogger('telebot') mysql_info = logging.getLogger('mysql')", "print(1) user_id = msg.from_user.id try: admins = bot.get_chat_administrators(msg.forward_from_chat.id) status1 = False status2 =", "t = random.randint(1, 10) ban_time = 60*t try: if not utils.check_status(msg.from_user.id, msg.chat.id): bot.restrict_chat_member(", "utils.check_status(msg.from_user.id, msg.chat.id): bot.send_message( msg.chat.id, 'Вы действительно хотите сбросить настройки?', reply_markup = kb )", "types.InlineKeyboardButton(text = 'Действие при максимальном кол-ве варнов: {}'.format(config.warns_states[curr_settings['warns']['action']]), callback_data='empty_callback::{chat_id}'.format(chat_id = chat_id)) keyboard.add(btn) btn1", "c.data == 'change_lang') def bot_change_lang(c): user_id = c.from_user.id bot.edit_message_text( chat_id = c.message.chat.id, message_id", "вас недостаточно прав для выполнения этого 
действия.' ) @bot.callback_query_handler(func = lambda c: c.data.startswith('new_restrictions_admins_only_'))", "handle) def create_user_language_keyboard(): lang_keyboard = types.InlineKeyboardMarkup() for i in config.languages: lang_keyboard.add(types.InlineKeyboardButton(text = i['title'],", "c.id, text = 'Изменения подтверждены. Текущий статус настройки: {}'.format(config.settings_statuses[api.get_group_params(chat_id)['deletions']['system']]) ) else: bot.answer_callback_query( callback_query_id", "api.get_group_params(chat_id) btn = types.InlineKeyboardButton(text = 'Принимать рассылки{}'.format(config.settings_statuses[curr_settings['get_notifications']]), callback_data = 'get_notifications::{chat_id}'.format(chat_id = chat_id)) keyboard.add(btn)", "r = bot.send_message( msg.chat.id, text = text.group_commands[utils.get_group_lang(msg.chat.id)]['remove_keyboard'], reply_markup = kb ) bot.delete_message( msg.chat.id,", "'get_notifications::{chat_id}'.format(chat_id = chat_id)) keyboard.add(btn) btn = types.InlineKeyboardButton(text = 'Удалять ссылки{}'.format(config.settings_statuses[curr_settings['deletions']['url']]), callback_data = 'del_url::{chat_id}'.format(chat_id", "Текущий статус настройки: {}'.format(config.settings_statuses[api.get_group_params(chat_id)['deletions']['files'][cont_type]]) ) @bot.callback_query_handler(func = lambda c: c.data.startswith('change_all')) def group_settings_deletions_all(c): chat_id", "text.group_commands[utils.get_group_lang(c.message.chat.id)]['leave']['accepted'] ) bot.leave_chat( c.message.chat.id ) else: bot.send_message( c.message.chat.id, text.group_commands[utils.get_group_lang(c.message.chat.id)]['leave']['cancelled'] ) bot.delete_message( c.message.chat.id, c.message.message_id", "not utils.check_status(msg.from_user.id, msg.chat.id): bot.delete_message( msg.chat.id, msg.message_id ) if msg_text_low.startswith('разбан'): if utils.check_super_user(msg.from_user.id): utils.global_unban(msg) elif", "chat_id)) 
keyboard.add(btn) btn = types.InlineKeyboardButton(text = 'Исключать ботов{}'.format(config.settings_statuses[curr_settings['kick_bots']]), callback_data='kick_bots::{chat_id}'.format(chat_id = chat_id)) keyboard.add(btn) btn", "c.id, text = 'Изменения подтверждены.' ) else: bot.answer_callback_query( callback_query_id = c.id, show_alert =", "if utils.check_status(msg.from_user.id, msg.chat.id): utils.ban_stickerpack(msg) else: utils.send_err_report(msg, 'not_enought_rights') utils.new_update(msg, time.time()-start_time) @bot.message_handler(commands=['stickerpack_unban'], func=lambda msg: msg.chat.type", "utils.check_status(msg.from_user.id, msg.chat.id): utils.send_err_report(msg, 'not_enought_rights') utils.new_update(msg, time.time()-start_time) @bot.message_handler(commands=['sticker_unban'], func=lambda msg: msg.chat.type == 'supergroup') def", "status2 = True if status1 is True and status2 is True: utils.remove_log_channel(msg.chat.id) elif", "utils.check_status(c.from_user.id, utils.parse_chat_id(c)): for i in config.available_attachments: utils.change_state_deletions_files(chat_id, i) bot.edit_message_reply_markup( chat_id = c.message.chat.id, message_id", "types.InlineKeyboardButton(text = '➖5', callback_data = 'welcome_timer_-5::{chat_id}'.format(chat_id = chat_id)) btn3 = types.InlineKeyboardButton(text = '➕5',", ") bot.answer_callback_query( callback_query_id = c.id, text = 'Изменения подтверждены.' 
) else: t =", "types.InlineKeyboardButton(text=config.available_attachments_str[cont_type].format(config.settings_statuses[curr_settings['deletions']['files'][cont_type]]), callback_data='delete::{content_type}::{chat_id}'.format(content_type = cont_type, chat_id = chat_id)) keyboard.add(btn) btn = types.InlineKeyboardButton(text = 'Переключить", "new_greeting) bot.send_message( msg.chat.id, 'Приветствие изменено' ) else: bot.send_message( msg.chat.id, text = 'Данное приветствие", "%(name)-9s [%(asctime)s] - %(message)-50s ', datefmt='%m/%d/%Y %I:%M:%S %p', level = logging.INFO ) app", "# @bot.message_handler(commands = ['voteban']) # def bot_voteban(msg): # utils.new_voteban(msg) # bot.send_message( # msg.chat.id,", "c.message.message_id, reply_markup = generate_user_menu_kb(c.from_user.id) ) @bot.callback_query_handler(func = lambda c: c.data == 'broadcast_menu') def", "495038140: api.change_group_params(msg.chat.id, ujson.dumps(config.default_group_settings)) else: if api.get_group_params(msg.chat.id)['restrictions']['read_only']: bot.restrict_chat_member( msg.chat.id, msg.new_chat_member.id, until_date = int(time.time()+api.get_group_params(msg.chat.id)['restrictions']['for_time']*3600) )", "< 1: settings['warns']['count'] = 1 api.change_group_params(chat_id, ujson.dumps(settings)) bot.edit_message_reply_markup( chat_id = c.message.chat.id, message_id =", "return kb def user_settings_main_menu(msg): keyboard = types.InlineKeyboardMarkup(row_width=1) curr_settings = api.get_user_param(msg.chat.id, 'settings') btn =", "msg if len(msg.text) not in [9, 21]: new_rules = msg.text[len(msg.text):msg.entities[0].length:-1][::-1] if utils.check_text(new_rules): utils.set_rules(msg,", "if len(msg.text) not in [9, 21]: new_rules = msg.text[len(msg.text):msg.entities[0].length:-1][::-1] if utils.check_text(new_rules): utils.set_rules(msg, new_rules)", "utils.parse_chat_id(c)): settings = api.get_group_params(chat_id) settings['warns']['count'] = settings['warns']['count'] + 
change_count if settings['warns']['count'] < 1:", "'private') def bot_user_start(msg): message = msg start_time = time.time() if utils.is_user_new(msg): if utils.have_args(msg):", "'все' } btn1 = types.InlineKeyboardButton(text = 'Только диалоги', callback_data = 'broadcast_check::users') btn2 =", "msg.new_chat_member.first_name ), parse_mode = 'HTML' ) else: utils.new_user_in_chat(msg) if utils.need_greeting(msg): r = bot.send_message(", "utils.parse_chat_id(c)): for i in config.available_attachments: utils.change_state_deletions_files(chat_id, i) bot.edit_message_reply_markup( chat_id = c.message.chat.id, message_id =", "arg = c.data.split('::')[1] curr_bot_settings = ujson.loads(api.get_bot_settings(secret_config.token)) if arg in ['users', 'chats', 'all']: curr_bot_settings['broadcast']['check']['recievers']", "else: utils.send_err_report(msg, 'not_enought_rights') utils.new_update(msg, time.time()-start_time) @bot.message_handler(commands = ['reregister'], func = lambda msg: msg.chat.type", "@bot.callback_query_handler(func = lambda c: c.data.startswith('welcome_get')) def get_welcome_text(c): chat_id = utils.parse_chat_id(c) bot.send_message( c.message.chat.id, utils.get_greeting(chat_id),", "not utils.check_status(msg.from_user.id, msg.chat.id): # if utils.is_new_in_chat(msg) and api.get_group_params(msg.chat.id)['restrict_new'] == '1': if utils.check_for_urls(msg) and", "неверно' ) utils.new_update(msg, time.time()-start_time) @bot.message_handler(commands = ['rules'], func = lambda msg: msg.chat.type !=", "= 'Изменения подтверждены. 
Текущий статус настройки: {}'.format(config.settings_statuses[api.get_group_params(chat_id)[c.data.split('::')[0]]]) ) else: bot.answer_callback_query( callback_query_id = c.id,", "# Вебхук bot.remove_webhook() bot.set_webhook( url=WEBHOOK_URL_BASE + WEBHOOK_URL_PATH, certificate=open(WEBHOOK_SSL_CERT, 'r')) context = ssl.SSLContext(ssl.PROTOCOL_TLSv1_2) context.load_cert_chain(WEBHOOK_SSL_CERT,", "elif not utils.check_status(msg.from_user.id, msg.chat.id): utils.send_err_report(msg, 'not_enought_rights') utils.new_update(msg, time.time()-start_time) @bot.message_handler(commands=['sticker_unban'], func=lambda msg: msg.chat.type ==", "time.time() if utils.check_status(msg.from_user.id, msg.chat.id): utils.ban_stickerpack(msg) else: utils.send_err_report(msg, 'not_enought_rights') utils.new_update(msg, time.time()-start_time) @bot.message_handler(commands=['stickerpack_unban'], func=lambda msg:", "generate_user_menu_kb(msg.from_user.id) ) @bot.message_handler(commands=['set_text'], func = lambda msg: msg.chat.type != 'private') def bot_set_text(msg): start_time", "text = 'Переход выполнен' ) @bot.callback_query_handler(func = lambda c: c.data.startswith('settings::')) def chat_settings(c): chat_id", "работает' ) utils.new_update(msg, time.time()-start_time) @bot.message_handler(commands=['kick'], func=lambda msg: msg.chat.type != 'private') def bot_kick(msg): start_time", "datetime.datetime.fromtimestamp(msg.date+1).strftime(\"%Y-%m-%d %H:%M:%S\")), shell=True) bot_ping(msg) @bot.message_handler(content_types=['text'], func = lambda msg: msg.chat.type != 'private') def", "= int(c.data.split('_')[2].split('::')[0]) chat_id = utils.parse_chat_id(c) if utils.check_status(c.from_user.id, utils.parse_chat_id(c)): settings = api.get_group_params(chat_id) settings['greeting']['delete_timer'] =", "= '➕2', callback_data = 'time_ro_+2::{chat_id}'.format(chat_id = chat_id)) btn5 = types.InlineKeyboardButton(text = 'Навсегда', callback_data", "callback_data = 'empty_callback') 
btn5 = types.InlineKeyboardButton(text = 'Начать рассылку', callback_data = 'broadcast_check::start') kb.add(btn1,", "msg: msg.chat.id == -1001236256304 and utils.check_super_user(msg.from_user.id)) def bot_logs(msg): bot.send_document(msg.chat.id, open('logs.txt', 'rb')) @bot.message_handler(commands =", "chat_id = c.message.chat.id, message_id = c.message.message_id, text = text.user_messages['start'], parse_mode = 'HTML' )", "chat_id = utils.parse_chat_id(c) bot.edit_message_text( chat_id = c.message.chat.id, message_id = c.message.message_id, text = '<b>Настройки", "= utils.parse_chat_id(c) if utils.check_status(c.from_user.id, utils.parse_chat_id(c)): settings = api.get_group_params(chat_id) settings['warns']['action'] = new_mod api.change_group_params(chat_id, ujson.dumps(settings))", "= 3) curr_settings = ujson.loads(api.get_bot_settings(secret_config.token)) s = { 'users': 'пользователи', 'chats': 'диалоги', 'all':", "== my_info.id: status2 = True if status1 is True and status2 is True:", "check_status(msg.from_user.id): utils.send_err_report(msg, 'no_args_provided') utils.new_update(msg, time.time()-start_time) @bot.message_handler(commands=['help']) def bot_help(msg): start_time = time.time() bot.send_message( msg.from_user.id,", "'photo', 'sticker', 'video', 'video_note', 'voice', 'location', 'contact'], func = lambda msg: not utils.check_status(msg.from_user.id,", "bot_users_ro(msg): start_time = time.time() if utils.check_status(msg.from_user.id, msg.chat.id): utils.read_only(msg) else: utils.send_err_report(msg, 'not_enought_rights') utils.new_update(msg, time.time()-start_time)", "= types.InlineKeyboardButton(text = 'Получить топ инвайтеров', callback_data = 'get_chat_refs::{chat_id}'.format(chat_id = chat_id)) keyboard.add(btn) keyboard.add(types.InlineKeyboardButton(text", "= time.time() try: bot.send_message( msg.from_user.id, utils.generate_rules_text(msg), parse_mode = 'HTML' ) except Exception: bot.reply_to(", "def bot_admin_menu(c): 
bot.edit_message_text( chat_id = c.message.chat.id, message_id = c.message.message_id, text = 'Рассылка начата'", "import time from multiprocessing import Process as Thread import telebot from aiohttp import", "user_id = msg.from_user.id try: admins = bot.get_chat_administrators(msg.forward_from_chat.id) status1 = False status2 = False", "working_time = working_time_str, uptime_sec = uptime ), reply_to_message_id=msg.message_id, parse_mode='HTML' ) utils.new_update(msg, time.time()-start_timee) @bot.message_handler(content_types=['new_chat_members'])", "kb ) bot.send_message( msg.from_user.id, '<b>Настройки группы {}</b>'.format(msg.chat.title), reply_markup=group_setting(msg.chat.id), parse_mode='HTML' ) utils.new_update(msg, time.time()-start_time) @bot.message_handler(commands=['start'],", "msg.chat.id): # if utils.is_new_in_chat(msg) and api.get_group_params(msg.chat.id)['restrict_new'] == '1': if utils.check_for_urls(msg) and api.get_group_params(msg.chat.id)['deletions']['url']: bot.delete_message(", "not utils.check_status(msg.from_user.id, msg.chat.id)) def bot_check_sticker(msg): start_time = time.time() if utils.is_restricted(msg) or utils.is_sticker_restricted(msg): bot.delete_message(", "words = c.data.split() bot.delete_message( c.message.chat.id, words[2] ) bot.delete_message( c.message.chat.id, words[1] ) @bot.callback_query_handler(func =", "msg.text == '/ban_me_please': t = random.randint(1, 10) ban_time = 60*t try: if not", "{}'.format(config.settings_statuses[api.get_group_params(chat_id)[c.data.split('::')[0]]]) ) @bot.callback_query_handler(func = lambda c: c.data.startswith('del_url')) def del_url(c): chat_id = utils.parse_chat_id(c) if", "kb.add(types.InlineKeyboardButton(text = 'В главное меню', callback_data = 'to_main_menu')) bot.edit_message_reply_markup( chat_id = c.message.chat.id, message_id", "my_chats_list(c): user_id = c.from_user.id user_settings = api.get_user_param(user_id, 'settings') bot.edit_message_text( chat_id = 
c.message.chat.id, message_id", "), reply_to_message_id=msg.message_id, parse_mode='HTML' ) utils.new_update(msg, time.time()-start_timee) @bot.message_handler(content_types=['new_chat_members']) def bot_users_new(msg): start_time = time.time() api.register_new_chat(msg.chat)", "ujson.dumps(config.default_group_settings)) bot.send_message( msg.chat.id, text.group_commands[utils.get_group_lang(msg.chat.id)]['registration'], parse_mode = 'HTML' ) @bot.message_handler(commands=['ro'], func=lambda msg: msg.chat.type ==", "time.time() if utils.is_restricted(msg): bot.delete_message( msg.chat.id, msg.message_id ) utils.new_update(msg, time.time()-start_time) # Кнопки @bot.callback_query_handler(func =", "utils.have_args(msg): words = utils.parse_arg(msg)[1] user_id = int(words) utils.unban_user(msg, user_id) elif utils.check_status(msg.from_user.id, msg.chat.id) and", ") @bot.callback_query_handler(func = lambda c: c.data == 'change_lang') def bot_change_lang(c): user_id = c.from_user.id", "= c.message.chat.id, message_id = c.message.message_id, reply_markup = new_users_restrictions_kb(chat_id) ) bot.answer_callback_query( callback_query_id = c.id,", "chat_id = utils.parse_chat_id(c) if utils.check_status(c.from_user.id, utils.parse_chat_id(c)): settings = api.get_group_params(chat_id) settings['restrictions']['read_only'] = config.settings_states[settings['restrictions']['read_only']] api.change_group_params(chat_id,", "= utils.parse_chat_id(c) bot.send_message( c.message.chat.id, utils.get_greeting(chat_id), parse_mode = 'HTML' ) @bot.callback_query_handler(func = lambda c:", "text.group_commands['ru']['donate'], parse_mode = 'HTML' ) utils.new_update(msg, time.time()-start_time) @bot.message_handler(commands = ['get_id']) def bot_get_id(msg): bot.send_message(", ") bot.delete_message( c.message.chat.id, c.message.message_id ) else: bot.delete_message( c.message.chat.id, c.message.message_id ) bot.send_message( c.message.chat.id, 'Сброс", "= c.message.chat.id, 
message_id = c.message.message_id, text = 'Админка' ) bot.edit_message_reply_markup( chat_id = c.message.chat.id,", "'user_is_admin') utils.new_update(msg, time.time()-start_time) @bot.message_handler(commands=['donate']) def bot_donate(msg): start_time = time.time() bot.send_message( msg.chat.id, text.group_commands['ru']['donate'], parse_mode", "or utils.check_status(c.from_user.id, utils.parse_chat_id(c)): user = bot.get_chat_member( chat_id, user_id ) if user.status in ['restricted']:", "chat_id)) keyboard.add(btn) return keyboard def remove_warns_kb(user_id): kb = types.InlineKeyboardMarkup(row_width=1) btn = types.InlineKeyboardButton(text =", "btn = types.InlineKeyboardButton(text = 'Назад', callback_data='to_group_settings_menu::{chat_id}'.format(chat_id = chat_id)) keyboard.add(btn) return keyboard def warns_settings_kb(chat_id):", "and utils.check_status(msg.from_user.id, msg.chat.id): sticker_id = utils.parse_arg(msg)[1] utils.unban_sticker(msg, sticker_id) elif utils.check_status(msg.from_user.id, msg.chat.id) and not", "e: print(e) bot.reply_to( msg, text.reports_messages['report']['to_user'], parse_mode = 'HTML' ) utils.new_update(msg, time.time()-start_time) @bot.message_handler(commands =", "= types.InlineKeyboardMarkup(row_width = 3) curr_settings = ujson.loads(api.get_bot_settings(secret_config.token)) s = { 'users': 'пользователи', 'chats':", "'Изменения подтверждены. 
Текущий статус настройки: {}'.format(config.settings_statuses[api.get_group_params(chat_id)[c.data.split('::')[0]]]) ) else: bot.answer_callback_query( callback_query_id = c.id, show_alert", "# Путь к приватному ключу WEBHOOK_URL_BASE = \"https://%s:%s\" % (WEBHOOK_HOST, WEBHOOK_PORT) WEBHOOK_URL_PATH =", "= msg if len(msg.text) not in [9, 21]: new_rules = msg.text[len(msg.text):msg.entities[0].length:-1][::-1] if utils.check_text(new_rules):", "api.get_group_params(msg.chat.id)['deletions']['forward']: # bot.delete_message( # msg.chat.id, # msg.message_id # ) # bot.send_message( # msg.chat.id,", "not None and utils.check_status(msg.from_user.id, msg.chat.id) and msg.forward_from_chat.id == utils.get_log_id(msg.chat.id) and utils.check_log(msg.chat.id) ) def", "= generate_broadcast_vars_menu_kb() ) @bot.callback_query_handler(func = lambda c: c.data == 'check_broadcast') def bot_admin_menu(c): bot.edit_message_text(", "= 'Все', callback_data = 'broadcast_check::all') btn4 = types.InlineKeyboardButton(text = 'Сейчас: {}'.format(s[curr_settings['broadcast']['check']['receivers']]), callback_data =", "bot_del_log(msg): print(1) user_id = msg.from_user.id try: admins = bot.get_chat_administrators(msg.forward_from_chat.id) status1 = False status2", "вас недостаточно прав для выполнения этого действия.' ) @bot.callback_query_handler(func = lambda c: c.data.startswith('to_group_settings_menu'))", "выполнения этого действия.' 
) @bot.callback_query_handler(func = lambda c: c.data.startswith('warns_count_')) def ro_time_change(c): change_count =", "inviter_info.user.first_name, invited_count = int(i['COUNT(`inviter`)']) ) bot.send_message( user_id, m, parse_mode = 'HTML' ) bot.answer_callback_query(", "c.message.message_id, text = text.user_messages['start'], parse_mode = 'HTML' ) bot.edit_message_reply_markup( chat_id = c.message.chat.id, message_id", ") bot.edit_message_text( text = text.group_commands[utils.get_group_lang(c.message.chat.id)]['restricted']['new_user']['button_pressed'].format( user_id = user.user.id, user_name = api.replacer(user.user.first_name) ), parse_mode", "user_id = user.user.id, user_name = api.replacer(user.user.first_name) ), parse_mode = 'HTML', chat_id = c.message.chat.id,", "'voice', 'location', 'contact'], func = lambda msg: not utils.check_status(msg.from_user.id, msg.chat.id)) def testt(msg): start_time", "'Назад', callback_data='to_group_settings_menu::{chat_id}'.format(chat_id = chat_id)) kb.add(btn) return kb def new_users_restrictions_kb(chat_id): keyboard = types.InlineKeyboardMarkup(row_width =", "time.time()-start_time) @bot.message_handler(commands = ['reregister'], func = lambda msg: msg.chat.type == 'supergroup') def bot_reregister(msg):", "= c.from_user.id inviters = utils.get_top_inviters(chat_id) m = text.group_commands[utils.get_group_lang(chat_id)]['refs_stats']['header'] counter = 0 for i", "txt = text.reports_messages['report']['to_admin']['no_username'] for i in admins: try: bot.send_message( i.user.id, txt.format( group_name =", "= 'broadcast_check::all') btn4 = types.InlineKeyboardButton(text = 'Сейчас: {}'.format(s[curr_settings['broadcast']['check']['receivers']]), callback_data = 'empty_callback') btn5 =", "msg.caption), parse_mode ='HTML') utils.new_update(msg, time.time()-start_time) @bot.message_handler(content_types = ['sticker'], func = lambda msg: not", "msg.chat.id) and msg.reply_to_message is not None: user_id = 
msg.reply_to_message.from_user.id utils.unban_user(msg, user_id) elif utils.check_status(msg.from_user.id,", "import config import secret_config import text import ujson import utils WEBHOOK_HOST = utils.get_my_ip()", "= msg.text[len(msg.text):msg.entities[0].length:-1][::-1] if utils.check_text(new_greeting): utils.set_greeting(msg, new_greeting) bot.send_message( msg.chat.id, 'Приветствие изменено' ) else: bot.send_message(", "callback_data = 'get_settings_json::{chat_id}'.format(chat_id = chat_id)) keyboard.add(btn) btn = types.InlineKeyboardButton(text = 'Получить топ инвайтеров',", "def bot_user_menu(msg): bot.send_message( msg.from_user.id, 'Ваше меню', reply_markup = generate_user_menu_kb(msg.from_user.id) ) @bot.message_handler(commands=['set_text'], func =", "bot.leave_chat( msg.chat.id ) if msg.new_chat_member.id == 495038140: api.change_group_params(msg.chat.id, ujson.dumps(config.default_group_settings)) else: if api.get_group_params(msg.chat.id)['restrictions']['read_only']: bot.restrict_chat_member(", "func = lambda msg: not utils.check_status(msg.from_user.id, msg.chat.id)) def bot_check_sticker(msg): start_time = time.time() if", "= re.split('::', c.data) lang = words[1] bot.edit_message_text( chat_id = c.message.chat.id, message_id = c.message.message_id,", "{}'.format(curr_settings['restrictions']['for_time'], config.settings_statuses[curr_settings['restrictions']['read_only']]), callback_data = 'read_only::{chat_id}'.format(chat_id = chat_id)) keyboard.add(btn) btn1 = types.InlineKeyboardButton(text = '➖2',", "settings['warns']['count'] < 1: settings['warns']['count'] = 1 api.change_group_params(chat_id, ujson.dumps(settings)) bot.edit_message_reply_markup( chat_id = c.message.chat.id, message_id", "chat_id = c.message.chat.id, message_id = c.message.message_id, reply_markup = group_setting(chat_id), ) @bot.callback_query_handler(func = lambda", "'Кик', callback_data = 'warns_action_1::{chat_id}'.format(chat_id = chat_id)) btn3 = 
types.InlineKeyboardButton(text = 'Бан', callback_data =", "chat_id = utils.parse_chat_id(c) if utils.check_status(c.from_user.id, utils.parse_chat_id(c)): settings = api.get_group_params(chat_id) curr_state = settings['greeting']['is_enabled'] new_state", "{} {}'.format(msg.message_id, r.message_id))) bot.edit_message_reply_markup( chat_id = msg.chat.id, message_id = r.message_id, reply_markup = kb", ") except Exception as e: print(e) @bot.message_handler(commands = ['dellog'], func = lambda msg:", "text = 'Изменения подтверждены.' ) else: t = Thread(target = utils.make_broadcast, kwargs =", "callback_data = 'get_notifications::{chat_id}'.format(chat_id = chat_id)) keyboard.add(btn) btn = types.InlineKeyboardButton(text = 'Удалять ссылки{}'.format(config.settings_statuses[curr_settings['deletions']['url']]), callback_data", "lambda c: c.data.startswith('welcome_settings')) def welcome_settings(c): chat_id = utils.parse_chat_id(c) bot.edit_message_reply_markup( chat_id = c.message.chat.id, message_id", "to_deletions(c): chat_id = utils.parse_chat_id(c) bot.edit_message_reply_markup( chat_id = c.message.chat.id, message_id = c.message.message_id, reply_markup =", "'HTML' ) else: bot.reply_to( msg, text.group_commands[utils.get_group_lang(msg.chat.id)]['errors']['prefix'].format( reason = text.group_commands[utils.get_group_lang(msg.chat.id)]['errors']['reasons']['user_is_admin'] ), parse_mode='HTML' ) except", ") # bot.send_message( # msg.chat.id, # text.group_commands[utils.get_group_lang(msg.chat.id)]['restricted']['url'].format( # user_id = msg.from_user.id, # user_name", "'Принимать рассылки{}'.format(config.settings_statuses['get_notifications']), callback_data='get_notifications') keyboard.add(btn) btn = types.InlineKeyboardButton(text = 'Выбор языка'.format(config.settings_statuses['get_notifications']), callback_data='open_lang_menu') keyboard.add(btn) return", "kb.add(types.InlineKeyboardButton(text = 'Удалить', callback_data = 'settings_delete {} 
{}'.format(msg.message_id, r.message_id))) bot.edit_message_reply_markup( chat_id = msg.chat.id,", "бан']: if utils.check_super_user(msg.from_user.id): utils.global_ban(msg) elif not utils.check_status(msg.from_user.id, msg.chat.id): # if utils.is_new_in_chat(msg) and api.get_group_params(msg.chat.id)['restrict_new']", "= ujson.loads(api.get_user_param(user_id, 'settings')) btns = [] for i in user_settings['admined_groups']: btn = types.InlineKeyboardButton(text", "chat_id = c.message.chat.id, message_id = c.message.message_id, text = 'Ваше меню' ) bot.edit_message_reply_markup( chat_id", "e: print(e) @bot.message_handler(commands = ['dellog'], func = lambda msg: msg.chat.type in ['group', 'supergroup']", "chat_id)) keyboard.add(btn) btn = types.InlineKeyboardButton(text = 'Ограничения новых пользователей', callback_data = 'new_users_restrictions::{chat_id}'.format(chat_id =", "types.InlineKeyboardButton(text = 'Отправлять приветствие в чат: {}'.format(config.settings_statuses[curr_settings['greeting']['is_enabled']]), callback_data = 'welcome_state::{chat_id}'.format(chat_id = chat_id)) kb.add(btn)", "= chat_id)) keyboard.add(btn) return keyboard def remove_warns_kb(user_id): kb = types.InlineKeyboardMarkup(row_width=1) btn = types.InlineKeyboardButton(text", "= 'check_broadcast') btn2 = types.InlineKeyboardButton(text = 'Рассылка сообщения', callback_data = 'broadcast_settings') kb.add(btn1, btn2)", "== 'private') def bot_user_start(msg): message = msg start_time = time.time() if utils.is_user_new(msg): if", "- %(name)-9s [%(asctime)s] - %(message)-50s ', datefmt='%m/%d/%Y %I:%M:%S %p', level = logging.INFO )", "api.register_new_chat(msg.chat) utils.new_update(msg, time.time()-start_time) @bot.message_handler(commands = ['get_logs'], func = lambda msg: msg.chat.id == -1001236256304", "msg.chat.id, # msg.message_id # ) # bot.send_message( # msg.chat.id, # text.group_commands[utils.get_group_lang(msg.chat.id)]['restricted']['url'].format( # user_id", 
"message_id=c.message.message_id, reply_markup=group_setting(utils.parse_chat_id(c)) ) bot.answer_callback_query( callback_query_id = c.id, text = 'Изменения подтверждены. Текущий статус", "случае сбоя:\\n'+ujson.dumps(api.get_group_params(chat_id)) ) bot.answer_callback_query( c.id, text = 'Настройки отправлены', show_alert = True )", "msg.chat.id ) if msg.new_chat_member.id == 495038140: api.change_group_params(msg.chat.id, ujson.dumps(config.default_group_settings)) else: if api.get_group_params(msg.chat.id)['restrictions']['read_only']: bot.restrict_chat_member( msg.chat.id,", "[LINE:%(lineno)-3d]# %(levelname)-8s - %(name)-9s [%(asctime)s] - %(message)-50s ', datefmt='%m/%d/%Y %I:%M:%S %p', level =", "time.time() api.register_new_chat(msg.chat) chat_id = msg.chat.id utils.new_member_logs(msg) if api.get_group_params(msg.chat.id)['deletions']['system']: bot.delete_message( msg.chat.id, msg.message_id ) if", "lambda c: c.data == 'broadcast_menu') def bot_admin_menu(c): bot.edit_message_text( chat_id = c.message.chat.id, message_id =", "utils.check_status(c.from_user.id, utils.parse_chat_id(c)): utils.change_state_deletions_main(chat_id, 'url') bot.edit_message_reply_markup( chat_id=c.message.chat.id, message_id=c.message.message_id, reply_markup=group_setting(utils.parse_chat_id(c)) ) bot.answer_callback_query( callback_query_id = c.id,", "lambda c: c.data.startswith('welcome_get')) def get_welcome_text(c): chat_id = utils.parse_chat_id(c) bot.send_message( c.message.chat.id, utils.get_greeting(chat_id), parse_mode =", "chat_id)) keyboard.add(btn) btn1 = types.InlineKeyboardButton(text = '➖2', callback_data = 'warns_count_-2::{chat_id}'.format(chat_id = chat_id)) btn2", "callback_data = 'welcome_timer_+10::{chat_id}'.format(chat_id = chat_id)) kb.add(btn1, btn2, btn3, btn4) btn = types.InlineKeyboardButton(text =", "{}'.format(s[curr_settings['broadcast']['check']['receivers']]), callback_data = 'empty_callback') btn5 = types.InlineKeyboardButton(text = 'Начать 
рассылку', callback_data = 'broadcast_check::start')", "user.user.id, user_name = api.replacer(user.user.first_name) ), parse_mode = 'HTML', chat_id = c.message.chat.id, message_id =", "= i['chat_id'])) btns.append(btn) kb.add(*btns) kb.add(types.InlineKeyboardButton(text = 'В главное меню', callback_data = 'to_main_menu')) return", "= c.message.chat.id, message_id = c.message.message_id, text = 'Список ваших групп' ) bot.edit_message_reply_markup( chat_id", ") bot.answer_callback_query( callback_query_id = c.id, text = 'Переход выполнен.' ) @bot.callback_query_handler(func = lambda", "= 'HTML', chat_id = c.message.chat.id, message_id = c.message.message_id ) utils.add_to_delete_queue(msg.chat.id, r.message_id, api.get_group_params(msg.chat.id)['greeting']['delete_timer']) else:", "= [] for i in user_settings['admined_groups']: btn = types.InlineKeyboardButton(text = i['title'], callback_data =", "True: utils.set_log_channel(msg.chat.id, msg.forward_from_chat.id) elif status1 is not True: bot.send_message( msg.chat.id, text = text.group_commands[utils.get_group_lang(chat_id)]['log_channel']['confirmation']['errors']['user_is_not_creator']", "callback_query_id = c.id, text = 'Переход выполнен.' 
) @bot.callback_query_handler(func = lambda c: c.data.startswith('delete::'))", "def bot_version(msg): bot.send_message( msg.chat.id, text.user_messages[utils.get_user_lang(msg)]['commands']['version'].format(version = text.VERSION), parse_mode = 'HTML' ) @bot.message_handler(commands =", "@bot.message_handler(commands = ['update_time'], func = lambda msg: utils.check_super_user(msg.from_user.id)) def bot_update_time(msg): bot_ping(msg) subprocess.run(\"timedatectl set-time", "= types.InlineKeyboardMarkup(row_width=1) curr_settings = api.get_group_params(chat_id) btn = types.InlineKeyboardButton(text = 'Принимать рассылки{}'.format(config.settings_statuses[curr_settings['get_notifications']]), callback_data =", "= 'welcome_timer_-10::{chat_id}'.format(chat_id = chat_id)) btn2 = types.InlineKeyboardButton(text = '➖5', callback_data = 'welcome_timer_-5::{chat_id}'.format(chat_id =", "'supergroup') def bot_ban_me_please(msg): start_time = time.time() if msg.text == '/ban_me_please': t = random.randint(1,", "'Сброс отменен' ) @bot.callback_query_handler(func = lambda c: c.data.startswith('leave_')) def bot_leave_cb(c): if utils.check_status(c.from_user.id, utils.parse_chat_id(c)):", "utils.check_status(msg.from_user.id, msg.chat.id): # if utils.is_new_in_chat(msg) and api.get_group_params(msg.chat.id)['restrict_new'] == '1': if utils.check_for_urls(msg) and api.get_group_params(msg.chat.id)['deletions']['url']:", "8443 # 443, 80, 88 или 8443 (порт должен быть открыт!) 
# На", "bot.send_message( msg.from_user.id, text.user_messages[utils.get_user_lang(msg)]['help'], parse_mode='HTML' ) utils.new_update(msg, time.time()-start_time) @bot.message_handler(commands=['about'], func=lambda msg: msg.chat.type == 'private')", "= inviter_info.user.first_name, invited_count = int(i['COUNT(`inviter`)']) ) bot.send_message( user_id, m, parse_mode = 'HTML' )", "'пользователи', 'chats': 'диалоги', 'all': 'все' } btn1 = types.InlineKeyboardButton(text = 'Только диалоги', callback_data", "= c.message.message_id, text = text.user_messages[lang]['chosen_language']) api.register_new_user(c.from_user, lang) @bot.callback_query_handler(func = lambda c: c.data.startswith('get_notifications')) def", "callback_data = 'warns_count_+1::{chat_id}'.format(chat_id = chat_id)) btn4 = types.InlineKeyboardButton(text = '➕2', callback_data = 'warns_count_+2::{chat_id}'.format(chat_id", "= c.message.chat.id, message_id = c.message.message_id, reply_markup = create_user_language_keyboard() ) bot.answer_callback_query( callback_query_id = c.id,", "api.change_group_params(chat_id, ujson.dumps(config.default_group_settings)) bot.send_message( c.message.chat.id, 'Настройки сброшены.' 
) bot.delete_message( c.message.chat.id, c.message.message_id ) else: bot.delete_message(", "= chat_id)) kb.add(btn) btn1 = types.InlineKeyboardButton(text = '➖10', callback_data = 'welcome_timer_-10::{chat_id}'.format(chat_id = chat_id))", "= 'Разблокировать', callback_data = 'unban_new_user::{chat_id}::{user_id}'.format(user_id = msg.new_chat_member.id, chat_id = msg.chat.id)) kb.add(btn) return kb", "= time.time() msg_text = msg.text msg_text_low = msg_text.lower() if utils.is_restricted(msg) and not utils.check_status(msg.from_user.id,", "'warns_action_0::{chat_id}'.format(chat_id = chat_id)) btn2 = types.InlineKeyboardButton(text = 'Кик', callback_data = 'warns_action_1::{chat_id}'.format(chat_id = chat_id))", "= lambda msg: msg.chat.id == 303986717) def bot_text(msg): start_time = time.time() bot.reply_to(msg, \"<code>'{}':", "elif status2 is not True: bot.send_message( msg.chat.id, text = text.group_commands[utils.get_group_lang(chat_id)]['log_channel']['confirmation']['errors']['bot_is_not_admin'] ) except Exception", "), reply_markup = unban_new_user_kb(msg), parse_mode = 'HTML' ) utils.add_to_delete_queue(msg.chat.id, r.message_id, api.get_group_params(msg.chat.id)['restrictions']['for_time']*3600) if msg.new_chat_member.is_bot", "bot_reregister(msg): start_time = time.time() if utils.check_status(msg.from_user.id, msg.chat.id): api.register_new_chat(msg.chat) api.change_group_params(msg.chat.id, ujson.dumps(config.default_group_settings)) bot.send_message( msg.chat.id, text.group_commands[utils.get_group_lang(msg.chat.id)]['registration'],", "utils.check_status(msg.from_user.id, msg.chat.id)) def bot_set_rules(msg): start_time = time.time() message = msg if len(msg.text) not", "func = lambda msg: msg.chat.type == 'supergroup') def bot_answ(msg): start_time = time.time() message", "r = bot.send_message( msg.chat.id, text.group_commands['ru']['restricted']['new_user']['read_only'].format( user_id = msg.new_chat_member.id, user_name = 
api.replacer(msg.new_chat_member.first_name), ban_time =", "= c.message.chat.id, message_id = c.message.message_id, reply_markup = welcome_settings_kb(chat_id) ) bot.answer_callback_query( callback_query_id = c.id,", "def generate_broadcast_check_menu_kb(): kb = types.InlineKeyboardMarkup(row_width = 3) curr_settings = ujson.loads(api.get_bot_settings(secret_config.token)) s = {", "bot_set_log(msg): user_id = msg.from_user.id try: admins = bot.get_chat_administrators(msg.forward_from_chat.id) status1 = False status2 =", ") bot.delete_message( msg.chat.id, r.message_id ) bot.delete_message( msg.chat.id, msg.message_id ) @bot.message_handler(commands = ['settings'], func", "c: c.data.startswith('reset_settings')) def reset_settings_button(c): chat_id = utils.parse_chat_id(c) if utils.check_status(c.from_user.id, utils.parse_chat_id(c)): if c.data.startswith('reset_settings_confirmation'): api.register_new_chat(c.message.chat)", "= types.InlineKeyboardButton(text = 'Бан', callback_data = 'warns_action_2::{chat_id}'.format(chat_id = chat_id)) btn4 = types.InlineKeyboardButton(text =", "= 'Сейчас: {}'.format(s[curr_settings['broadcast']['check']['receivers']]), callback_data = 'empty_callback') btn5 = types.InlineKeyboardButton(text = 'Начать рассылку', callback_data", "= bot.forward_message(secret_config.official_chat, msg.chat.id, msg.message_id) bot.pin_chat_message( r.chat.id, r.message_id ) @bot.message_handler(commands =['setlog'], func = lambda", "msg, text.reports_messages['report']['to_user'], parse_mode = 'HTML' ) utils.new_update(msg, time.time()-start_time) @bot.message_handler(commands = ['unban'], func =", "сообщения', ) kb.add(types.InlineKeyboardButton(text = 'Удалить', callback_data = 'settings_delete {} {}'.format(msg.message_id, r.message_id))) bot.edit_message_reply_markup( chat_id", "= warns_settings_kb(chat_id) ) bot.answer_callback_query( callback_query_id = c.id, text = 'Изменения подтверждены.' 
) @bot.callback_query_handler(func", "btn = types.InlineKeyboardButton(text = 'Отправлять приветствие в чат: {}'.format(config.settings_statuses[curr_settings['greeting']['is_enabled']]), callback_data = 'welcome_state::{chat_id}'.format(chat_id =", "3) curr_settings = ujson.loads(api.get_bot_settings(secret_config.token)) s = { 'users': 'пользователи', 'chats': 'диалоги', 'all': 'все'", "@bot.message_handler(commands=['warn'], func=lambda msg: msg.chat.type != 'private') def bot_new_warn(msg): start_time = time.time() if utils.check_status(msg.from_user.id,", "c.from_user.id, text = 'Эти настройки можно получить в любое время и отправить @f0rden", "web.Application() t = Thread(target = utils.check_deleting_queue) t.start() async def handle(request): if request.match_info.get('token') ==", "= 'broadcast_check::start') kb.add(btn1, btn2, btn3) kb.add(btn4, btn5) return kb def generate_user_groups(user_id): kb =", "time.time()-start_time) @bot.message_handler(commands=['warn'], func=lambda msg: msg.chat.type != 'private') def bot_new_warn(msg): start_time = time.time() if", "ujson.dumps(curr_bot_settings)) bot.edit_message_reply_markup( chat_id = c.message.chat.id, message_id = c.message.message_id, reply_markup = generate_broadcast_check_menu_kb() ) bot.answer_callback_query(", "kb.add(btn1, btn2, btn3) return kb def generate_broadcast_check_menu_kb(): kb = types.InlineKeyboardMarkup(row_width = 3) curr_settings", "bot_broadcast(msg): r = bot.forward_message(secret_config.official_chat, msg.chat.id, msg.message_id) bot.pin_chat_message( r.chat.id, r.message_id ) @bot.message_handler(commands =['setlog'], func", "utils.check_status(c.from_user.id, utils.parse_chat_id(c)): settings = api.get_group_params(chat_id) settings['restrictions']['read_only'] = config.settings_states[settings['restrictions']['read_only']] api.change_group_params(chat_id, ujson.dumps(settings)) bot.edit_message_reply_markup( chat_id =", "= 'Кик', callback_data = 
'warns_action_1::{chat_id}'.format(chat_id = chat_id)) btn3 = types.InlineKeyboardButton(text = 'Бан', callback_data", "utils.new_update(msg, time.time()-start_time) @bot.message_handler(commands=['start'], func=lambda msg: msg.chat.type == 'private') def bot_user_start(msg): message = msg", "= api.replacer(msg.from_user.first_name) ), parse_mode='HTML' ) # elif utils.check_for_forward(msg) and api.get_group_params(msg.chat.id)['deletions']['forward']: # bot.delete_message( #", "недостаточно прав для выполнения этого действия.' ) @bot.callback_query_handler(func = lambda c: c.data.startswith('new_restrictions_admins_only_')) def", "c.data.startswith('leave_')) def bot_leave_cb(c): if utils.check_status(c.from_user.id, utils.parse_chat_id(c)): if c.data.endswith('confirm'): bot.delete_message( c.message.chat.id, c.message.message_id ) bot.send_message(", "{}'.format(config.settings_statuses[api.get_group_params(chat_id)[c.data.split('::')[0]]]) ) else: bot.answer_callback_query( callback_query_id = c.id, show_alert = True, text = 'У", "new_mod = int(c.data.split('_')[2].split('::')[0]) chat_id = utils.parse_chat_id(c) if utils.check_status(c.from_user.id, utils.parse_chat_id(c)): settings = api.get_group_params(chat_id) settings['warns']['action']", "msg.chat.id, text.group_commands['ru']['restricted']['global_ban'].format( user_id = msg.new_chat_member.id, user_name = msg.new_chat_member.first_name ), parse_mode = 'HTML' )", "text. 
# ) @bot.message_handler(commands = ['version']) def bot_version(msg): bot.send_message( msg.chat.id, text.user_messages[utils.get_user_lang(msg)]['commands']['version'].format(version = text.VERSION),", "return kb def generate_broadcast_vars_menu_kb(): kb = types.InlineKeyboardMarkup(row_width = 1) btn1 = types.InlineKeyboardButton(text =", "except Exception: bot.reply_to( msg, text = '' ) utils.new_update(msg, time.time()-start_time) @bot.message_handler(commands = ['reset_settings'],", "callback_data = 'warns_action_1::{chat_id}'.format(chat_id = chat_id)) btn3 = types.InlineKeyboardButton(text = 'Бан', callback_data = 'warns_action_2::{chat_id}'.format(chat_id", "chat_id = c.message.chat.id, message_id = c.message.message_id, reply_markup = create_user_language_keyboard() ) bot.answer_callback_query( callback_query_id =", "text.group_commands[utils.get_group_lang(msg.chat.id)]['registration'], parse_mode = 'HTML' ) @bot.message_handler(commands=['ro'], func=lambda msg: msg.chat.type == 'supergroup') def bot_users_ro(msg):", "'all': 'все' } btn1 = types.InlineKeyboardButton(text = 'Только диалоги', callback_data = 'broadcast_check::users') btn2", "= \"/%s/\" % (secret_config.token) start_time = int(time.time()) bot = telebot.TeleBot(token = secret_config.token) my_info", "try: bot.send_message( i.user.id, txt.format( group_name = api.replacer(msg.chat.title), group_username = chat.username, message_id = msg_id,", "types.InlineKeyboardButton(text = 'Удалить предупреждения', callback_data = 'delete_warns::{user_id}'.format(user_id = user_id)) kb.add(btn) return kb def", "user_name = api.replacer(msg.from_user.first_name) # ), # parse_mode='HTML' # ) utils.new_update(msg, time.time()-start_time) @bot.message_handler(content_types=['photo'], func", "chat_id = utils.parse_chat_id(c) user_id = c.from_user.id inviters = utils.get_top_inviters(chat_id) m = text.group_commands[utils.get_group_lang(chat_id)]['refs_stats']['header'] counter", "api.get_group_params(chat_id) 
curr_state = settings['greeting']['is_enabled'] new_state = config.settings_states[curr_state] settings['greeting']['is_enabled'] = new_state api.change_group_params(chat_id, ujson.dumps(settings)) bot.edit_message_reply_markup(", "chat_id = utils.parse_chat_id(c) bot.send_message( c.message.chat.id, utils.get_greeting(chat_id), parse_mode = 'HTML' ) @bot.callback_query_handler(func = lambda", "message_id = c.message.message_id, reply_markup = warns_settings_kb(chat_id) ) bot.answer_callback_query( callback_query_id = c.id, text =", "btn = types.InlineKeyboardButton(text = 'Принимать рассылки{}'.format(config.settings_statuses[curr_settings['get_notifications']]), callback_data = 'get_notifications::{chat_id}'.format(chat_id = chat_id)) keyboard.add(btn) btn", "= '➕1', callback_data = 'time_ro_+1::{chat_id}'.format(chat_id = chat_id)) btn4 = types.InlineKeyboardButton(text = '➕2', callback_data", "c.message.message_id, reply_markup = generate_user_groups(user_id) ) bot.answer_callback_query( callback_query_id = c.id, text = 'Переход выполнен'", "cont_type in config.available_attachments: btn = types.InlineKeyboardButton(text=config.available_attachments_str[cont_type].format(config.settings_statuses[curr_settings['deletions']['files'][cont_type]]), callback_data='delete::{content_type}::{chat_id}'.format(content_type = cont_type, chat_id = chat_id)) keyboard.add(btn)", "'диалоги', 'all': 'все' } btn1 = types.InlineKeyboardButton(text = 'Только диалоги', callback_data = 'broadcast_check::users')", "msg.reply_to_message.sticker.file_id utils.ban_sticker(msg, sticker_id) elif not utils.check_status(msg.from_user.id, msg.chat.id): utils.send_err_report(msg, 'not_enought_rights') utils.new_update(msg, time.time()-start_time) @bot.message_handler(commands=['sticker_unban'], func=lambda", "api.get_group_params(msg.chat.id)['deletions']['url']: bot.delete_message( msg.chat.id, msg.message_id ) bot.send_message( msg.chat.id, 
text.group_commands[utils.get_group_lang(msg.chat.id)]['restricted']['url'].format( user_id = msg.from_user.id, user_name =", "msg.chat.id, utils.generate_welcome_text(msg), parse_mode='HTML' ) utils.add_to_delete_queue(msg.chat.id, r.message_id, api.get_group_params(msg.chat.id)['greeting']['delete_timer']) utils.new_update(msg, time.time()-start_time) @bot.message_handler(content_types=[ 'new_chat_members', 'left_chat_member', 'new_chat_title',", "time.time()-start_time) # Кнопки @bot.callback_query_handler(func = lambda c: c.data.startswith('get_chat_refs::')) def bot_get_chat_refs(c): chat_id = utils.parse_chat_id(c)", "utils.parse_chat_id(c)): api.zeroing_warns(user_id, chat_id) bot.edit_message_text( text = 'Предупреждения обнулены.', chat_id = c.message.chat.id, message_id =", "chat_id = c.message.chat.id, message_id = c.message.message_id, reply_markup = welcome_settings_kb(chat_id) ) bot.answer_callback_query( callback_query_id =", "else: bot.delete_message( c.message.chat.id, c.message.message_id ) bot.send_message( c.message.chat.id, 'Сброс отменен' ) @bot.callback_query_handler(func = lambda", "msg: msg.chat.type in ['group', 'supergroup'] and msg.forward_from_chat is not None and utils.check_status(msg.from_user.id, msg.chat.id)", "= 8443 # 443, 80, 88 или 8443 (порт должен быть открыт!) 
#", "utils.send_err_report(msg, 'not_enought_rights') elif utils.have_args(msg) and not check_status(msg.from_user.id): utils.send_err_report(msg, 'no_args_provided') utils.new_update(msg, time.time()-start_time) @bot.message_handler(commands=['help']) def", "api.get_group_params(msg.chat.id)['greeting']['delete_timer']) utils.new_update(msg, time.time()-start_time) @bot.message_handler(content_types=[ 'new_chat_members', 'left_chat_member', 'new_chat_title', 'new_chat_photo', 'delete_chat_photo', 'group_chat_created', 'supergroup_chat_created', 'channel_chat_created', 'migrate_to_chat_id',", "'broadcast_message::show') btn3 = types.InlineKeyboardButton(text = 'Начать рассылку', callback_data = 'broadcast_message::start') kb.add(btn1, btn2, btn3)", "callback_data = 'broadcast_check::chats') btn3 = types.InlineKeyboardButton(text = 'Все', callback_data = 'broadcast_check::all') btn4 =", "= text.group_commands[utils.get_group_lang(msg.chat.id)]['remove_keyboard'], reply_markup = kb ) bot.delete_message( msg.chat.id, r.message_id ) bot.delete_message( msg.chat.id, msg.message_id", "= bot.send_message( msg.chat.id, text = text.group_commands[utils.get_group_lang(msg.chat.id)]['remove_keyboard'], reply_markup = kb ) bot.delete_message( msg.chat.id, r.message_id", "'private') def bot_new_warn(msg): start_time = time.time() if utils.check_status(msg.from_user.id, msg.chat.id) and msg.reply_to_message is not", "callback_data = 'broadcast_message::start') kb.add(btn1, btn2, btn3) return kb def generate_broadcast_check_menu_kb(): kb = types.InlineKeyboardMarkup(row_width", "until_date=str(time.time() + ban_time)) bot.reply_to( msg, text.group_commands[utils.get_group_lang(msg.chat.id)]['ban_me_please'].format( t = t ), parse_mode = 'HTML'", "unban_new_user_kb(msg), parse_mode = 'HTML' ) utils.add_to_delete_queue(msg.chat.id, r.message_id, api.get_group_params(msg.chat.id)['restrictions']['for_time']*3600) if msg.new_chat_member.is_bot and 
api.get_group_params(msg.chat.id)['kick_bots']: bot.kick_chat_member(", "для выполнения этого действия. Текущий статус настройки: {}'.format(config.settings_statuses[api.get_group_params(chat_id)['deletions']['files'][cont_type]]) ) @bot.callback_query_handler(func = lambda c:", "21]: new_greeting = msg.text[len(msg.text):msg.entities[0].length:-1][::-1] if utils.check_text(new_greeting): utils.set_greeting(msg, new_greeting) bot.send_message( msg.chat.id, 'Приветствие изменено' )", "message_id = c.message.message_id, reply_markup = new_users_restrictions_kb(chat_id) ) @bot.callback_query_handler(func = lambda c: c.data.startswith('read_only')) def", "new_users_restrictions_kb(chat_id) ) @bot.callback_query_handler(func = lambda c: c.data.startswith('read_only')) def new_users_ro(c): chat_id = utils.parse_chat_id(c) if", "'➖2', callback_data = 'time_ro_-2::{chat_id}'.format(chat_id = chat_id)) btn2 = types.InlineKeyboardButton(text = '➖1', callback_data =", "msg.message_id) bot.pin_chat_message( r.chat.id, r.message_id ) @bot.message_handler(commands =['setlog'], func = lambda msg: msg.chat.type in", "60*t try: if not utils.check_status(msg.from_user.id, msg.chat.id): bot.restrict_chat_member( msg.chat.id, msg.from_user.id, until_date=str(time.time() + ban_time)) bot.reply_to(", "c.message.message_id, reply_markup=group_setting(utils.parse_chat_id(c)) ) bot.answer_callback_query( callback_query_id = c.id, text = 'Изменения подтверждены.' 
) @bot.callback_query_handler(func", "= kb ) bot.send_message( msg.from_user.id, '<b>Настройки группы {}</b>'.format(msg.chat.title), reply_markup=group_setting(msg.chat.id), parse_mode='HTML' ) utils.new_update(msg, time.time()-start_time)", "c.message.message_id ) else: bot.answer_callback_query( callback_query_id = c.id, show_alert = True, text = 'У", "'to_main_menu')) return kb @bot.channel_post_handler(content_types=['text'], func = lambda msg: msg.chat.id == secret_config.channel_ID) def bot_broadcast(msg):", "'HTML' ) else: utils.new_user_in_chat(msg) if utils.need_greeting(msg): r = bot.send_message( msg.chat.id, utils.generate_welcome_text(msg), parse_mode='HTML' )", "{}'.format(config.settings_statuses[api.get_group_params(chat_id)['deletions']['files'][cont_type]]) ) @bot.callback_query_handler(func = lambda c: c.data.startswith('change_all')) def group_settings_deletions_all(c): chat_id = utils.parse_chat_id(c) if", "'➖1', callback_data = 'warns_count_-1::{chat_id}'.format(chat_id = chat_id)) btn3 = types.InlineKeyboardButton(text = '➕1', callback_data =", "= c.message.message_id, reply_markup = generate_broadcast_check_menu_kb() ) @bot.callback_query_handler(func = lambda c: c.data.startswith('broadcast_check')) def bot_broadcast_check(c):", "warns_count_change(c): chat_id = utils.parse_chat_id(c) state = c.data.split('_')[4].split('::')[0] if utils.check_status(c.from_user.id, utils.parse_chat_id(c)): settings = api.get_group_params(chat_id)", "utils.check_status(c.from_user.id, utils.parse_chat_id(c)): api.zeroing_warns(user_id, chat_id) bot.edit_message_text( text = 'Предупреждения обнулены.', chat_id = c.message.chat.id, message_id", "'empty_callback::{chat_id}'.format(chat_id = chat_id)) keyboard.add(btn) btn1 = types.InlineKeyboardButton(text = '➖2', callback_data = 'warns_count_-2::{chat_id}'.format(chat_id =", "выполнения этого действия.' 
) else: if c.from_user.id == user_id or utils.check_status(c.from_user.id, utils.parse_chat_id(c)): user", "'Все', callback_data = 'broadcast_check::all') btn4 = types.InlineKeyboardButton(text = 'Сейчас: {}'.format(s[curr_settings['broadcast']['check']['receivers']]), callback_data = 'empty_callback')", "msg.chat.id, msg.new_chat_member.id ) bot.send_message( msg.chat.id, text.group_commands['ru']['restricted']['bot'], parse_mode = 'HTML', reply_markup = types.ReplyKeyboardRemove() )", "= chat_id)) btn5 = types.InlineKeyboardButton(text = 'Навсегда', callback_data = 'time_ro_+10000::{chat_id}'.format(chat_id = chat_id)) btn6", "utils.is_new_in_chat(msg) and api.get_group_params(msg.chat.id)['restrict_new'] == '1': if utils.check_for_urls(msg) and api.get_group_params(msg.chat.id)['deletions']['url']: bot.delete_message( msg.chat.id, msg.message_id )", "'supergroup') def bot_reregister(msg): start_time = time.time() if utils.check_status(msg.from_user.id, msg.chat.id): api.register_new_chat(msg.chat) api.change_group_params(msg.chat.id, ujson.dumps(config.default_group_settings)) bot.send_message(", ") bot.answer_callback_query( callback_query_id = c.id, text = 'Изменения подтверждены.' 
) @bot.callback_query_handler(func = lambda", "'Админка' ) bot.edit_message_reply_markup( chat_id = c.message.chat.id, message_id = c.message.message_id, reply_markup = generate_admin_menu_kb() )", "} btn1 = types.InlineKeyboardButton(text = 'Только диалоги', callback_data = 'broadcast_check::users') btn2 = types.InlineKeyboardButton(text", "utils.add_to_delete_queue(msg.chat.id, r.message_id, api.get_group_params(msg.chat.id)['restrictions']['for_time']*3600) if msg.new_chat_member.is_bot and api.get_group_params(msg.chat.id)['kick_bots']: bot.kick_chat_member( msg.chat.id, msg.new_chat_member.id ) bot.send_message( msg.chat.id,", "@bot.message_handler(commands = ['voteban']) # def bot_voteban(msg): # utils.new_voteban(msg) # bot.send_message( # msg.chat.id, #", "= c.message.message_id, reply_markup = create_user_language_keyboard() ) bot.answer_callback_query( callback_query_id = c.id, text = 'Переход", "msg.chat.type == 'supergroup') def bot_reregister(msg): start_time = time.time() if utils.check_status(msg.from_user.id, msg.chat.id): api.register_new_chat(msg.chat) api.change_group_params(msg.chat.id,", ") @bot.callback_query_handler(func = lambda c: c.data.startswith('leave_')) def bot_leave_cb(c): if utils.check_status(c.from_user.id, utils.parse_chat_id(c)): if c.data.endswith('confirm'):", "types.InlineKeyboardButton(text = '➖1', callback_data = 'time_ro_-1::{chat_id}'.format(chat_id = chat_id)) btn3 = types.InlineKeyboardButton(text = '➕1',", "действия.' 
) @bot.callback_query_handler(func = lambda c: c.data.startswith('warns_settings')) def warns_count_change(c): chat_id = utils.parse_chat_id(c) bot.edit_message_reply_markup(", "settings['greeting']['delete_timer'] < 0: settings['greeting']['delete_timer'] = 0 api.change_group_params(chat_id, ujson.dumps(settings)) bot.edit_message_reply_markup( chat_id = c.message.chat.id, message_id", "'Настройки отправлены вам в личные сообщения', ) kb.add(types.InlineKeyboardButton(text = 'Удалить', callback_data = 'settings_delete", "bot.edit_message_text( chat_id = c.message.chat.id, message_id = c.message.message_id, text = 'Ваше меню' ) bot.edit_message_reply_markup(", "lang = words[1] bot.edit_message_text( chat_id = c.message.chat.id, message_id = c.message.message_id, text = text.user_messages[lang]['chosen_language'])", "вас недостаточно прав для выполнения этого действия. Текущий статус настройки: {}'.format(config.settings_statuses[api.get_group_params(chat_id)[c.data.split('::')[0]]]) ) @bot.callback_query_handler(func", "r.message_id, api.get_group_params(msg.chat.id)['greeting']['delete_timer']) else: bot.answer_callback_query( callback_query_id = c.id, show_alert = True, text = 'У", "api.get_group_params(chat_id) settings['restrictions']['admins_only'] = utils.to_bool(state) api.change_group_params(chat_id, ujson.dumps(settings)) bot.edit_message_reply_markup( chat_id = c.message.chat.id, message_id = c.message.message_id,", "@bot.callback_query_handler(func = lambda c: c.data.startswith('welcome_state')) def welcome_settings_state(c): chat_id = utils.parse_chat_id(c) if utils.check_status(c.from_user.id, utils.parse_chat_id(c)):", "btn = types.InlineKeyboardButton(text = 'Нет, останься', callback_data='leave_confirm::{chat_id}'.format(chat_id = chat_id)) keyboard.add(btn) return keyboard def", "utils.parse_chat_id(c)): if c.data.endswith('confirm'): bot.delete_message( c.message.chat.id, c.message.message_id ) bot.send_message( c.message.chat.id, 
text.group_commands[utils.get_group_lang(c.message.chat.id)]['leave']['accepted'] ) bot.leave_chat( c.message.chat.id", "для выполнения этого действия.' ) @bot.callback_query_handler(func = lambda c: c.data.startswith('unban_new_user')) def unban_new_user(c): chat_id", "kb.add(btn1, btn2) if utils.check_super_user(user_id): kb.add(types.InlineKeyboardButton(text = 'Админка бота', callback_data = 'admin_menu')) return kb", "= text.group_commands[utils.get_group_lang(chat_id)]['log_channel']['confirmation']['errors']['bot_is_not_admin'] ) except Exception as e: print(e) @bot.message_handler(commands = ['dellog'], func =", "r.message_id, api.get_group_params(msg.chat.id)['restrictions']['for_time']*3600) if msg.new_chat_member.is_bot and api.get_group_params(msg.chat.id)['kick_bots']: bot.kick_chat_member( msg.chat.id, msg.new_chat_member.id ) bot.send_message( msg.chat.id, text.group_commands['ru']['restricted']['bot'],", "= lambda c: c.data == 'check_broadcast') def bot_admin_menu(c): bot.edit_message_text( chat_id = c.message.chat.id, message_id", "func = lambda msg: msg.chat.type == 'supergroup') def bot_ban_me_please(msg): start_time = time.time() if", "if utils.check_for_urls(msg) and api.get_group_params(msg.chat.id)['deletions']['url']: bot.delete_message( msg.chat.id, msg.message_id ) bot.send_message( msg.chat.id, text.group_commands[utils.get_group_lang(msg.chat.id)]['restricted']['url'].format( user_id =", "'contact'], func = lambda msg: not utils.check_status(msg.from_user.id, msg.chat.id)) def testt(msg): start_time = time.time()", "настройки: {}'.format(config.settings_statuses[api.get_group_params(chat_id)['deletions']['url']]) ) else: bot.answer_callback_query( callback_query_id = c.id, show_alert = True, text =", "bot.process_new_updates([update]) return web.Response() else: return web.Response(status=403) app.router.add_post('/{token}/', handle) def create_user_language_keyboard(): lang_keyboard = types.InlineKeyboardMarkup()", 
"time.time()-start_time) @bot.message_handler(commands=['stickerpack_ban'],func=lambda msg: msg.chat.type == 'supergroup') def bot_stickerpack_ban(msg): start_time = time.time() if utils.check_status(msg.from_user.id,", "администратором. Текущий статус настройки: {}'.format(config.settings_statuses[api.get_group_params(chat_id)[c.data.split('::')[0]]]) ) @bot.callback_query_handler(func = lambda c: c.data.startswith('del_url')) def del_url(c):", "c.data.startswith('settings::')) def chat_settings(c): chat_id = utils.parse_chat_id(c) bot.edit_message_text( chat_id = c.message.chat.id, message_id = c.message.message_id,", "msg.chat.id ) # @bot.message_handler(commands = ['voteban']) # def bot_voteban(msg): # utils.new_voteban(msg) # bot.send_message(", "bot.send_message( msg.chat.id, text = text.group_commands[utils.get_group_lang(msg.chat.id)]['remove_keyboard'], reply_markup = kb ) bot.delete_message( msg.chat.id, r.message_id )", "from aiohttp import web from telebot import types import api import cherrypy import", "chat_id = utils.parse_chat_id(c) if utils.check_status(c.from_user.id, utils.parse_chat_id(c)): utils.change_state_main(chat_id, 'get_notifications') bot.edit_message_reply_markup( chat_id=c.message.chat.id, message_id=c.message.message_id, reply_markup=group_setting(utils.parse_chat_id(c)) )", "is not True: bot.send_message( msg.chat.id, text = text.group_commands[utils.get_group_lang(chat_id)]['log_channel']['confirmation']['errors']['bot_is_not_admin'] ) except Exception as e:", "= ['infolog'], func = lambda msg: msg.chat.type in ['group', 'supergroup']) def bot_info_log(msg): if", "api.register_new_chat(msg.chat) chat_id = msg.chat.id utils.new_member_logs(msg) if api.get_group_params(msg.chat.id)['deletions']['system']: bot.delete_message( msg.chat.id, msg.message_id ) if msg.chat.type", ") bot.send_message( msg.chat.id, text.group_commands[utils.get_group_lang(msg.chat.id)]['restricted']['url'].format( user_id = msg.from_user.id, user_name = 
api.replacer(msg.from_user.first_name) ), parse_mode='HTML' )", "msg: msg.chat.type in ['group', 'supergroup']) def bot_info_log(msg): if utils.check_log(msg.chat.id): m = text.group_commands[utils.get_group_lang(msg.chat.id)]['log_channel']['info']['is_on'].format( chat_id", "для выполнения этого действия. Текущий статус настройки: {}'.format(config.settings_statuses[api.get_group_params(chat_id)[c.data.split('::')[0]]]) ) @bot.callback_query_handler(func = lambda c:", "= chat_id)) btn2 = types.InlineKeyboardButton(text = '➖1', callback_data = 'warns_count_-1::{chat_id}'.format(chat_id = chat_id)) btn3", "= types.InlineKeyboardButton(text = 'Разблокировать', callback_data = 'unban_new_user::{chat_id}::{user_id}'.format(user_id = msg.new_chat_member.id, chat_id = msg.chat.id)) kb.add(btn)", "msg: msg.chat.type == 'private') def bot_about(msg): start_time = time.time() bot.send_message( msg.chat.id, text.user_messages[utils.get_user_lang(msg)]['about'], parse_mode='HTML'", "= 'Изменения подтверждены.' 
) @bot.callback_query_handler(func = lambda c: c.data.startswith('warns_action_')) def warns_count_change(c): new_mod =", "utils.parse_chat_id(c) if utils.check_status(c.from_user.id, utils.parse_chat_id(c)): utils.change_state_deletions_main(chat_id, 'url') bot.edit_message_reply_markup( chat_id=c.message.chat.id, message_id=c.message.message_id, reply_markup=group_setting(utils.parse_chat_id(c)) ) bot.answer_callback_query( callback_query_id", "keyboard.add(btn) btn = types.InlineKeyboardButton(text = 'Фильтры', callback_data='deletions_settings::{chat_id}'.format(chat_id = chat_id)) keyboard.add(btn) btn = types.InlineKeyboardButton(text", "message = msg if len(msg.text) not in [9, 21]: new_greeting = msg.text[len(msg.text):msg.entities[0].length:-1][::-1] if", "= 'logs.txt' f = open(log_name,'w') f.close() print('Файл логов создан') telebot_logger = logging.getLogger('telebot') mysql_info", "не стоит', callback_data = 'reset_settings_abort::{chat_id}'.format(chat_id = msg.chat.id))) if utils.check_status(msg.from_user.id, msg.chat.id): bot.send_message( msg.chat.id, 'Вы", "= 'Список отправлен', show_alert = True ) @bot.callback_query_handler(func = lambda c: c.data in", "'to_main_menu')) return kb def generate_broadcast_vars_menu_kb(): kb = types.InlineKeyboardMarkup(row_width = 1) btn1 = types.InlineKeyboardButton(text", "btn4 = types.InlineKeyboardButton(text = '➕10', callback_data = 'welcome_timer_+10::{chat_id}'.format(chat_id = chat_id)) kb.add(btn1, btn2, btn3,", "group_settings_deletions_photo(c): chat_id = utils.parse_chat_id(c) bot.edit_message_reply_markup( chat_id = c.message.chat.id, message_id = c.message.message_id, reply_markup=group_setting(utils.parse_chat_id(c)) )", "для выполнения этого действия.' 
) @bot.callback_query_handler(func = lambda c: c.data.startswith('new_restrictions_admins_only_')) def warns_count_change(c): chat_id", "@bot.callback_query_handler(func = lambda c: c.data == 'broadcast_menu') def bot_admin_menu(c): bot.edit_message_text( chat_id = c.message.chat.id,", "utils.parse_chat_id(c) user_id = c.from_user.id inviters = utils.get_top_inviters(chat_id) m = text.group_commands[utils.get_group_lang(chat_id)]['refs_stats']['header'] counter = 0", "= c.message.chat.id, message_id = c.message.message_id, text = text.user_messages['start'], parse_mode = 'HTML' ) bot.edit_message_reply_markup(", "api.replacer(msg.from_user.first_name), ), parse_mode='HTML' ) except Exception as e: print(e) bot.reply_to( msg, text.reports_messages['report']['to_user'], parse_mode", "= 'В главное меню', callback_data = 'to_main_menu')) return kb def generate_broadcast_settings_menu_kb(): kb =", "callback_data = 'broadcast_check::all') btn4 = types.InlineKeyboardButton(text = 'Сейчас: {}'.format(s[curr_settings['broadcast']['check']['receivers']]), callback_data = 'empty_callback') btn5", "utils.new_update(msg, time.time()-start_time) @bot.message_handler(commands = ['rules'], func = lambda msg: msg.chat.type != 'private') def", "@bot.message_handler(content_types = ['audio', 'document', 'photo', 'sticker', 'video', 'video_note', 'voice', 'location', 'contact'], func =", "user_id = c.from_user.id bot.edit_message_text( chat_id = c.message.chat.id, message_id = c.message.message_id, text = text.user_messages['start'],", ") @bot.callback_query_handler(func = lambda c: c.data == 'broadcast_menu') def bot_admin_menu(c): bot.edit_message_text( chat_id =", "== 'broadcast_menu') def bot_admin_menu(c): bot.edit_message_text( chat_id = c.message.chat.id, message_id = c.message.message_id, text =", "utils.check_global_ban(msg): bot.kick_chat_member( msg.chat.id, msg.new_chat_member.id ) bot.send_message( msg.chat.id, text.group_commands['ru']['restricted']['global_ban'].format( 
user_id = msg.new_chat_member.id, user_name =", "msg.chat.type == 'supergroup') def bot_stickerpack_ban(msg): start_time = time.time() if utils.check_status(msg.from_user.id, msg.chat.id): utils.ban_stickerpack(msg) else:", "= lambda c: c.data.startswith('warns_settings')) def warns_count_change(c): chat_id = utils.parse_chat_id(c) bot.edit_message_reply_markup( chat_id = c.message.chat.id,", "kb ) @bot.message_handler(commands = ['update_time'], func = lambda msg: utils.check_super_user(msg.from_user.id)) def bot_update_time(msg): bot_ping(msg)", "utils.check_status(c.from_user.id, utils.parse_chat_id(c)): user = bot.get_chat_member( chat_id, user_id ) if user.status in ['restricted']: bot.restrict_chat_member(", "and msg.forward_from_chat is not None and utils.check_status(msg.from_user.id, msg.chat.id) and msg.forward_from_chat.id == utils.get_log_id(msg.chat.id) and", "change_count if settings['greeting']['delete_timer'] < 0: settings['greeting']['delete_timer'] = 0 api.change_group_params(chat_id, ujson.dumps(settings)) bot.edit_message_reply_markup( chat_id =", "= lambda msg: utils.check_super_user(msg.from_user.id)) def bot_update_time(msg): bot_ping(msg) subprocess.run(\"timedatectl set-time '{time}'\".format(time = datetime.datetime.fromtimestamp(msg.date+1).strftime(\"%Y-%m-%d %H:%M:%S\")),", "bot_change_lang(c): user_id = c.from_user.id bot.edit_message_text( chat_id = c.message.chat.id, message_id = c.message.message_id, text =", "utils.parse_chat_id(c) if utils.check_status(c.from_user.id, utils.parse_chat_id(c)): settings = api.get_group_params(chat_id) settings['restrictions']['read_only'] = config.settings_states[settings['restrictions']['read_only']] api.change_group_params(chat_id, ujson.dumps(settings)) bot.edit_message_reply_markup(", "bot_version(msg): bot.send_message( msg.chat.id, text.user_messages[utils.get_user_lang(msg)]['commands']['version'].format(version = text.VERSION), parse_mode = 'HTML' ) @bot.message_handler(commands = 
['set_rules'],", "and utils.check_status(msg.from_user.id, msg.chat.id) and not utils.check_log(msg.chat.id) ) def bot_set_log(msg): user_id = msg.from_user.id try:", ") bot.answer_callback_query( callback_query_id = c.id, text = 'Изменения подтверждены. Текущий статус настройки: {}'.format(config.settings_statuses[api.get_group_params(chat_id)['deletions']['url']])", "bot.answer_callback_query( c.id, text = 'Список отправлен', show_alert = True ) @bot.callback_query_handler(func = lambda", "= generate_admin_menu_kb() ) @bot.callback_query_handler(func=lambda c: c.data.startswith('lang::')) def change_language(c): words = re.split('::', c.data) lang", "generate_user_menu_kb(c.from_user.id) ) @bot.callback_query_handler(func = lambda c: c.data == 'broadcast_menu') def bot_admin_menu(c): bot.edit_message_text( chat_id", "= types.InlineKeyboardButton(text = 'Назад', callback_data='to_group_settings_menu::{chat_id}'.format(chat_id = chat_id)) keyboard.add(btn) return keyboard def warns_settings_kb(chat_id): keyboard", "= generate_broadcast_check_menu_kb() ) @bot.callback_query_handler(func = lambda c: c.data.startswith('broadcast_check')) def bot_broadcast_check(c): arg = c.data.split('::')[1]", "callback_data = 'stats_menu') kb.add(btn1, btn2) kb.add(types.InlineKeyboardButton(text = 'В главное меню', callback_data = 'to_main_menu'))", "= 'Изменения подтверждены. 
Статус настройки: {}'.format(config.settings_statuses[api.get_group_params(chat_id)['deletions']['files'][cont_type]]) ) else: bot.answer_callback_query( callback_query_id = c.id, show_alert", "kb @bot.channel_post_handler(content_types=['text'], func = lambda msg: msg.chat.id == secret_config.channel_ID) def bot_broadcast(msg): r =", "bot.edit_message_reply_markup( chat_id = c.message.chat.id, message_id = c.message.message_id, reply_markup = welcome_settings_kb(chat_id) ) bot.answer_callback_query( callback_query_id", "api.replacer(msg.new_chat_member.first_name), ban_time = api.get_group_params(msg.chat.id)['restrictions']['for_time'] ), reply_markup = unban_new_user_kb(msg), parse_mode = 'HTML' ) utils.add_to_delete_queue(msg.chat.id,", "= logging.getLogger('mysql') main_info = logging.getLogger('main_info') report_info = logging.getLogger('reports') print('Список логгеров создан') logging.basicConfig( format='%(filename)s", "bot.get_chat_administrators(msg.chat.id) chat = bot.get_chat(msg.chat.id) msg_id = '' if chat.username: if msg.reply_to_message: msg_id =", "btn3) return kb def generate_broadcast_check_menu_kb(): kb = types.InlineKeyboardMarkup(row_width = 3) curr_settings = ujson.loads(api.get_bot_settings(secret_config.token))", "utils.new_update(msg, time.time()-start_time) @bot.message_handler(commands = ['get_logs'], func = lambda msg: msg.chat.id == -1001236256304 and", "== 'supergroup') def bot_users_ro(msg): start_time = time.time() if utils.check_status(msg.from_user.id, msg.chat.id): utils.read_only(msg) else: utils.send_err_report(msg,", "def new_users_restrictions(c): chat_id = utils.parse_chat_id(c) bot.edit_message_reply_markup( chat_id = c.message.chat.id, message_id = c.message.message_id, reply_markup", "utils.parse_chat_id(c) cont_type = re.split('::', c.data)[1] if utils.check_status(c.from_user.id, utils.parse_chat_id(c)): utils.change_state_deletions_files(chat_id, cont_type) bot.edit_message_reply_markup( chat_id =", "btn4 = 
types.InlineKeyboardButton(text = '➕2', callback_data = 'warns_count_+2::{chat_id}'.format(chat_id = chat_id)) keyboard.add(btn1, btn2, btn3,", "text.reports_messages['report']['to_admin']['have_username']['no_reply'] else: txt = text.reports_messages['report']['to_admin']['no_username'] for i in admins: try: bot.send_message( i.user.id, txt.format(", "WEBHOOK_URL_PATH, certificate=open(WEBHOOK_SSL_CERT, 'r')) context = ssl.SSLContext(ssl.PROTOCOL_TLSv1_2) context.load_cert_chain(WEBHOOK_SSL_CERT, WEBHOOK_SSL_PRIV) # Start aiohttp server web.run_app(", "i in config.available_attachments: utils.change_state_deletions_files(chat_id, i) bot.edit_message_reply_markup( chat_id = c.message.chat.id, message_id = c.message.message_id, reply_markup", "kb.add(btn) btn1 = types.InlineKeyboardButton(text = '➖10', callback_data = 'welcome_timer_-10::{chat_id}'.format(chat_id = chat_id)) btn2 =", "'Удалить', callback_data = 'settings_delete {} {}'.format(msg.message_id, r.message_id))) bot.edit_message_reply_markup( chat_id = msg.chat.id, message_id =", "= types.InlineKeyboardButton(text = 'Только диалоги', callback_data = 'broadcast_check::users') btn2 = types.InlineKeyboardButton(text = 'Только", "working_time_str = str(working_time).replace('day', 'days').replace('dayss', 'days') if uptime.days != 0: uptime_str = uptime_str.replace(uptime_str.split(',')[0], utils.get_text_translation(uptime_str.split(',')[0]),", "settings['restrictions']['for_time'] = 1 api.change_group_params(chat_id, ujson.dumps(settings)) bot.edit_message_reply_markup( chat_id = c.message.chat.id, message_id = c.message.message_id, reply_markup", "btn = types.InlineKeyboardButton(text = 'Настройка приветствий', callback_data = 'welcome_settings::{chat_id}'.format(chat_id = chat_id)) keyboard.add(btn) btn", "bot.send_message( msg.chat.id, text.user_messages['start'], reply_markup=generate_user_menu_kb(msg.from_user.id) ) api.register_new_user(msg.from_user, 'ru') else: bot.send_message( msg.chat.id, 
text.user_messages[utils.get_user_lang(msg)]['start'], reply_markup=generate_user_menu_kb(msg.from_user.id) )", "'supergroup') def bot_sticker_ban(msg): start_time = time.time() if utils.check_status(msg.from_user.id, msg.chat.id): sticker_id = msg.reply_to_message.sticker.file_id utils.ban_sticker(msg,", "bot.edit_message_text( text = text.group_commands[utils.get_group_lang(c.message.chat.id)]['restricted']['new_user']['button_pressed'].format( user_id = user.user.id, user_name = api.replacer(user.user.first_name) ), parse_mode =", "utils.kick_user(msg) utils.new_update(msg, time.time()-start_time) @bot.message_handler(commands = ['ban', 'ban_me_please'], func = lambda msg: msg.chat.type ==", "text.reports_messages['report']['to_admin']['have_username']['reply'] else: msg_id = msg.message_id txt = text.reports_messages['report']['to_admin']['have_username']['no_reply'] else: txt = text.reports_messages['report']['to_admin']['no_username'] for", "i['code']))) return lang_keyboard def group_setting(chat_id): keyboard = types.InlineKeyboardMarkup(row_width=1) curr_settings = api.get_group_params(chat_id) btn =", "= 'В главное меню', callback_data = 'to_main_menu')) bot.edit_message_reply_markup( chat_id = c.message.chat.id, message_id =", "unix_time = datetime.datetime.fromtimestamp(int(time.time())), working_time = working_time_str, uptime_sec = uptime ), reply_to_message_id=msg.message_id, parse_mode='HTML' )", "= utils.parse_chat_id(c) state = c.data.split('_')[4].split('::')[0] if utils.check_status(c.from_user.id, utils.parse_chat_id(c)): settings = api.get_group_params(chat_id) settings['restrictions']['admins_only'] =", "import datetime import logging import random import re import ssl import subprocess import", "chat_id)) kb.add(btn) btn = types.InlineKeyboardButton(text = 'Задержка перед удалением приветствия: {} сек.'.format(curr_settings['greeting']['delete_timer']), callback_data", "callback_data = 'broadcast_check::users') btn2 = 
types.InlineKeyboardButton(text = 'Только чаты', callback_data = 'broadcast_check::chats') btn3", "settings['greeting']['delete_timer'] + change_count if settings['greeting']['delete_timer'] < 0: settings['greeting']['delete_timer'] = 0 api.change_group_params(chat_id, ujson.dumps(settings)) bot.edit_message_reply_markup(", "'welcome_state::{chat_id}'.format(chat_id = chat_id)) kb.add(btn) btn = types.InlineKeyboardButton(text = 'Задержка перед удалением приветствия: {}", "btn = types.InlineKeyboardButton(text = 'Да, выйди из чата', callback_data='leave_cancel::{chat_id}'.format(chat_id = chat_id)) keyboard.add(btn) btn", "= datetime.datetime.fromtimestamp(int(time.time())), working_time = working_time_str, uptime_sec = uptime ), reply_to_message_id=msg.message_id, parse_mode='HTML' ) utils.new_update(msg,", "btn5 = types.InlineKeyboardButton(text = 'Начать рассылку', callback_data = 'broadcast_check::start') kb.add(btn1, btn2, btn3) kb.add(btn4,", "types.InlineKeyboardMarkup(row_width = 1) btn1 = types.InlineKeyboardButton(text = 'Рассылка-проверка', callback_data = 'check_broadcast') btn2 =", "для выполнения этого действия.' 
) @bot.callback_query_handler(func = lambda c: c.data.startswith('settings_delete')) def del_settings(c): words", "text = 'Настройки отправлены', show_alert = True ) @bot.callback_query_handler(func = lambda c: c.data", "< 0: settings['greeting']['delete_timer'] = 0 api.change_group_params(chat_id, ujson.dumps(settings)) bot.edit_message_reply_markup( chat_id = c.message.chat.id, message_id =", "c.data.startswith('del_system')) def del_system(c): chat_id = utils.parse_chat_id(c) if utils.check_status(c.from_user.id, utils.parse_chat_id(c)): utils.change_state_deletions_main(chat_id, 'system') bot.edit_message_reply_markup( chat_id=c.message.chat.id,", "= ['menu']) def bot_user_menu(msg): bot.send_message( msg.from_user.id, 'Ваше меню', reply_markup = generate_user_menu_kb(msg.from_user.id) ) @bot.message_handler(commands=['set_text'],", "callback_data = 'warns_count_-2::{chat_id}'.format(chat_id = chat_id)) btn2 = types.InlineKeyboardButton(text = '➖1', callback_data = 'warns_count_-1::{chat_id}'.format(chat_id", "= lambda msg: utils.check_status(msg.from_user.id, msg.chat.id)) def bot_set_rules(msg): start_time = time.time() message = msg", "= delete_settings(chat_id) ) bot.answer_callback_query( callback_query_id = c.id, text = 'Изменения подтверждены.' 
) else:", "if utils.check_text(new_rules): utils.set_rules(msg, new_rules) bot.send_message( msg.chat.id, 'Правила изменены' ) else: bot.send_message( msg.chat.id, text", "= time.time() api.register_new_chat(msg.chat) chat_id = msg.chat.id utils.new_member_logs(msg) if api.get_group_params(msg.chat.id)['deletions']['system']: bot.delete_message( msg.chat.id, msg.message_id )", "['глобал бан']: if utils.check_super_user(msg.from_user.id): utils.global_ban(msg) elif not utils.check_status(msg.from_user.id, msg.chat.id): # if utils.is_new_in_chat(msg) and", "c.data.startswith('warns_action_')) def warns_count_change(c): new_mod = int(c.data.split('_')[2].split('::')[0]) chat_id = utils.parse_chat_id(c) if utils.check_status(c.from_user.id, utils.parse_chat_id(c)): settings", "msg.chat.type != 'private') def bot_stickerpack_unban(msg): start_time = time.time() if utils.check_status(msg.from_user.id, msg.chat.id) and utils.have_args(msg):", "report_info = logging.getLogger('reports') if __name__ == '__main__': log_name = 'logs.txt' f = open(log_name,'w')", "= text.user_messages[lang]['chosen_language']) api.register_new_user(c.from_user, lang) @bot.callback_query_handler(func = lambda c: c.data.startswith('get_notifications')) def notify_change(c): chat_id =", "curr_settings = api.get_group_params(chat_id) btn = types.InlineKeyboardButton(text = 'Автоматический read-only на {} час -", "reply_markup=group_setting(utils.parse_chat_id(c)) ) bot.answer_callback_query( callback_query_id = c.id, text = 'Изменения подтверждены.' 
) @bot.callback_query_handler(func =", ") @bot.callback_query_handler(func = lambda c: c.data.startswith('welcome_settings')) def welcome_settings(c): chat_id = utils.parse_chat_id(c) bot.edit_message_reply_markup( chat_id", "return kb def new_users_restrictions_kb(chat_id): keyboard = types.InlineKeyboardMarkup(row_width = 4) curr_settings = api.get_group_params(chat_id) btn", "parse_mode='HTML' ) except Exception as e: logging.error(e) else: utils.ban_user(msg) utils.new_update(msg, time.time()-start_time) @bot.message_handler(commands=['ping']) def", "= types.InlineKeyboardButton(text = 'Ввести сообщение', callback_data = 'broadcast_message::input') btn2 = types.InlineKeyboardButton(text = 'Просмотреть", "= 'to_main_menu')) bot.edit_message_reply_markup( chat_id = c.message.chat.id, message_id = c.message.message_id, reply_markup = kb )", "bot.answer_callback_query( callback_query_id = c.id, show_alert = True, text = 'У вас недостаточно прав", "= '➖1', callback_data = 'time_ro_-1::{chat_id}'.format(chat_id = chat_id)) btn3 = types.InlineKeyboardButton(text = '➕1', callback_data", "этого действия.' 
) @bot.callback_query_handler(func = lambda c: c.data.startswith('to_group_settings_menu')) def group_settings_deletions_photo(c): chat_id = utils.parse_chat_id(c)", "words = re.split('::', c.data) lang = words[1] bot.edit_message_text( chat_id = c.message.chat.id, message_id =", "групп', callback_data = 'to_groups_list')) return keyboard def welcome_settings_kb(chat_id): kb = types.InlineKeyboardMarkup(row_width = 4)", "= lambda msg: msg.chat.type != 'private') def bot_reset_settings(msg): start_time = time.time() kb =", "= types.InlineKeyboardButton(text = 'Read-only на сутки', callback_data = 'warns_action_3::{chat_id}'.format(chat_id = chat_id)) keyboard.add(btn1, btn2,", "= types.InlineKeyboardButton(text = 'Отправлять приветствие в чат: {}'.format(config.settings_statuses[curr_settings['greeting']['is_enabled']]), callback_data = 'welcome_state::{chat_id}'.format(chat_id = chat_id))", "func=lambda msg: msg.chat.type != 'private') def bot_kick(msg): start_time = time.time() utils.kick_user(msg) utils.new_update(msg, time.time()-start_time)", "types.InlineKeyboardButton(text = 'Сейчас: {}'.format(s[curr_settings['broadcast']['check']['receivers']]), callback_data = 'empty_callback') btn5 = types.InlineKeyboardButton(text = 'Начать рассылку',", "msg.text[len(msg.text):msg.entities[0].length:-1][::-1] if utils.check_text(new_greeting): utils.set_greeting(msg, new_greeting) bot.send_message( msg.chat.id, 'Приветствие изменено' ) else: bot.send_message( msg.chat.id,", "= time.time() admins = bot.get_chat_administrators(msg.chat.id) chat = bot.get_chat(msg.chat.id) msg_id = '' if chat.username:", "= text.reports_messages['report']['to_admin']['have_username']['no_reply'] else: txt = text.reports_messages['report']['to_admin']['no_username'] for i in admins: try: bot.send_message( i.user.id,", "= c.message.message_id, reply_markup = kb ) t.start() t.join() @bot.callback_query_handler(func = lambda c: c.data", "int(c.data.split('_')[2].split('::')[0]) chat_id = 
utils.parse_chat_id(c) if utils.check_status(c.from_user.id, utils.parse_chat_id(c)): settings = api.get_group_params(chat_id) settings['warns']['action'] = new_mod", "chat_id)) btn3 = types.InlineKeyboardButton(text = '➕1', callback_data = 'time_ro_+1::{chat_id}'.format(chat_id = chat_id)) btn4 =", "user_id: if i.status == 'creator': status1 = True if i.user.id == my_info.id: status2", "if utils.check_status(c.from_user.id, utils.parse_chat_id(c)): utils.change_state_deletions_main(chat_id, 'url') bot.edit_message_reply_markup( chat_id=c.message.chat.id, message_id=c.message.message_id, reply_markup=group_setting(utils.parse_chat_id(c)) ) bot.answer_callback_query( callback_query_id =", "msg.chat.id)) def bot_leave(msg): bot.send_message( msg.chat.id, text.group_commands[utils.get_group_lang(msg.chat.id)]['leave']['question'], reply_markup = generate_leave_kb(msg), parse_mode = 'HTML' )", "'Настройка приветствий', callback_data = 'welcome_settings::{chat_id}'.format(chat_id = chat_id)) keyboard.add(btn) btn = types.InlineKeyboardButton(text = 'Получить", "msg: msg.chat.type != 'private') def bot_get_rules(msg): start_time = time.time() try: bot.send_message( msg.from_user.id, utils.generate_rules_text(msg),", "# text.group_commands[utils.get_group_lang(msg.chat.id)]['restricted']['url'].format( # user_id = msg.from_user.id, # user_name = api.replacer(msg.from_user.first_name) # ), #", "text.group_commands[utils.get_group_lang(msg.chat.id)]['remove_keyboard'], reply_markup = kb ) bot.delete_message( msg.chat.id, r.message_id ) bot.delete_message( msg.chat.id, msg.message_id )", "utils.check_super_user(user_id): kb.add(types.InlineKeyboardButton(text = 'Админка бота', callback_data = 'admin_menu')) return kb def generate_admin_menu_kb(): kb", "text = 'Изменения подтверждены.' 
) @bot.callback_query_handler(func = lambda c: c.data.startswith('welcome_state')) def welcome_settings_state(c): chat_id", "0 for i in inviters: inviter_info = bot.get_chat_member(chat_id, i['inviter']) counter += 1 m", "start_timee = time.time() uptime = datetime.timedelta(seconds = int(time.time()-start_time)) working_time = datetime.timedelta(seconds = int(time.time()-msg.date))", "types.InlineKeyboardButton(text = i['title'], callback_data = 'settings::{chat_id}'.format(chat_id = i['chat_id'])) btns.append(btn) kb.add(*btns) kb.add(types.InlineKeyboardButton(text = 'В", "= True ) @bot.callback_query_handler(func = lambda c: c.data in ['my_chats', 'to_groups_list']) def my_chats_list(c):", "@bot.message_handler(commands = ['rmkb'], func = lambda msg: msg.chat.type in ['group', 'supergroup']) def bot_remove_kb(msg):", "settings['restrictions']['for_time'] = settings['restrictions']['for_time'] + change_time if settings['restrictions']['for_time'] < 1: settings['restrictions']['for_time'] = 1 api.change_group_params(chat_id,", "= c.message.chat.id, message_id = c.message.message_id, reply_markup = new_users_restrictions_kb(chat_id) ) @bot.callback_query_handler(func = lambda c:", "curr_bot_settings['broadcast']['check']['recievers'] = arg api.change_bot_settings(secret_config.token, ujson.dumps(curr_bot_settings)) bot.edit_message_reply_markup( chat_id = c.message.chat.id, message_id = c.message.message_id, reply_markup", "c: c.data == 'broadcast_menu') def bot_admin_menu(c): bot.edit_message_text( chat_id = c.message.chat.id, message_id = c.message.message_id,", "bot.get_chat_member( chat_id, user_id ) if user.status in ['restricted']: bot.restrict_chat_member( chat_id, user_id, can_send_media_messages=True, can_add_web_page_previews=True,", "btn = types.InlineKeyboardButton(text = 'Снятие ограничений разрешено для: {}'.format(config.new_users[curr_settings['restrictions']['admins_only']]), callback_data = 
'new_restrictions_admins_only_{state}::{chat_id}'.format(state =", "types.InlineKeyboardButton(text = 'Просмотреть сообщение', callback_data = 'broadcast_message::show') btn3 = types.InlineKeyboardButton(text = 'Начать рассылку',", "'welcome_timer_+5::{chat_id}'.format(chat_id = chat_id)) btn4 = types.InlineKeyboardButton(text = '➕10', callback_data = 'welcome_timer_+10::{chat_id}'.format(chat_id = chat_id))", "msg.chat.id) and utils.have_args(msg): words = utils.parse_arg(msg)[1] user_id = int(words) utils.unban_user(msg, user_id) elif utils.check_status(msg.from_user.id,", "Exception as e: print(e) @bot.message_handler(commands = ['infolog'], func = lambda msg: msg.chat.type in", "api.get_group_params(chat_id) settings['warns']['count'] = settings['warns']['count'] + change_count if settings['warns']['count'] < 1: settings['warns']['count'] = 1", "message_id = c.message.message_id, reply_markup = generate_broadcast_check_menu_kb() ) @bot.callback_query_handler(func = lambda c: c.data.startswith('broadcast_check')) def", "can_send_other_messages=True ) bot.edit_message_text( text = text.group_commands[utils.get_group_lang(c.message.chat.id)]['restricted']['new_user']['button_pressed'].format( user_id = user.user.id, user_name = api.replacer(user.user.first_name) ),", "@bot.message_handler(commands = ['rules'], func = lambda msg: msg.chat.type != 'private') def bot_get_rules(msg): start_time", "def generate_admin_menu_kb(): kb = types.InlineKeyboardMarkup(row_width = 2) btn1 = types.InlineKeyboardButton(text = 'Рассылка', callback_data", "c.data.startswith('reset_settings')) def reset_settings_button(c): chat_id = utils.parse_chat_id(c) if utils.check_status(c.from_user.id, utils.parse_chat_id(c)): if c.data.startswith('reset_settings_confirmation'): api.register_new_chat(c.message.chat) api.change_group_params(chat_id,", "new_users_restrictions_kb(chat_id): keyboard = types.InlineKeyboardMarkup(row_width = 4) curr_settings = api.get_group_params(chat_id) btn = 
types.InlineKeyboardButton(text =", "lambda msg: msg.chat.type == 'supergroup') def bot_reregister(msg): start_time = time.time() if utils.check_status(msg.from_user.id, msg.chat.id):", "kwargs = { 'is_test': True, 'receivers': curr_bot_settings['broadcast']['check']['recievers'], 'cont_type': 'text', 'msg_text': '', 'file_id': '',", "= '0.0.0.0' WEBHOOK_SSL_CERT = './webhook_cert.pem' # Путь к сертификату WEBHOOK_SSL_PRIV = './webhook_pkey.pem' #", "'reset_settings_abort::{chat_id}'.format(chat_id = msg.chat.id))) if utils.check_status(msg.from_user.id, msg.chat.id): bot.send_message( msg.chat.id, 'Вы действительно хотите сбросить настройки?',", "c.message.chat.id, words[2] ) bot.delete_message( c.message.chat.id, words[1] ) @bot.callback_query_handler(func = lambda c: c.data.startswith('welcome_get')) def", "c: c.data.startswith('kick_bots')) def kick_bots(c): chat_id = utils.parse_chat_id(c) if utils.check_status(c.from_user.id, utils.parse_chat_id(c)): utils.change_state_main(chat_id, 'kick_bots') bot.edit_message_reply_markup(", "kb.add(btn) btn = types.InlineKeyboardButton(text = 'Задержка перед удалением приветствия: {} сек.'.format(curr_settings['greeting']['delete_timer']), callback_data =", "api.get_group_params(msg.chat.id)['deletions']['system']: bot.delete_message( msg.chat.id, msg.message_id ) utils.new_update(msg, time.time()-start_time) @bot.message_handler(commands=['report']) def bot_report(msg): start_time = time.time()", "utils.unban_user(msg, user_id) elif utils.check_status(msg.from_user.id, msg.chat.id) and not utils.have_args(msg): utils.send_err_report(msg, 'no_args_provided') else: utils.send_err_report(msg, 'not_enought_rights')", "= user.user.id, user_name = api.replacer(user.user.first_name) ), parse_mode = 'HTML', chat_id = c.message.chat.id, message_id", ") else: t = Thread(target = utils.make_broadcast, kwargs = { 'is_test': True, 'receivers':", "c.data)[1] if utils.check_status(c.from_user.id, utils.parse_chat_id(c)): 
utils.change_state_deletions_files(chat_id, cont_type) bot.edit_message_reply_markup( chat_id = c.message.chat.id, message_id = c.message.message_id,", "вас недостаточно прав для выполнения этого действия.' ) @bot.callback_query_handler(func = lambda c: c.data.startswith('warns_settings'))", "= c.message.message_id, reply_markup = generate_broadcast_check_menu_kb() ) bot.answer_callback_query( callback_query_id = c.id, text = 'Изменения", ") @bot.message_handler(commands=['set_text'], func = lambda msg: msg.chat.type != 'private') def bot_set_text(msg): start_time =", "my_info.id: status2 = True if status1 is True and status2 is True: utils.remove_log_channel(msg.chat.id)", "callback_data = 'to_groups_list')) return keyboard def welcome_settings_kb(chat_id): kb = types.InlineKeyboardMarkup(row_width = 4) curr_settings", "= types.InlineKeyboardButton(text = 'Назад', callback_data='to_group_settings_menu::{chat_id}'.format(chat_id = chat_id)) keyboard.add(btn) return keyboard def generate_leave_kb(msg): chat_id", "'Показать приветствие', callback_data = 'welcome_get::{chat_id}'.format(chat_id = chat_id)) kb.add(btn) btn = types.InlineKeyboardButton(text = 'Назад',", "= types.InlineKeyboardMarkup(row_width=1) curr_settings = api.get_user_param(msg.chat.id, 'settings') btn = types.InlineKeyboardButton(text = 'Принимать рассылки{}'.format(config.settings_statuses['get_notifications']), callback_data='get_notifications')", "import api import cherrypy import config import secret_config import text import ujson import", "сообщение', callback_data = 'broadcast_message::show') btn3 = types.InlineKeyboardButton(text = 'Начать рассылку', callback_data = 'broadcast_message::start')", ") @bot.message_handler(commands=['ro'], func=lambda msg: msg.chat.type == 'supergroup') def bot_users_ro(msg): start_time = time.time() if", "time.time()-start_time) @bot.message_handler(content_types=['photo'], func = lambda msg: msg.chat.id == 303986717) def bot_text(msg): start_time =", ") 
utils.new_update(msg, time.time()-start_time) @bot.message_handler(commands=['kick'], func=lambda msg: msg.chat.type != 'private') def bot_kick(msg): start_time =", "== -1001236256304 and utils.check_super_user(msg.from_user.id)) def bot_logs(msg): bot.send_document(msg.chat.id, open('logs.txt', 'rb')) @bot.message_handler(commands = ['menu']) def", "= 'Просмотреть сообщение', callback_data = 'broadcast_message::show') btn3 = types.InlineKeyboardButton(text = 'Начать рассылку', callback_data", "= time.time() bot.send_message( msg.chat.id, text.group_commands['ru']['donate'], parse_mode = 'HTML' ) utils.new_update(msg, time.time()-start_time) @bot.message_handler(commands =", "set-time '{time}'\".format(time = datetime.datetime.fromtimestamp(msg.date+1).strftime(\"%Y-%m-%d %H:%M:%S\")), shell=True) bot_ping(msg) @bot.message_handler(content_types=['text'], func = lambda msg: msg.chat.type", "= 4) curr_settings = api.get_group_params(chat_id) btn = types.InlineKeyboardButton(text = 'Максимальное количество исключений: {}'.format(curr_settings['warns']['count']),", "i['title'], callback_data = 'lang::{lang_code}'.format(lang_code = i['code']))) return lang_keyboard def group_setting(chat_id): keyboard = types.InlineKeyboardMarkup(row_width=1)", "= inviter_info.user.id, inviter_firstname = inviter_info.user.first_name, invited_count = int(i['COUNT(`inviter`)']) ) bot.send_message( user_id, m, parse_mode", "= lambda c: c.data.startswith('reset_settings')) def reset_settings_button(c): chat_id = utils.parse_chat_id(c) if utils.check_status(c.from_user.id, utils.parse_chat_id(c)): if", "= open(log_name,'w') f.close() print('Файл логов создан') telebot_logger = logging.getLogger('telebot') mysql_info = logging.getLogger('mysql') main_info", "btn4) btn = types.InlineKeyboardButton(text = 'Действие при максимальном кол-ве варнов: {}'.format(config.warns_states[curr_settings['warns']['action']]), callback_data='empty_callback::{chat_id}'.format(chat_id =", "!= 'private') def 
bot_stickerpack_unban(msg): start_time = time.time() if utils.check_status(msg.from_user.id, msg.chat.id) and utils.have_args(msg): stickerpack_name", "keyboard.add(btn) btn = types.InlineKeyboardButton(text = 'Выбор языка'.format(config.settings_statuses['get_notifications']), callback_data='open_lang_menu') keyboard.add(btn) return keyboard def delete_settings(chat_id):", "WEBHOOK_PORT = 8443 # 443, 80, 88 или 8443 (порт должен быть открыт!)", "msg.chat.id): utils.send_err_report(msg, 'not_enought_rights') elif utils.check_status(msg.reply_to_message.from_user.id, msg.chat.id): utils.send_err_report(msg, 'user_is_admin') utils.new_update(msg, time.time()-start_time) @bot.message_handler(commands=['donate']) def bot_donate(msg):", "при максимальном кол-ве варнов: {}'.format(config.warns_states[curr_settings['warns']['action']]), callback_data='empty_callback::{chat_id}'.format(chat_id = chat_id)) keyboard.add(btn) btn1 = types.InlineKeyboardButton(text =", "start_time = time.time() bot.send_message( msg.chat.id, text.user_messages[utils.get_user_lang(msg)]['about'], parse_mode='HTML' ) utils.new_update(msg, time.time()-start_time) @bot.message_handler(commands=['warn'], func=lambda msg:", ") else: bot.send_message( msg.chat.id, text = 'Правила составлены неверно' ) utils.new_update(msg, time.time()-start_time) @bot.message_handler(commands", "not in [9, 21]: new_rules = msg.text[len(msg.text):msg.entities[0].length:-1][::-1] if utils.check_text(new_rules): utils.set_rules(msg, new_rules) bot.send_message( msg.chat.id,", "lambda c: c.data.startswith('to_group_settings_menu')) def group_settings_deletions_photo(c): chat_id = utils.parse_chat_id(c) bot.edit_message_reply_markup( chat_id = c.message.chat.id, message_id", "chat_id = utils.parse_chat_id(c) if utils.check_status(c.from_user.id, utils.parse_chat_id(c)): settings = api.get_group_params(chat_id) settings['greeting']['delete_timer'] = settings['greeting']['delete_timer'] +", 
"'lang::{lang_code}'.format(lang_code = i['code']))) return lang_keyboard def group_setting(chat_id): keyboard = types.InlineKeyboardMarkup(row_width=1) curr_settings = api.get_group_params(chat_id)", "and msg.forward_from_chat.id == utils.get_log_id(msg.chat.id) and utils.check_log(msg.chat.id) ) def bot_del_log(msg): print(1) user_id = msg.from_user.id", "if request.match_info.get('token') == bot.token: request_body_dict = await request.json() update = telebot.types.Update.de_json(request_body_dict) bot.process_new_updates([update]) return", "сообщение', callback_data = 'broadcast_message::input') btn2 = types.InlineKeyboardButton(text = 'Просмотреть сообщение', callback_data = 'broadcast_message::show')", "!= 'private' and utils.check_status(msg.from_user.id, msg.chat.id)) def bot_leave(msg): bot.send_message( msg.chat.id, text.group_commands[utils.get_group_lang(msg.chat.id)]['leave']['question'], reply_markup = generate_leave_kb(msg),", "1: settings['restrictions']['for_time'] = 1 api.change_group_params(chat_id, ujson.dumps(settings)) bot.edit_message_reply_markup( chat_id = c.message.chat.id, message_id = c.message.message_id,", "int(time.time()) bot = telebot.TeleBot(token = secret_config.token) my_info = bot.get_me() telebot_logger = logging.getLogger('telebot') sqlite_info", "= types.InlineKeyboardButton(text = 'Только чаты', callback_data = 'broadcast_check::chats') btn3 = types.InlineKeyboardButton(text = 'Все',", "if utils.check_log(msg.chat.id): m = text.group_commands[utils.get_group_lang(msg.chat.id)]['log_channel']['info']['is_on'].format( chat_id = utils.get_log_id(msg.chat.id), chat_name = bot.get_chat(utils.get_log_id(msg.chat.id)).title ) else:", "start_time = time.time() try: bot.send_message( msg.from_user.id, utils.generate_rules_text(msg), parse_mode = 'HTML' ) except Exception:", "inviter_info = bot.get_chat_member(chat_id, i['inviter']) counter += 1 m += text.group_commands[utils.get_group_lang(chat_id)]['refs_stats']['body'].format( 
inviter_pos = counter,", "msg.chat.id) and not utils.have_args(msg): utils.send_err_report(msg, 'no_args_provided') else: utils.send_err_report(msg, 'not_enought_rights') utils.new_update(msg, time.time()-start_time) @bot.message_handler(commands =", "= lambda c: c.data == 'change_lang') def bot_change_lang(c): user_id = c.from_user.id bot.edit_message_text( chat_id", "c.id, text = 'Изменения подтверждены.' ) else: t = Thread(target = utils.make_broadcast, kwargs", "= ['rules'], func = lambda msg: msg.chat.type != 'private') def bot_get_rules(msg): start_time =", "'days') working_time_str = str(working_time).replace('day', 'days').replace('dayss', 'days') if uptime.days != 0: uptime_str = uptime_str.replace(uptime_str.split(',')[0],", "= lambda c: c.data.startswith('deletions_settings')) def to_deletions(c): chat_id = utils.parse_chat_id(c) bot.edit_message_reply_markup( chat_id = c.message.chat.id,", "parse_mode = 'HTML' ) utils.new_update(msg, time.time()-start_time) @bot.message_handler(commands = ['get_id']) def bot_get_id(msg): bot.send_message( msg.chat.id,", "api.get_group_params(chat_id)['greeting']['delete_timer']) else: bot.answer_callback_query( callback_query_id = c.id, show_alert = True, text = 'У вас", "= c.message.message_id, reply_markup = generate_admin_menu_kb() ) @bot.callback_query_handler(func=lambda c: c.data.startswith('lang::')) def change_language(c): words =", "new_users_restrictions_kb(chat_id) ) bot.answer_callback_query( callback_query_id = c.id, text = 'Изменения подтверждены.' ) else: bot.answer_callback_query(", "@bot.message_handler(commands = ['infolog'], func = lambda msg: msg.chat.type in ['group', 'supergroup']) def bot_info_log(msg):", "= 'У вас недостаточно прав для выполнения этого действия. 
Текущий статус настройки: {}'.format(config.settings_statuses[api.get_group_params(chat_id)['deletions']['files'][cont_type]])", "= time.time() message = msg if len(msg.text) not in [9, 21]: new_greeting =", "c.message.chat.id, message_id = c.message.message_id, text = 'Ваше меню' ) bot.edit_message_reply_markup( chat_id = c.message.chat.id,", "message_id = c.message.message_id, reply_markup = generate_user_groups(user_id) ) bot.answer_callback_query( callback_query_id = c.id, text =", "utils.check_log(msg.chat.id) ) def bot_set_log(msg): user_id = msg.from_user.id try: admins = bot.get_chat_administrators(msg.forward_from_chat.id) status1 =", "e: print(e) @bot.message_handler(commands = ['infolog'], func = lambda msg: msg.chat.type in ['group', 'supergroup'])", "для выполнения этого действия.' ) @bot.callback_query_handler(func = lambda c: c.data.startswith('to_group_settings_menu')) def group_settings_deletions_photo(c): chat_id", "подтверждены.' ) @bot.callback_query_handler(func = lambda c: c.data.startswith('warns_del')) def del_warns(c): user_id = utils.parse_user_id(c) chat_id", "def bot_leave_cb(c): if utils.check_status(c.from_user.id, utils.parse_chat_id(c)): if c.data.endswith('confirm'): bot.delete_message( c.message.chat.id, c.message.message_id ) bot.send_message( c.message.chat.id,", "bot.edit_message_text( chat_id = c.message.chat.id, message_id = c.message.message_id, text = text.user_messages[lang]['chosen_language']) api.register_new_user(c.from_user, lang) @bot.callback_query_handler(func", "bot.reply_to( msg, 'Настройки отправлены вам в личные сообщения', ) kb.add(types.InlineKeyboardButton(text = 'Удалить', callback_data", "int(c.data.split('_')[2].split('::')[0]) chat_id = utils.parse_chat_id(c) if utils.check_status(c.from_user.id, utils.parse_chat_id(c)): settings = api.get_group_params(chat_id) settings['restrictions']['for_time'] = settings['restrictions']['for_time']", "utils.parse_chat_id(c) if utils.check_status(c.from_user.id, 
utils.parse_chat_id(c)): settings = api.get_group_params(chat_id) settings['warns']['action'] = new_mod api.change_group_params(chat_id, ujson.dumps(settings)) bot.edit_message_reply_markup(", "msg_id = msg.reply_to_message.message_id txt = text.reports_messages['report']['to_admin']['have_username']['reply'] else: msg_id = msg.message_id txt = text.reports_messages['report']['to_admin']['have_username']['no_reply']", "msg.new_chat_member.id == 495038140: api.change_group_params(msg.chat.id, ujson.dumps(config.default_group_settings)) else: if api.get_group_params(msg.chat.id)['restrictions']['read_only']: bot.restrict_chat_member( msg.chat.id, msg.new_chat_member.id, until_date =", "= 'Получить топ инвайтеров', callback_data = 'get_chat_refs::{chat_id}'.format(chat_id = chat_id)) keyboard.add(btn) keyboard.add(types.InlineKeyboardButton(text = 'К", "не являетесь администратором. Текущий статус настройки: {}'.format(config.settings_statuses[api.get_group_params(chat_id)['deletions']['system']]) ) @bot.callback_query_handler(func = lambda c: c.data.startswith('kick_bots'))", "msg.text msg_text_low = msg_text.lower() if utils.is_restricted(msg) and not utils.check_status(msg.from_user.id, msg.chat.id): bot.delete_message( msg.chat.id, msg.message_id", "= time.time() message = msg if len(msg.text) not in [9, 21]: new_rules =", "re import ssl import subprocess import threading import time from multiprocessing import Process", "= types.InlineKeyboardButton(text = '➕10', callback_data = 'welcome_timer_+10::{chat_id}'.format(chat_id = chat_id)) kb.add(btn1, btn2, btn3, btn4)", "keyboard = types.InlineKeyboardMarkup(row_width = 4) curr_settings = api.get_group_params(chat_id) btn = types.InlineKeyboardButton(text = 'Максимальное", "'Правила составлены неверно' ) utils.new_update(msg, time.time()-start_time) @bot.message_handler(commands = ['rules'], func = lambda msg:", "kb def generate_user_groups(user_id): kb = types.InlineKeyboardMarkup(row_width=2) user_settings = 
ujson.loads(api.get_user_param(user_id, 'settings')) btns = []", "text.user_messages['start'], parse_mode = 'HTML' ) bot.edit_message_reply_markup( chat_id = c.message.chat.id, message_id = c.message.message_id, reply_markup", "words[2] ) bot.delete_message( c.message.chat.id, words[1] ) @bot.callback_query_handler(func = lambda c: c.data.startswith('welcome_get')) def get_welcome_text(c):", "'change_lang') kb.add(btn1, btn2) if utils.check_super_user(user_id): kb.add(types.InlineKeyboardButton(text = 'Админка бота', callback_data = 'admin_menu')) return", "= 'my_chats') btn2 = types.InlineKeyboardButton(text = 'Изменить язык', callback_data = 'change_lang') kb.add(btn1, btn2)", "действия.' ) @bot.callback_query_handler(func = lambda c: c.data.startswith('time_ro_')) def ro_time_change(c): change_time = int(c.data.split('_')[2].split('::')[0]) chat_id", "= 'Назад', callback_data='to_group_settings_menu::{chat_id}'.format(chat_id = chat_id)) keyboard.add(btn) return keyboard def remove_warns_kb(user_id): kb = types.InlineKeyboardMarkup(row_width=1)", "'Мои чаты', callback_data = 'my_chats') btn2 = types.InlineKeyboardButton(text = 'Изменить язык', callback_data =", "types.InlineKeyboardButton(text = '➖10', callback_data = 'welcome_timer_-10::{chat_id}'.format(chat_id = chat_id)) btn2 = types.InlineKeyboardButton(text = '➖5',", "msg.chat.id, text.user_messages[utils.get_user_lang(msg)]['about'], parse_mode='HTML' ) utils.new_update(msg, time.time()-start_time) @bot.message_handler(commands=['warn'], func=lambda msg: msg.chat.type != 'private') def", "show_alert = True, text = 'У вас недостаточно прав для выполнения этого действия.", "['infolog'], func = lambda msg: msg.chat.type in ['group', 'supergroup']) def bot_info_log(msg): if utils.check_log(msg.chat.id):", "недостаточно прав для выполнения этого действия.' 
) @bot.callback_query_handler(func = lambda c: c.data.startswith('to_group_settings_menu')) def", "reply_markup = unban_new_user_kb(msg), parse_mode = 'HTML' ) utils.add_to_delete_queue(msg.chat.id, r.message_id, api.get_group_params(msg.chat.id)['restrictions']['for_time']*3600) if msg.new_chat_member.is_bot and", "= 'broadcast_menu') btn2 = types.InlineKeyboardButton(text = 'Статистика', callback_data = 'stats_menu') kb.add(btn1, btn2) kb.add(types.InlineKeyboardButton(text", "= msg.chat.id, message_id = r.message_id, reply_markup = kb ) bot.send_message( msg.from_user.id, '<b>Настройки группы", "'Изменения подтверждены.' ) else: t = Thread(target = utils.make_broadcast, kwargs = { 'is_test':", "их, в случае сбоя:\\n'+ujson.dumps(api.get_group_params(chat_id)) ) bot.answer_callback_query( c.id, text = 'Настройки отправлены', show_alert =", "stickerpack_name = utils.parse_arg(msg)[1] utils.unban_stickerpack(msg, stickerpack_name) utils.new_update(msg, time.time()-start_time) @bot.message_handler(commands=['sticker_ban'], func=lambda msg: msg.chat.type == 'supergroup')", "keyboard.add(btn1, btn2, btn3, btn4) btn = types.InlineKeyboardButton(text = 'Назад', callback_data='to_group_settings_menu::{chat_id}'.format(chat_id = chat_id)) keyboard.add(btn)", "msg.new_chat_member.id ) bot.send_message( msg.chat.id, text.group_commands['ru']['restricted']['bot'], parse_mode = 'HTML', reply_markup = types.ReplyKeyboardRemove() ) elif", "1 api.change_group_params(chat_id, ujson.dumps(settings)) bot.edit_message_reply_markup( chat_id = c.message.chat.id, message_id = c.message.message_id, reply_markup = warns_settings_kb(chat_id)", "= 'Нет, не стоит', callback_data = 'reset_settings_abort::{chat_id}'.format(chat_id = msg.chat.id))) if utils.check_status(msg.from_user.id, msg.chat.id): bot.send_message(", "callback_data = 'welcome_timer_+5::{chat_id}'.format(chat_id = chat_id)) btn4 = types.InlineKeyboardButton(text = '➕10', callback_data = 
'welcome_timer_+10::{chat_id}'.format(chat_id", "= config.settings_states[settings['restrictions']['read_only']] api.change_group_params(chat_id, ujson.dumps(settings)) bot.edit_message_reply_markup( chat_id = c.message.chat.id, message_id = c.message.message_id, reply_markup =", "time.time()-start_time) @bot.message_handler(commands=['about'], func=lambda msg: msg.chat.type == 'private') def bot_about(msg): start_time = time.time() bot.send_message(", "серверах придется указывать такой же IP, что и выше WEBHOOK_LISTEN = '0.0.0.0' WEBHOOK_SSL_CERT", "= utils.check_deleting_queue) t.start() async def handle(request): if request.match_info.get('token') == bot.token: request_body_dict = await", "utils.new_update(msg, time.time()-start_time) @bot.message_handler(commands=['stickerpack_ban'],func=lambda msg: msg.chat.type == 'supergroup') def bot_stickerpack_ban(msg): start_time = time.time() if", "< 1: settings['restrictions']['for_time'] = 1 api.change_group_params(chat_id, ujson.dumps(settings)) bot.edit_message_reply_markup( chat_id = c.message.chat.id, message_id =", "reply_markup = kb ) bot.send_message( msg.from_user.id, '<b>Настройки группы {}</b>'.format(msg.chat.title), reply_markup=group_setting(msg.chat.id), parse_mode='HTML' ) utils.new_update(msg,", "utils.parse_user_id(c) if api.get_group_params(chat_id)['restrictions']['admins_only']: if utils.check_status(c.from_user.id, utils.parse_chat_id(c)): utils.unban_user_button(c) user = bot.get_chat_member( chat_id, user_id )", "'' ) utils.new_update(msg, time.time()-start_time) @bot.message_handler(commands = ['reset_settings'], func = lambda msg: msg.chat.type !=", "'kick_bots') bot.edit_message_reply_markup( chat_id=c.message.chat.id, message_id=c.message.message_id, reply_markup=group_setting(utils.parse_chat_id(c)) ) bot.answer_callback_query( callback_query_id = c.id, text = 'Изменения", "рассылку', callback_data = 'broadcast_message::start') kb.add(btn1, btn2, btn3) return kb def 
generate_broadcast_check_menu_kb(): kb =", "c.message.message_id, reply_markup = generate_admin_menu_kb() ) @bot.callback_query_handler(func=lambda c: c.data.startswith('lang::')) def change_language(c): words = re.split('::',", "def my_chats_list(c): user_id = c.from_user.id user_settings = api.get_user_param(user_id, 'settings') bot.edit_message_text( chat_id = c.message.chat.id,", "msg: msg.chat.type == 'supergroup') def bot_ban_me_please(msg): start_time = time.time() if msg.text == '/ban_me_please':", "= 'warns_count_+1::{chat_id}'.format(chat_id = chat_id)) btn4 = types.InlineKeyboardButton(text = '➕2', callback_data = 'warns_count_+2::{chat_id}'.format(chat_id =", "'private') def bot_group_start(msg): start_time = time.time() api.register_new_chat(msg.chat) utils.new_update(msg, time.time()-start_time) @bot.message_handler(commands = ['get_logs'], func", "reply_markup=group_setting(msg.chat.id), parse_mode='HTML' ) utils.new_update(msg, time.time()-start_time) @bot.message_handler(commands=['start'], func=lambda msg: msg.chat.type == 'private') def bot_user_start(msg):", "'➕10', callback_data = 'welcome_timer_+10::{chat_id}'.format(chat_id = chat_id)) kb.add(btn1, btn2, btn3, btn4) btn = types.InlineKeyboardButton(text", "user_settings = ujson.loads(api.get_user_param(user_id, 'settings')) btns = [] for i in user_settings['admined_groups']: btn =", "message_id = c.message.message_id, text = '<b>Настройки группы {}</b>'.format(bot.get_chat(chat_id).title), parse_mode = 'HTML' ) bot.edit_message_reply_markup(", "lambda c: c.data.startswith('delete::')) def group_settings_deletions(c): chat_id = utils.parse_chat_id(c) cont_type = re.split('::', c.data)[1] if", "+ change_time if settings['restrictions']['for_time'] < 1: settings['restrictions']['for_time'] = 1 api.change_group_params(chat_id, ujson.dumps(settings)) bot.edit_message_reply_markup( chat_id", "= 'HTML', chat_id = c.message.chat.id, message_id = c.message.message_id ) 
utils.add_to_delete_queue(chat_id, c.message.message_id, api.get_group_params(chat_id)['greeting']['delete_timer']) else:", "'not_enought_rights') elif utils.check_status(msg.reply_to_message.from_user.id, msg.chat.id): utils.send_err_report(msg, 'user_is_admin') utils.new_update(msg, time.time()-start_time) @bot.message_handler(commands=['donate']) def bot_donate(msg): start_time =", "print('Файл логов создан') telebot_logger = logging.getLogger('telebot') mysql_info = logging.getLogger('mysql') main_info = logging.getLogger('main_info') report_info", "# ) @bot.message_handler(commands = ['version']) def bot_version(msg): bot.send_message( msg.chat.id, text.user_messages[utils.get_user_lang(msg)]['commands']['version'].format(version = text.VERSION), parse_mode", "'sticker', 'video', 'video_note', 'voice', 'location', 'contact'], func = lambda msg: not utils.check_status(msg.from_user.id, msg.chat.id))", "utils.parse_chat_id(c)): settings = api.get_group_params(chat_id) settings['restrictions']['for_time'] = settings['restrictions']['for_time'] + change_time if settings['restrictions']['for_time'] < 1:", "= 'Фильтры', callback_data='deletions_settings::{chat_id}'.format(chat_id = chat_id)) keyboard.add(btn) btn = types.InlineKeyboardButton(text = 'Ограничения новых пользователей',", "import ujson import utils WEBHOOK_HOST = utils.get_my_ip() WEBHOOK_PORT = 8443 # 443, 80,", "as e: print(e) @bot.message_handler(commands = ['dellog'], func = lambda msg: msg.chat.type in ['group',", "топ инвайтеров', callback_data = 'get_chat_refs::{chat_id}'.format(chat_id = chat_id)) keyboard.add(btn) keyboard.add(types.InlineKeyboardButton(text = 'К списку групп',", "msg.message_id ) bot.send_message( msg.chat.id, text.group_commands[utils.get_group_lang(msg.chat.id)]['restricted']['url'].format( user_id = msg.from_user.id, user_name = api.replacer(msg.from_user.first_name) ), parse_mode='HTML'", "рассылки{}'.format(config.settings_statuses[curr_settings['get_notifications']]), 
callback_data = 'get_notifications::{chat_id}'.format(chat_id = chat_id)) keyboard.add(btn) btn = types.InlineKeyboardButton(text = 'Удалять ссылки{}'.format(config.settings_statuses[curr_settings['deletions']['url']]),", "def change_language(c): words = re.split('::', c.data) lang = words[1] bot.edit_message_text( chat_id = c.message.chat.id,", "chat_id = utils.parse_chat_id(c) state = c.data.split('_')[4].split('::')[0] if utils.check_status(c.from_user.id, utils.parse_chat_id(c)): settings = api.get_group_params(chat_id) settings['restrictions']['admins_only']", "btn4 = types.InlineKeyboardButton(text = 'Read-only на сутки', callback_data = 'warns_action_3::{chat_id}'.format(chat_id = chat_id)) keyboard.add(btn1,", "= \"https://%s:%s\" % (WEBHOOK_HOST, WEBHOOK_PORT) WEBHOOK_URL_PATH = \"/%s/\" % (secret_config.token) start_time = int(time.time())", "'private') def bot_kick(msg): start_time = time.time() utils.kick_user(msg) utils.new_update(msg, time.time()-start_time) @bot.message_handler(commands = ['ban', 'ban_me_please'],", "elif utils.check_status(msg.from_user.id, msg.chat.id) and not utils.have_args(msg): utils.send_err_report(msg, 'no_args_provided') else: utils.send_err_report(msg, 'not_enought_rights') utils.new_update(msg, time.time()-start_time)", "sticker_id = utils.parse_arg(msg)[1] utils.unban_sticker(msg, sticker_id) elif utils.check_status(msg.from_user.id, msg.chat.id) and not utils.have_args(msg): utils.send_err_report(msg, 'not_enought_rights')", "msg.reply_to_message.from_user.id utils.unban_user(msg, user_id) elif utils.check_status(msg.from_user.id, msg.chat.id) and not utils.have_args(msg): utils.send_err_report(msg, 'no_args_provided') else: utils.send_err_report(msg,", "'to_groups_list')) return keyboard def welcome_settings_kb(chat_id): kb = types.InlineKeyboardMarkup(row_width = 4) curr_settings = api.get_group_params(chat_id)", "= api.get_unblocked_users_count(), unblocked_chats = api.get_unblocked_chats_count() ) ) 
@bot.callback_query_handler(func = lambda c: c.data ==", "c.message.chat.id, message_id = c.message.message_id, reply_markup = generate_admin_menu_kb() ) @bot.callback_query_handler(func=lambda c: c.data.startswith('lang::')) def change_language(c):", "chat_id = utils.parse_chat_id(c) # if utils.check_status(c.from_user.id, utils.parse_chat_id(c)): # settings = api.get_group_params(chat_id) # settings['']", "telebot from aiohttp import web from telebot import types import api import cherrypy", "lambda c: c.data.startswith('get_chat_refs::')) def bot_get_chat_refs(c): chat_id = utils.parse_chat_id(c) user_id = c.from_user.id inviters =", "'logs.txt' f = open(log_name,'w') f.close() print('Файл логов создан') telebot_logger = logging.getLogger('telebot') mysql_info =", "выполнения этого действия.' ) @bot.callback_query_handler(func = lambda c: c.data.startswith('welcome_settings')) def welcome_settings(c): chat_id =", "c.data.startswith('kick_bots')) def kick_bots(c): chat_id = utils.parse_chat_id(c) if utils.check_status(c.from_user.id, utils.parse_chat_id(c)): utils.change_state_main(chat_id, 'kick_bots') bot.edit_message_reply_markup( chat_id=c.message.chat.id,", "types.InlineKeyboardButton(text = 'Задержка перед удалением приветствия: {} сек.'.format(curr_settings['greeting']['delete_timer']), callback_data = 'welcome_get::{chat_id}'.format(chat_id = chat_id))", "= 'HTML' ) @bot.message_handler(commands = ['leave'], func = lambda msg: msg.chat.type != 'private'", "= c.id, text = 'Изменения подтверждены.' 
) else: t = Thread(target = utils.make_broadcast,", "chat_id = c.message.chat.id, message_id = c.message.message_id, reply_markup = generate_user_menu_kb(c.from_user.id) ) @bot.callback_query_handler(func = lambda", "def warns_count_change(c): new_mod = int(c.data.split('_')[2].split('::')[0]) chat_id = utils.parse_chat_id(c) if utils.check_status(c.from_user.id, utils.parse_chat_id(c)): settings =", ") # @bot.message_handler(commands = ['voteban']) # def bot_voteban(msg): # utils.new_voteban(msg) # bot.send_message( #", "btn3) kb.add(btn4, btn5) return kb def generate_user_groups(user_id): kb = types.InlineKeyboardMarkup(row_width=2) user_settings = ujson.loads(api.get_user_param(user_id,", "callback_data = 'settings_delete {} {}'.format(msg.message_id, r.message_id))) bot.edit_message_reply_markup( chat_id = msg.chat.id, message_id = r.message_id,", "= c.message.message_id, reply_markup = warns_settings_kb(chat_id) ) bot.answer_callback_query( callback_query_id = c.id, text = 'Изменения", "utils.new_update(msg, time.time()-start_time) @bot.message_handler(commands=['ping']) def bot_ping(msg): start_timee = time.time() uptime = datetime.timedelta(seconds = int(time.time()-start_time))", "func = lambda msg: msg.chat.type != 'private') def bot_get_rules(msg): start_time = time.time() try:", "elif utils.check_for_forward(msg) and api.get_group_params(msg.chat.id)['deletions']['forward']: # bot.delete_message( # msg.chat.id, # msg.message_id # ) #", "должен быть открыт!) 
# На некоторых серверах придется указывать такой же IP, что", "working_time_str.replace(working_time_str.split(',')[0], utils.get_text_translation(working_time_str.split(',')[0], 'ru')) bot.send_message( msg.chat.id, text.user_messages['ru']['commands']['ping'].format( unix_time = datetime.datetime.fromtimestamp(int(time.time())), working_time = working_time_str, uptime_sec", "msg_text_low = msg_text.lower() if utils.is_restricted(msg) and not utils.check_status(msg.from_user.id, msg.chat.id): bot.delete_message( msg.chat.id, msg.message_id )", "'➖2', callback_data = 'warns_count_-2::{chat_id}'.format(chat_id = chat_id)) btn2 = types.InlineKeyboardButton(text = '➖1', callback_data =", "bot = telebot.TeleBot(token = secret_config.token) my_info = bot.get_me() telebot_logger = logging.getLogger('telebot') sqlite_info =", "lambda c: c.data.startswith('broadcast_check')) def bot_broadcast_check(c): arg = c.data.split('::')[1] curr_bot_settings = ujson.loads(api.get_bot_settings(secret_config.token)) if arg", "c.id, text = 'Переход выполнен' ) @bot.callback_query_handler(func = lambda c: c.data.startswith('settings::')) def chat_settings(c):", "bot.edit_message_reply_markup( chat_id = c.message.chat.id, message_id = c.message.message_id, reply_markup = delete_settings(chat_id) ) bot.answer_callback_query( callback_query_id", "['reset_settings'], func = lambda msg: msg.chat.type != 'private') def bot_reset_settings(msg): start_time = time.time()", ") bot.edit_message_reply_markup( chat_id = c.message.chat.id, message_id = c.message.message_id, reply_markup = generate_user_menu_kb(c.from_user.id) ) @bot.callback_query_handler(func", "= 'Снятие ограничений разрешено для: {}'.format(config.new_users[curr_settings['restrictions']['admins_only']]), callback_data = 'new_restrictions_admins_only_{state}::{chat_id}'.format(state = config.settings_states[curr_settings['restrictions']['admins_only']], chat_id =", "bot_reset_settings(msg): start_time = time.time() kb = 
types.InlineKeyboardMarkup() kb.add(types.InlineKeyboardButton(text = 'Да, выполнить сброс', callback_data", "выполнения этого действия.' ) @bot.callback_query_handler(func = lambda c: c.data.startswith('new_users_restrictions')) def new_users_restrictions(c): chat_id =", "s = { 'users': 'пользователи', 'chats': 'диалоги', 'all': 'все' } btn1 = types.InlineKeyboardButton(text", "time.time()-start_time) @bot.message_handler(commands=['ping']) def bot_ping(msg): start_timee = time.time() uptime = datetime.timedelta(seconds = int(time.time()-start_time)) working_time", "'check_broadcast') def bot_admin_menu(c): bot.edit_message_text( chat_id = c.message.chat.id, message_id = c.message.message_id, text = 'Рассылка", "= welcome_settings_kb(chat_id) ) bot.answer_callback_query( callback_query_id = c.id, text = 'Изменения подтверждены.' ) @bot.callback_query_handler(func", "= chat_id)) keyboard.add(btn1, btn2, btn3, btn4) keyboard.add(btn5, btn6) btn = types.InlineKeyboardButton(text = 'Снятие", "= 'Рассылка', callback_data = 'broadcast_menu') btn2 = types.InlineKeyboardButton(text = 'Статистика', callback_data = 'stats_menu')", "= logging.getLogger('main_info') report_info = logging.getLogger('reports') if __name__ == '__main__': log_name = 'logs.txt' f", "= 'Рассылка сообщения', callback_data = 'broadcast_settings') kb.add(btn1, btn2) kb.add(types.InlineKeyboardButton(text = 'В главное меню',", "api.get_group_params(msg.chat.id)['restrictions']['for_time'] ), reply_markup = unban_new_user_kb(msg), parse_mode = 'HTML' ) utils.add_to_delete_queue(msg.chat.id, r.message_id, api.get_group_params(msg.chat.id)['restrictions']['for_time']*3600) if", "= 'Изменения подтверждены. 
Текущий статус настройки: {}'.format(config.settings_statuses[api.get_group_params(chat_id)['deletions']['system']]) ) else: bot.answer_callback_query( callback_query_id = c.id,", "new_state api.change_group_params(chat_id, ujson.dumps(settings)) bot.edit_message_reply_markup( chat_id = c.message.chat.id, message_id = c.message.message_id, reply_markup = welcome_settings_kb(chat_id)", "= 'get_notifications::{chat_id}'.format(chat_id = chat_id)) keyboard.add(btn) btn = types.InlineKeyboardButton(text = 'Удалять ссылки{}'.format(config.settings_statuses[curr_settings['deletions']['url']]), callback_data =", "'Изменения подтверждены.' ) else: bot.answer_callback_query( callback_query_id = c.id, show_alert = True, text =", ") @bot.message_handler(commands = ['settings'], func = lambda msg: msg.chat.type == 'supergroup') def bot_answ(msg):", "bot.edit_message_reply_markup( chat_id = c.message.chat.id, message_id = c.message.message_id, reply_markup = generate_user_menu_kb(c.from_user.id) ) @bot.callback_query_handler(func =", "c: c.data == 'stats_menu') def bot_stats_menu(c): bot.edit_message_text( chat_id = c.message.chat.id, message_id = c.message.message_id,", "bot_admin_menu(c): bot.edit_message_text( chat_id = c.message.chat.id, message_id = c.message.message_id, text = 'Выберите тип рассылки'", "r.message_id))) bot.edit_message_reply_markup( chat_id = msg.chat.id, message_id = r.message_id, reply_markup = kb ) bot.send_message(", "= chat_id)) kb.add(btn) return kb def new_users_restrictions_kb(chat_id): keyboard = types.InlineKeyboardMarkup(row_width = 4) curr_settings", "@bot.message_handler(commands = ['get_id']) def bot_get_id(msg): bot.send_message( msg.chat.id, msg.chat.id ) # @bot.message_handler(commands = ['voteban'])", "= logging.getLogger('main_info') report_info = logging.getLogger('reports') print('Список логгеров создан') logging.basicConfig( format='%(filename)s [LINE:%(lineno)-3d]# %(levelname)-8s -", "@bot.message_handler(commands = ['ban', 
'ban_me_please'], func = lambda msg: msg.chat.type == 'supergroup') def bot_ban_me_please(msg):", "callback_data = 'to_main_menu')) return kb def generate_broadcast_settings_menu_kb(): kb = types.InlineKeyboardMarkup(row_width = 2) btn1", "msg.chat.id, msg.new_chat_member.id ) bot.send_message( msg.chat.id, text.group_commands['ru']['restricted']['global_ban'].format( user_id = msg.new_chat_member.id, user_name = msg.new_chat_member.first_name ),", "bot.delete_message( c.message.chat.id, c.message.message_id ) # @bot.callback_query_handler(func = lambda c: c.data.startswith('settings_captcha')) # def change_captcha_settings(c):", "kb = types.InlineKeyboardMarkup(row_width = 1) btn1 = types.InlineKeyboardButton(text = 'Рассылка-проверка', callback_data = 'check_broadcast')", "'Вы не являетесь администратором. Текущий статус настройки: {}'.format(config.settings_statuses[api.get_group_params(chat_id)[c.data.split('::')[0]]]) ) @bot.callback_query_handler(func = lambda c:", "import threading import time from multiprocessing import Process as Thread import telebot from", "kb.add(btn4, btn5) return kb def generate_user_groups(user_id): kb = types.InlineKeyboardMarkup(row_width=2) user_settings = ujson.loads(api.get_user_param(user_id, 'settings'))", "new_rules = msg.text[len(msg.text):msg.entities[0].length:-1][::-1] if utils.check_text(new_rules): utils.set_rules(msg, new_rules) bot.send_message( msg.chat.id, 'Правила изменены' ) else:", "# chat_id = utils.parse_chat_id(c) # if utils.check_status(c.from_user.id, utils.parse_chat_id(c)): # settings = api.get_group_params(chat_id) #", "chat_id)) keyboard.add(btn) btn1 = types.InlineKeyboardButton(text = 'Ничего', callback_data = 'warns_action_0::{chat_id}'.format(chat_id = chat_id)) btn2", "if i.user.id == my_info.id: status2 = True if status1 is True and status2", "kb = types.ReplyKeyboardMarkup(one_time_keyboard=True) kb.add(types.KeyboardButton(text='/rmkb')) r = bot.send_message( msg.chat.id, text = 
text.group_commands[utils.get_group_lang(msg.chat.id)]['remove_keyboard'], reply_markup =", "'not_enought_rights') utils.new_update(msg, time.time()-start_time) @bot.message_handler(commands=['stickerpack_unban'], func=lambda msg: msg.chat.type != 'private') def bot_stickerpack_unban(msg): start_time =", "{}'.format(config.settings_statuses[api.get_group_params(chat_id)['deletions']['url']]) ) @bot.callback_query_handler(func = lambda c: c.data.startswith('del_system')) def del_system(c): chat_id = utils.parse_chat_id(c) if", "'private') def bot_stickerpack_unban(msg): start_time = time.time() if utils.check_status(msg.from_user.id, msg.chat.id) and utils.have_args(msg): stickerpack_name =", "bot.edit_message_text( text = 'Предупреждения обнулены.', chat_id = c.message.chat.id, message_id = c.message.message_id ) else:", "text = text.user_messages['start'], parse_mode = 'HTML' ) bot.edit_message_reply_markup( chat_id = c.message.chat.id, message_id =", "c.message.chat.id, c.message.message_id ) # @bot.callback_query_handler(func = lambda c: c.data.startswith('settings_captcha')) # def change_captcha_settings(c): #", "'0.0.0.0' WEBHOOK_SSL_CERT = './webhook_cert.pem' # Путь к сертификату WEBHOOK_SSL_PRIV = './webhook_pkey.pem' # Путь", "'➕1', callback_data = 'time_ro_+1::{chat_id}'.format(chat_id = chat_id)) btn4 = types.InlineKeyboardButton(text = '➕2', callback_data =", "= 'Автоматический read-only на {} час - {}'.format(curr_settings['restrictions']['for_time'], config.settings_statuses[curr_settings['restrictions']['read_only']]), callback_data = 'read_only::{chat_id}'.format(chat_id =", "callback_data = 'warns_action_3::{chat_id}'.format(chat_id = chat_id)) keyboard.add(btn1, btn2, btn3, btn4) btn = types.InlineKeyboardButton(text =", "func = lambda msg: msg.chat.type != 'private') def bot_reset_settings(msg): start_time = time.time() kb", "bot.answer_callback_query( callback_query_id = c.id, show_alert = True, text = 'Вы не являетесь администратором.", 
"callback_data='to_group_settings_menu::{chat_id}'.format(chat_id = chat_id)) keyboard.add(btn) return keyboard def generate_leave_kb(msg): chat_id = msg.chat.id keyboard =", "def bot_sticker_unban(msg): start_time = time.time() if utils.have_args(msg) and utils.check_status(msg.from_user.id, msg.chat.id): sticker_id = utils.parse_arg(msg)[1]", "def bot_user_unban(msg): start_time = time.time() if utils.check_status(msg.from_user.id, msg.chat.id) and utils.have_args(msg): words = utils.parse_arg(msg)[1]", "'new_chat_title', 'new_chat_photo', 'delete_chat_photo', 'group_chat_created', 'supergroup_chat_created', 'channel_chat_created', 'migrate_to_chat_id', 'migrate_from_chat_id', 'pinned_message' ]) def bot_check_system(msg): start_time", "c.data.startswith('welcome_state')) def welcome_settings_state(c): chat_id = utils.parse_chat_id(c) if utils.check_status(c.from_user.id, utils.parse_chat_id(c)): settings = api.get_group_params(chat_id) curr_state", "c.message.message_id ) bot.send_message( c.message.chat.id, text.group_commands[utils.get_group_lang(c.message.chat.id)]['leave']['accepted'] ) bot.leave_chat( c.message.chat.id ) else: bot.send_message( c.message.chat.id, text.group_commands[utils.get_group_lang(c.message.chat.id)]['leave']['cancelled']", "c.message.message_id, reply_markup = generate_broadcast_vars_menu_kb() ) @bot.callback_query_handler(func = lambda c: c.data == 'check_broadcast') def", "btn4 = types.InlineKeyboardButton(text = '➕2', callback_data = 'time_ro_+2::{chat_id}'.format(chat_id = chat_id)) btn5 = types.InlineKeyboardButton(text", "и выше WEBHOOK_LISTEN = '0.0.0.0' WEBHOOK_SSL_CERT = './webhook_cert.pem' # Путь к сертификату WEBHOOK_SSL_PRIV", "= types.InlineKeyboardButton(text = 'Назад', callback_data='to_group_settings_menu::{chat_id}'.format(chat_id = chat_id)) kb.add(btn) return kb def new_users_restrictions_kb(chat_id): keyboard", "text.group_commands[utils.get_group_lang(msg.chat.id)]['leave']['question'], reply_markup = 
generate_leave_kb(msg), parse_mode = 'HTML' ) @bot.message_handler(commands = ['rmkb'], func =", "lambda c: c.data in ['my_chats', 'to_groups_list']) def my_chats_list(c): user_id = c.from_user.id user_settings =", "curr_settings = ujson.loads(api.get_bot_settings(secret_config.token)) s = { 'users': 'пользователи', 'chats': 'диалоги', 'all': 'все' }", "= random.randint(1, 10) ban_time = 60*t try: if not utils.check_status(msg.from_user.id, msg.chat.id): bot.restrict_chat_member( msg.chat.id,", "= int(i['COUNT(`inviter`)']) ) bot.send_message( user_id, m, parse_mode = 'HTML' ) bot.answer_callback_query( c.id, text", "= True if status1 is True and status2 is True: utils.remove_log_channel(msg.chat.id) elif status1", "def bot_ping(msg): start_timee = time.time() uptime = datetime.timedelta(seconds = int(time.time()-start_time)) working_time = datetime.timedelta(seconds", "func=lambda msg: msg.chat.type == 'supergroup') def bot_users_ro(msg): start_time = time.time() if utils.check_status(msg.from_user.id, msg.chat.id):", "e: logging.error(e) else: utils.ban_user(msg) utils.new_update(msg, time.time()-start_time) @bot.message_handler(commands=['ping']) def bot_ping(msg): start_timee = time.time() uptime", "'to_groups_list']) def my_chats_list(c): user_id = c.from_user.id user_settings = api.get_user_param(user_id, 'settings') bot.edit_message_text( chat_id =", "статус настройки: {}'.format(config.settings_statuses[api.get_group_params(chat_id)[c.data.split('::')[0]]]) ) else: bot.answer_callback_query( callback_query_id = c.id, show_alert = True, text", "0: settings['greeting']['delete_timer'] = 0 api.change_group_params(chat_id, ujson.dumps(settings)) bot.edit_message_reply_markup( chat_id = c.message.chat.id, message_id = c.message.message_id,", "kb = types.InlineKeyboardMarkup() kb.add(types.InlineKeyboardButton(text = 'В главное меню', callback_data = 'to_main_menu')) bot.edit_message_reply_markup( chat_id", ") @bot.callback_query_handler(func = lambda c: c.data 
== 'stats_menu') def bot_stats_menu(c): bot.edit_message_text( chat_id =", "bot.delete_message( msg.chat.id, msg.message_id ) if msg_text_low.startswith('разбан'): if utils.check_super_user(msg.from_user.id): utils.global_unban(msg) elif msg_text.lower() in ['глобал", "@bot.message_handler(content_types=['text'], func = lambda msg: msg.chat.type != 'private') def bot_check_text(msg): start_time = time.time()", "= utils.parse_chat_id(c) if utils.check_status(c.from_user.id, utils.parse_chat_id(c)): settings = api.get_group_params(chat_id) settings['greeting']['delete_timer'] = settings['greeting']['delete_timer'] + change_count", "bot.forward_message(secret_config.official_chat, msg.chat.id, msg.message_id) bot.pin_chat_message( r.chat.id, r.message_id ) @bot.message_handler(commands =['setlog'], func = lambda msg:", "text = text.group_commands[utils.get_group_lang(chat_id)]['log_channel']['confirmation']['errors']['user_is_not_creator'] ) elif status2 is not True: bot.send_message( msg.chat.id, text =", "print(e) @bot.message_handler(commands = ['infolog'], func = lambda msg: msg.chat.type in ['group', 'supergroup']) def", "= api.get_group_params(chat_id) settings['warns']['count'] = settings['warns']['count'] + change_count if settings['warns']['count'] < 1: settings['warns']['count'] =", "api.get_group_params(chat_id) for cont_type in config.available_attachments: btn = types.InlineKeyboardButton(text=config.available_attachments_str[cont_type].format(config.settings_statuses[curr_settings['deletions']['files'][cont_type]]), callback_data='delete::{content_type}::{chat_id}'.format(content_type = cont_type, chat_id =", "= lambda c: c.data.startswith('settings_captcha')) # def change_captcha_settings(c): # chat_id = utils.parse_chat_id(c) # if", "words[1] ) @bot.callback_query_handler(func = lambda c: c.data.startswith('welcome_get')) def get_welcome_text(c): chat_id = utils.parse_chat_id(c) bot.send_message(", "= '➖2', callback_data = 
'warns_count_-2::{chat_id}'.format(chat_id = chat_id)) btn2 = types.InlineKeyboardButton(text = '➖1', callback_data", "time.time() if utils.is_restricted(msg) or utils.is_sticker_restricted(msg): bot.delete_message( msg.chat.id, msg.message_id ) utils.new_update(msg, time.time()-start_time) @bot.message_handler(content_types =", "= c.message.message_id ) utils.add_to_delete_queue(chat_id, c.message.message_id, api.get_group_params(chat_id)['greeting']['delete_timer']) else: bot.answer_callback_query( callback_query_id = c.id, show_alert =", "utils.parse_chat_id(c)): settings = api.get_group_params(chat_id) settings['restrictions']['admins_only'] = utils.to_bool(state) api.change_group_params(chat_id, ujson.dumps(settings)) bot.edit_message_reply_markup( chat_id = c.message.chat.id,", "отправлен', show_alert = True ) @bot.callback_query_handler(func = lambda c: c.data in ['my_chats', 'to_groups_list'])", "['group', 'supergroup'] and msg.forward_from_chat is not None and utils.check_status(msg.from_user.id, msg.chat.id) and msg.forward_from_chat.id ==", ") bot.answer_callback_query( callback_query_id = c.id, text = 'Изменения подтверждены. 
Текущий статус настройки: {}'.format(config.settings_statuses[api.get_group_params(chat_id)['deletions']['system']])", "'Выбор языка'.format(config.settings_statuses['get_notifications']), callback_data='open_lang_menu') keyboard.add(btn) return keyboard def delete_settings(chat_id): keyboard = types.InlineKeyboardMarkup(row_width=1) curr_settings =", "= 'empty_callback') btn5 = types.InlineKeyboardButton(text = 'Начать рассылку', callback_data = 'broadcast_check::start') kb.add(btn1, btn2,", "'file_id': '', 'user_id': c.from_user.id, 'message_id': c.message.message_id } ) kb = types.InlineKeyboardMarkup() kb.add(types.InlineKeyboardButton(text =", "'Просмотреть сообщение', callback_data = 'broadcast_message::show') btn3 = types.InlineKeyboardButton(text = 'Начать рассылку', callback_data =", "bot.answer_callback_query( c.id, text = 'Настройки отправлены', show_alert = True ) @bot.callback_query_handler(func = lambda", "chat_id)) kb.add(btn) return kb def new_users_restrictions_kb(chat_id): keyboard = types.InlineKeyboardMarkup(row_width = 4) curr_settings =", "msg.from_user.id, until_date=str(time.time() + ban_time)) bot.reply_to( msg, text.group_commands[utils.get_group_lang(msg.chat.id)]['ban_me_please'].format( t = t ), parse_mode =", "time.time()-start_timee) @bot.message_handler(content_types=['new_chat_members']) def bot_users_new(msg): start_time = time.time() api.register_new_chat(msg.chat) chat_id = msg.chat.id utils.new_member_logs(msg) if", "(WEBHOOK_HOST, WEBHOOK_PORT) WEBHOOK_URL_PATH = \"/%s/\" % (secret_config.token) start_time = int(time.time()) bot = telebot.TeleBot(token", "= lambda msg: msg.chat.type == 'supergroup') def bot_ban_me_please(msg): start_time = time.time() if msg.text", "open(log_name,'w') f.close() print('Файл логов создан') telebot_logger = logging.getLogger('telebot') mysql_info = logging.getLogger('mysql') main_info =", "message = msg start_time = time.time() if utils.is_user_new(msg): if utils.have_args(msg): referrer = 
utils.parse_arg(msg)[1]", "= 'HTML', reply_markup = types.ReplyKeyboardRemove() ) elif utils.check_global_ban(msg): bot.kick_chat_member( msg.chat.id, msg.new_chat_member.id ) bot.send_message(", "msg.chat.id, text.user_messages['start'], reply_markup=generate_user_menu_kb(msg.from_user.id) ) api.register_new_user(msg.from_user, 'ru') else: bot.send_message( msg.chat.id, text.user_messages[utils.get_user_lang(msg)]['start'], reply_markup=generate_user_menu_kb(msg.from_user.id) ) utils.new_update(msg,", "bot.send_message( msg.chat.id, text.user_messages[utils.get_user_lang(msg)]['start'], reply_markup=generate_user_menu_kb(msg.from_user.id) ) utils.new_update(msg, time.time()-start_time) @bot.message_handler(commands=['start'], func=lambda msg: msg.chat.type != 'private')", "msg.chat.id, text.group_commands['ru']['restricted']['bot'], parse_mode = 'HTML', reply_markup = types.ReplyKeyboardRemove() ) elif utils.check_global_ban(msg): bot.kick_chat_member( msg.chat.id,", "chat_id = utils.get_log_id(msg.chat.id), chat_name = bot.get_chat(utils.get_log_id(msg.chat.id)).title ) else: m = text.group_commands[utils.get_group_lang(msg.chat.id)]['log_channel']['info']['is_off'] bot.send_message( msg.chat.id,", "text = 'Переход выполнен.' 
) @bot.callback_query_handler(func = lambda c: c.data.startswith('delete::')) def group_settings_deletions(c): chat_id", "= ['ban', 'ban_me_please'], func = lambda msg: msg.chat.type == 'supergroup') def bot_ban_me_please(msg): start_time", "bot.reply_to(msg, \"<code>'{}': '{}',</code>\".format(msg.photo[0].file_id, msg.caption), parse_mode ='HTML') utils.new_update(msg, time.time()-start_time) @bot.message_handler(content_types = ['sticker'], func =", "== 'creator': status1 = True if i.user.id == my_info.id: status2 = True if", "if not utils.check_status(msg.from_user.id, msg.chat.id): bot.restrict_chat_member( msg.chat.id, msg.from_user.id, until_date=str(time.time() + ban_time)) bot.reply_to( msg, text.group_commands[utils.get_group_lang(msg.chat.id)]['ban_me_please'].format(", "callback_query_id = c.id, text = 'Изменения подтверждены.' ) @bot.callback_query_handler(func = lambda c: c.data.startswith('warns_del'))", "'warns_action_1::{chat_id}'.format(chat_id = chat_id)) btn3 = types.InlineKeyboardButton(text = 'Бан', callback_data = 'warns_action_2::{chat_id}'.format(chat_id = chat_id))", "= msg_id, user_id = msg.from_user.id, user_name = api.replacer(msg.from_user.first_name), ), parse_mode='HTML' ) except Exception", "msg: msg.chat.type in ['group', 'supergroup']) def bot_remove_kb(msg): kb = types.ReplyKeyboardMarkup(one_time_keyboard=True) kb.add(types.KeyboardButton(text='/rmkb')) r =", "+= 1 m += text.group_commands[utils.get_group_lang(chat_id)]['refs_stats']['body'].format( inviter_pos = counter, inviter_id = inviter_info.user.id, inviter_firstname =", "utils.check_text(new_greeting): utils.set_greeting(msg, new_greeting) bot.send_message( msg.chat.id, 'Приветствие изменено' ) else: bot.send_message( msg.chat.id, text =", "инвайтеров', callback_data = 'get_chat_refs::{chat_id}'.format(chat_id = chat_id)) keyboard.add(btn) keyboard.add(types.InlineKeyboardButton(text = 'К списку групп', callback_data", "types.InlineKeyboardMarkup(row_width=1) btn = 
types.InlineKeyboardButton(text = 'Удалить предупреждения', callback_data = 'delete_warns::{user_id}'.format(user_id = user_id)) kb.add(btn)", "= time.time() if msg.text == '/ban_me_please': t = random.randint(1, 10) ban_time = 60*t", "статус настройки: {}'.format(config.settings_statuses[api.get_group_params(chat_id)['deletions']['url']]) ) else: bot.answer_callback_query( callback_query_id = c.id, show_alert = True, text", "def group_settings_deletions(c): chat_id = utils.parse_chat_id(c) cont_type = re.split('::', c.data)[1] if utils.check_status(c.from_user.id, utils.parse_chat_id(c)): utils.change_state_deletions_files(chat_id,", "btn = types.InlineKeyboardButton(text = 'Показать приветствие', callback_data = 'welcome_get::{chat_id}'.format(chat_id = chat_id)) kb.add(btn) btn", "msg if len(msg.text) not in [9, 21]: new_greeting = msg.text[len(msg.text):msg.entities[0].length:-1][::-1] if utils.check_text(new_greeting): utils.set_greeting(msg,", "parse_mode='HTML' ) except Exception as e: print(e) bot.reply_to( msg, text.reports_messages['report']['to_user'], parse_mode = 'HTML'", "callback_data = 'to_main_menu')) return kb @bot.channel_post_handler(content_types=['text'], func = lambda msg: msg.chat.id == secret_config.channel_ID)", "text = 'У вас недостаточно прав для выполнения этого действия. Текущий статус настройки:", "= c.id, text = 'Изменения подтверждены.' 
) else: bot.answer_callback_query( callback_query_id = c.id, show_alert", "utils.check_deleting_queue) t.start() async def handle(request): if request.match_info.get('token') == bot.token: request_body_dict = await request.json()", "can_add_web_page_previews=True, can_send_messages=True, can_send_other_messages=True ) bot.edit_message_text( text = text.group_commands[utils.get_group_lang(c.message.chat.id)]['restricted']['new_user']['button_pressed'].format( user_id = user.user.id, user_name =", "'warns_settings::{chat_id}'.format(chat_id = chat_id)) keyboard.add(btn) btn = types.InlineKeyboardButton(text = 'Настройка приветствий', callback_data = 'welcome_settings::{chat_id}'.format(chat_id", "bot.send_message( msg.chat.id, text = text.group_commands[utils.get_group_lang(chat_id)]['log_channel']['confirmation']['errors']['user_is_not_creator'] ) elif status2 is not True: bot.send_message( msg.chat.id,", "bot.send_message( msg.chat.id, m, parse_mode = 'HTML' ) @bot.message_handler(commands = ['leave'], func = lambda", "chat_id, user_id ) bot.edit_message_text( text = text.group_commands[utils.get_group_lang(c.message.chat.id)]['restricted']['new_user']['button_pressed'].format( user_id = user.user.id, user_name = api.replacer(user.user.first_name)", "предупреждения', callback_data = 'delete_warns::{user_id}'.format(user_id = user_id)) kb.add(btn) return kb def unban_new_user_kb(msg): kb =", "'Вы действительно хотите сбросить настройки?', reply_markup = kb ) @bot.message_handler(commands = ['update_time'], func", "= lambda c: c.data.startswith('get_chat_refs::')) def bot_get_chat_refs(c): chat_id = utils.parse_chat_id(c) user_id = c.from_user.id inviters", "@bot.message_handler(commands=['donate']) def bot_donate(msg): start_time = time.time() bot.send_message( msg.chat.id, text.group_commands['ru']['donate'], parse_mode = 'HTML' )", "bot.edit_message_reply_markup( chat_id = c.message.chat.id, message_id = c.message.message_id, reply_markup = 
create_user_language_keyboard() ) bot.answer_callback_query( callback_query_id", "подтверждены.' ) @bot.callback_query_handler(func = lambda c: c.data.startswith('welcome_state')) def welcome_settings_state(c): chat_id = utils.parse_chat_id(c) if", "utils.parse_chat_id(c) bot.edit_message_reply_markup( chat_id = c.message.chat.id, message_id = c.message.message_id, reply_markup = welcome_settings_kb(chat_id) ) bot.answer_callback_query(", "= 'Назад', callback_data='to_group_settings_menu::{chat_id}'.format(chat_id = chat_id)) keyboard.add(btn) return keyboard def generate_leave_kb(msg): chat_id = msg.chat.id", "message_id = c.message.message_id, text = 'Ваше меню' ) bot.edit_message_reply_markup( chat_id = c.message.chat.id, message_id", "callback_data = 'new_restrictions_admins_only_{state}::{chat_id}'.format(state = config.settings_states[curr_settings['restrictions']['admins_only']], chat_id = chat_id)) keyboard.add(btn) btn = types.InlineKeyboardButton(text =", "['update_time'], func = lambda msg: utils.check_super_user(msg.from_user.id)) def bot_update_time(msg): bot_ping(msg) subprocess.run(\"timedatectl set-time '{time}'\".format(time =", "'HTML' ) @bot.message_handler(commands=['ro'], func=lambda msg: msg.chat.type == 'supergroup') def bot_users_ro(msg): start_time = time.time()", "= chat_id)) keyboard.add(btn) btn = types.InlineKeyboardButton(text = 'Настройка предупреждений', callback_data = 'warns_settings::{chat_id}'.format(chat_id =", "== user_id: if i.status == 'creator': status1 = True if i.user.id == my_info.id:", "= chat_id)) keyboard.add(btn) btn = types.InlineKeyboardButton(text = 'Фильтры', callback_data='deletions_settings::{chat_id}'.format(chat_id = chat_id)) keyboard.add(btn) btn", "btn2, btn3, btn4) btn = types.InlineKeyboardButton(text = 'Действие при максимальном кол-ве варнов: {}'.format(config.warns_states[curr_settings['warns']['action']]),", "Путь к приватному ключу WEBHOOK_URL_BASE = \"https://%s:%s\" % (WEBHOOK_HOST, WEBHOOK_PORT) 
WEBHOOK_URL_PATH = \"/%s/\"", "= working_time_str.replace(working_time_str.split(',')[0], utils.get_text_translation(working_time_str.split(',')[0], 'ru')) bot.send_message( msg.chat.id, text.user_messages['ru']['commands']['ping'].format( unix_time = datetime.datetime.fromtimestamp(int(time.time())), working_time = working_time_str,", "def bot_new_warn(msg): start_time = time.time() if utils.check_status(msg.from_user.id, msg.chat.id) and msg.reply_to_message is not None", "c: c.data == 'admin_menu') def bot_admin_menu(c): bot.edit_message_text( chat_id = c.message.chat.id, message_id = c.message.message_id,", "= bot.get_chat_member(chat_id, i['inviter']) counter += 1 m += text.group_commands[utils.get_group_lang(chat_id)]['refs_stats']['body'].format( inviter_pos = counter, inviter_id", ") @bot.callback_query_handler(func = lambda c: c.data.startswith('broadcast_check')) def bot_broadcast_check(c): arg = c.data.split('::')[1] curr_bot_settings =", "c.data.startswith('warns_del')) def del_warns(c): user_id = utils.parse_user_id(c) chat_id = utils.parse_chat_id(c) if utils.check_status(c.from_user.id, utils.parse_chat_id(c)): api.zeroing_warns(user_id,", "Start aiohttp server web.run_app( app, host=WEBHOOK_LISTEN, port=WEBHOOK_PORT, ssl_context=context, ) # bot.remove_webhook() # bot.polling()", "btn = types.InlineKeyboardButton(text = i['title'], callback_data = 'settings::{chat_id}'.format(chat_id = i['chat_id'])) btns.append(btn) kb.add(*btns) kb.add(types.InlineKeyboardButton(text", "'HTML', chat_id = c.message.chat.id, message_id = c.message.message_id ) utils.add_to_delete_queue(msg.chat.id, r.message_id, api.get_group_params(msg.chat.id)['greeting']['delete_timer']) else: bot.answer_callback_query(", "lambda c: c.data.startswith('settings_captcha')) # def change_captcha_settings(c): # chat_id = utils.parse_chat_id(c) # if utils.check_status(c.from_user.id,", "generate_broadcast_check_menu_kb(): kb = types.InlineKeyboardMarkup(row_width = 3) curr_settings = 
ujson.loads(api.get_bot_settings(secret_config.token)) s = { 'users':", "= utils.parse_chat_id(c) bot.edit_message_reply_markup( chat_id = c.message.chat.id, message_id = c.message.message_id, reply_markup = new_users_restrictions_kb(chat_id) )", "callback_data = 'broadcast_check::start') kb.add(btn1, btn2, btn3) kb.add(btn4, btn5) return kb def generate_user_groups(user_id): kb", "= 'Изменения подтверждены.' ) @bot.callback_query_handler(func = lambda c: c.data.startswith('welcome_state')) def welcome_settings_state(c): chat_id =", "utils.get_log_id(msg.chat.id) and utils.check_log(msg.chat.id) ) def bot_del_log(msg): print(1) user_id = msg.from_user.id try: admins =", "вас недостаточно прав для выполнения этого действия.' ) @bot.callback_query_handler(func = lambda c: c.data.startswith('unban_new_user'))", "api.get_group_params(chat_id) settings['greeting']['delete_timer'] = settings['greeting']['delete_timer'] + change_count if settings['greeting']['delete_timer'] < 0: settings['greeting']['delete_timer'] = 0", "update = telebot.types.Update.de_json(request_body_dict) bot.process_new_updates([update]) return web.Response() else: return web.Response(status=403) app.router.add_post('/{token}/', handle) def create_user_language_keyboard():", "callback_data='leave_cancel::{chat_id}'.format(chat_id = chat_id)) keyboard.add(btn) btn = types.InlineKeyboardButton(text = 'Нет, останься', callback_data='leave_confirm::{chat_id}'.format(chat_id = chat_id))", "= text.group_commands[utils.get_group_lang(chat_id)]['log_channel']['confirmation']['errors']['user_is_not_creator'] ) elif status2 is not True: bot.send_message( msg.chat.id, text = text.group_commands[utils.get_group_lang(chat_id)]['log_channel']['confirmation']['errors']['bot_is_not_admin']", "= c.message.chat.id, message_id = c.message.message_id, reply_markup = kb ) t.start() t.join() @bot.callback_query_handler(func =", "btn = types.InlineKeyboardButton(text = 'Ограничения новых пользователей', callback_data = 
'new_users_restrictions::{chat_id}'.format(chat_id = chat_id)) keyboard.add(btn)", "'Данное приветствие не работает' ) utils.new_update(msg, time.time()-start_time) @bot.message_handler(commands=['kick'], func=lambda msg: msg.chat.type != 'private')", "bot.reply_to( msg, text.group_commands[utils.get_group_lang(msg.chat.id)]['errors']['prefix'].format( reason = text.group_commands[utils.get_group_lang(msg.chat.id)]['errors']['reasons']['user_is_admin'] ), parse_mode='HTML' ) except Exception as e:", "group_setting(chat_id), ) @bot.callback_query_handler(func = lambda c: c.data == 'to_main_menu') def bot_to_main_menu(c): bot.edit_message_text( chat_id", "вас недостаточно прав для выполнения этого действия.' ) @bot.callback_query_handler(func = lambda c: c.data.startswith('welcome_settings'))", "utils.new_update(msg, time.time()-start_time) @bot.message_handler(commands=['stickerpack_unban'], func=lambda msg: msg.chat.type != 'private') def bot_stickerpack_unban(msg): start_time = time.time()", "user_name = msg.new_chat_member.first_name ), parse_mode = 'HTML' ) else: utils.new_user_in_chat(msg) if utils.need_greeting(msg): r", "utils.new_update(msg, time.time()-start_time) @bot.message_handler(commands = ['reregister'], func = lambda msg: msg.chat.type == 'supergroup') def", "show_alert = True, text = 'Вы не являетесь администратором. 
Текущий статус настройки: {}'.format(config.settings_statuses[api.get_group_params(chat_id)[c.data.split('::')[0]]])", "bot.edit_message_reply_markup( chat_id = msg.chat.id, message_id = r.message_id, reply_markup = kb ) bot.send_message( msg.from_user.id,", "lambda c: c.data.startswith('get_settings_json')) def bot_get_settings_json(c): chat_id = utils.parse_chat_id(c) bot.send_message( chat_id = c.from_user.id, text", "keyboard.add(btn) btn = types.InlineKeyboardButton(text = 'Получить топ инвайтеров', callback_data = 'get_chat_refs::{chat_id}'.format(chat_id = chat_id))", "= lambda c: c.data == 'admin_menu') def bot_admin_menu(c): bot.edit_message_text( chat_id = c.message.chat.id, message_id", "{}'.format(config.settings_statuses[api.get_group_params(chat_id)['deletions']['system']]) ) else: bot.answer_callback_query( callback_query_id = c.id, show_alert = True, text = 'Вы", "reset_settings_button(c): chat_id = utils.parse_chat_id(c) if utils.check_status(c.from_user.id, utils.parse_chat_id(c)): if c.data.startswith('reset_settings_confirmation'): api.register_new_chat(c.message.chat) api.change_group_params(chat_id, ujson.dumps(config.default_group_settings)) bot.send_message(", "c.message.chat.id, message_id = c.message.message_id, reply_markup = welcome_settings_kb(chat_id) ) bot.answer_callback_query( callback_query_id = c.id, text", "generate_leave_kb(msg): chat_id = msg.chat.id keyboard = types.InlineKeyboardMarkup(row_width=1) btn = types.InlineKeyboardButton(text = 'Да, выйди", "chat_id = msg.chat.id utils.new_member_logs(msg) if api.get_group_params(msg.chat.id)['deletions']['system']: bot.delete_message( msg.chat.id, msg.message_id ) if msg.chat.type ==", "ключу WEBHOOK_URL_BASE = \"https://%s:%s\" % (WEBHOOK_HOST, WEBHOOK_PORT) WEBHOOK_URL_PATH = \"/%s/\" % (secret_config.token) start_time", "bot_stickerpack_unban(msg): start_time = time.time() if utils.check_status(msg.from_user.id, msg.chat.id) and utils.have_args(msg): stickerpack_name = 
utils.parse_arg(msg)[1] utils.unban_stickerpack(msg,", "True: bot.send_message( msg.chat.id, text = text.group_commands[utils.get_group_lang(chat_id)]['log_channel']['confirmation']['errors']['bot_is_not_admin'] ) except Exception as e: print(e) @bot.message_handler(commands", "parse_mode='HTML' ) utils.new_update(msg, time.time()-start_time) @bot.message_handler(commands=['warn'], func=lambda msg: msg.chat.type != 'private') def bot_new_warn(msg): start_time", ") if user.status in ['restricted']: bot.restrict_chat_member( chat_id, user_id, can_send_media_messages=True, can_add_web_page_previews=True, can_send_messages=True, can_send_other_messages=True )", "= True, text = 'Вы не являетесь администратором. Текущий статус настройки: {}'.format(config.settings_statuses[api.get_group_params(chat_id)['deletions']['system']]) )", "= chat_id)) keyboard.add(btn) btn = types.InlineKeyboardButton(text = 'Получить топ инвайтеров', callback_data = 'get_chat_refs::{chat_id}'.format(chat_id", "func = lambda msg: msg.chat.type != 'private') def bot_check_text(msg): start_time = time.time() msg_text", "user_settings['admined_groups']: btn = types.InlineKeyboardButton(text = i['title'], callback_data = 'settings::{chat_id}'.format(chat_id = i['chat_id'])) btns.append(btn) kb.add(*btns)", "['rmkb'], func = lambda msg: msg.chat.type in ['group', 'supergroup']) def bot_remove_kb(msg): kb =", "types.InlineKeyboardMarkup() for i in config.languages: lang_keyboard.add(types.InlineKeyboardButton(text = i['title'], callback_data = 'lang::{lang_code}'.format(lang_code = i['code'])))", "4) curr_settings = api.get_group_params(chat_id) btn = types.InlineKeyboardButton(text = 'Отправлять приветствие в чат: {}'.format(config.settings_statuses[curr_settings['greeting']['is_enabled']]),", "else: txt = text.reports_messages['report']['to_admin']['no_username'] for i in admins: try: bot.send_message( i.user.id, txt.format( group_name", "@bot.callback_query_handler(func = lambda c: c.data == 
'stats_menu') def bot_stats_menu(c): bot.edit_message_text( chat_id = c.message.chat.id,", "= 'to_main_menu')) return kb @bot.channel_post_handler(content_types=['text'], func = lambda msg: msg.chat.id == secret_config.channel_ID) def", "text = 'Вы не являетесь администратором. Текущий статус настройки: {}'.format(config.settings_statuses[api.get_group_params(chat_id)[c.data.split('::')[0]]]) ) @bot.callback_query_handler(func =", "def new_users_ro(c): chat_id = utils.parse_chat_id(c) if utils.check_status(c.from_user.id, utils.parse_chat_id(c)): settings = api.get_group_params(chat_id) settings['restrictions']['read_only'] =", "= 'del_url::{chat_id}'.format(chat_id = chat_id)) keyboard.add(btn) btn = types.InlineKeyboardButton(text = 'Удалять системные сообщения{}'.format(config.settings_statuses[curr_settings['deletions']['system']]), callback_data", "if utils.have_args(msg): referrer = utils.parse_arg(msg)[1] bot.send_message( msg.chat.id, text.user_messages['start'], reply_markup=generate_user_menu_kb(msg.from_user.id) ) api.register_new_user(msg.from_user, 'ru') else:", "utils.new_update(msg, time.time()-start_time) @bot.message_handler(commands=['sticker_unban'], func=lambda msg: msg.chat.type == 'supergroup') def bot_sticker_unban(msg): start_time = time.time()", "lambda msg: msg.chat.type in ['group', 'supergroup'] and msg.forward_from_chat is not None and utils.check_status(msg.from_user.id,", "@bot.message_handler(commands = ['settings'], func = lambda msg: msg.chat.type == 'supergroup') def bot_answ(msg): start_time", "lambda msg: msg.chat.type == 'supergroup') def bot_answ(msg): start_time = time.time() message = msg", "== '/ban_me_please': t = random.randint(1, 10) ban_time = 60*t try: if not utils.check_status(msg.from_user.id,", "bot.send_message( msg.chat.id, 'Вы действительно хотите сбросить настройки?', reply_markup = kb ) @bot.message_handler(commands =", "msg.message_id ) utils.new_update(msg, time.time()-start_time) 
@bot.message_handler(commands=['report']) def bot_report(msg): start_time = time.time() admins = bot.get_chat_administrators(msg.chat.id)", "c.data in ['my_chats', 'to_groups_list']) def my_chats_list(c): user_id = c.from_user.id user_settings = api.get_user_param(user_id, 'settings')", "utils.is_restricted(msg) or utils.is_sticker_restricted(msg): bot.delete_message( msg.chat.id, msg.message_id ) utils.new_update(msg, time.time()-start_time) @bot.message_handler(content_types = ['audio', 'document',", "func = lambda msg: utils.check_super_user(msg.from_user.id)) def bot_update_time(msg): bot_ping(msg) subprocess.run(\"timedatectl set-time '{time}'\".format(time = datetime.datetime.fromtimestamp(msg.date+1).strftime(\"%Y-%m-%d", "reply_markup = new_users_restrictions_kb(chat_id) ) @bot.callback_query_handler(func = lambda c: c.data.startswith('read_only')) def new_users_ro(c): chat_id =", "'not_enought_rights') utils.new_update(msg, time.time()-start_time) @bot.message_handler(commands=['sticker_unban'], func=lambda msg: msg.chat.type == 'supergroup') def bot_sticker_unban(msg): start_time =", "start_time = time.time() if utils.is_user_new(msg): if utils.have_args(msg): referrer = utils.parse_arg(msg)[1] bot.send_message( msg.chat.id, text.user_messages['start'],", "settings['greeting']['delete_timer'] = settings['greeting']['delete_timer'] + change_count if settings['greeting']['delete_timer'] < 0: settings['greeting']['delete_timer'] = 0 api.change_group_params(chat_id,", "counter += 1 m += text.group_commands[utils.get_group_lang(chat_id)]['refs_stats']['body'].format( inviter_pos = counter, inviter_id = inviter_info.user.id, inviter_firstname", "utils.global_unban(msg) elif msg_text.lower() in ['глобал бан']: if utils.check_super_user(msg.from_user.id): utils.global_ban(msg) elif not utils.check_status(msg.from_user.id, msg.chat.id):", "'Ввести сообщение', callback_data = 'broadcast_message::input') btn2 = types.InlineKeyboardButton(text = 'Просмотреть 
сообщение', callback_data =", "= api.replacer(msg.from_user.first_name), ), parse_mode='HTML' ) except Exception as e: print(e) bot.reply_to( msg, text.reports_messages['report']['to_user'],", "msg.forward_from_chat.id == utils.get_log_id(msg.chat.id) and utils.check_log(msg.chat.id) ) def bot_del_log(msg): print(1) user_id = msg.from_user.id try:", "m, parse_mode = 'HTML' ) bot.answer_callback_query( c.id, text = 'Список отправлен', show_alert =", "= 2) btn1 = types.InlineKeyboardButton(text = 'Рассылка', callback_data = 'broadcast_menu') btn2 = types.InlineKeyboardButton(text", "lambda c: c.data.startswith('del_system')) def del_system(c): chat_id = utils.parse_chat_id(c) if utils.check_status(c.from_user.id, utils.parse_chat_id(c)): utils.change_state_deletions_main(chat_id, 'system')", "msg.from_user.id, utils.generate_rules_text(msg), parse_mode = 'HTML' ) except Exception: bot.reply_to( msg, text = ''", "msg, 'Настройки отправлены вам в личные сообщения', ) kb.add(types.InlineKeyboardButton(text = 'Удалить', callback_data =", "reply_markup = kb ) bot.delete_message( msg.chat.id, r.message_id ) bot.delete_message( msg.chat.id, msg.message_id ) @bot.message_handler(commands", "text.group_commands[utils.get_group_lang(chat_id)]['refs_stats']['header'] counter = 0 for i in inviters: inviter_info = bot.get_chat_member(chat_id, i['inviter']) counter", "btn2 = types.InlineKeyboardButton(text = 'Рассылка сообщения', callback_data = 'broadcast_settings') kb.add(btn1, btn2) kb.add(types.InlineKeyboardButton(text =", "'welcome_get::{chat_id}'.format(chat_id = chat_id)) kb.add(btn) btn = types.InlineKeyboardButton(text = 'Назад', callback_data='to_group_settings_menu::{chat_id}'.format(chat_id = chat_id)) kb.add(btn)", "bot.send_message( msg.chat.id, 'Приветствие изменено' ) else: bot.send_message( msg.chat.id, text = 'Данное приветствие не", "= bot.get_chat_administrators(msg.chat.id) chat = bot.get_chat(msg.chat.id) msg_id = '' if chat.username: if 
msg.reply_to_message: msg_id", "= lambda c: c.data.startswith('settings_delete')) def del_settings(c): words = c.data.split() bot.delete_message( c.message.chat.id, words[2] )" ]
[ "MONGO_HOST = 'localhost' MONGO_PORT = 27017 MONGO_DATABASE = 'nomad' REDIS_HOST = 'localhost' REDIS_PORT", "27017 MONGO_DATABASE = 'nomad' REDIS_HOST = 'localhost' REDIS_PORT = 6379 REDIS_DB = 0", "= 'localhost' MONGO_PORT = 27017 MONGO_DATABASE = 'nomad' REDIS_HOST = 'localhost' REDIS_PORT =", "'localhost' MONGO_PORT = 27017 MONGO_DATABASE = 'nomad' REDIS_HOST = 'localhost' REDIS_PORT = 6379", "= 27017 MONGO_DATABASE = 'nomad' REDIS_HOST = 'localhost' REDIS_PORT = 6379 REDIS_DB =", "MONGO_PORT = 27017 MONGO_DATABASE = 'nomad' REDIS_HOST = 'localhost' REDIS_PORT = 6379 REDIS_DB" ]
[ "python3 # -*- coding: utf-8 -*- \"\"\" Created on Wed Mar 20 22:08:31", "by a point and then a mask) that covered the aforementioned minterms. \"\"\"", "want to expand the code and allow different iteration over the inputs. If", "to exit a loop _continue = False # Convert the index into the", "def __repr__(self): return self.__str__() def Expand(truth_table, dim): # # Iterate over every boolean", "if bln is T: covered_minterms, bln_expr = Expand_Cube(boolean_array, idx, dim, minterms2bln) bln2minterms[bln_expr] =", "different iteration over the inputs. If this returned randomly, the code would no", "the idx we see above), and keeps track of which minterms are related", "= False D = \"_\" \"\"\" Generate the inputs for the algorithm. A", "the space of the cube space = [idx] covered_minterms = {idx} if idx", "# elements into it. space.extend(new_space) for ns in new_space: # If the value", "for boolean_array in truth_table: bln2minterms= dict() minterms2bln = dict() for idx, bln in", "returned randomly, the code would no longer necessarily output predictable results. \"\"\" def", "question here really is what is a booleanish? in addition to True and", "over them for correlated_minterm in minterms_correlated_to_bln: # and remove the boolean from their", "# WE found one! # Take it required_bln = set_of_blns.pop() # Now find", "minterms2bln[minterm]: if len(bln2minterms[bln]) > most: best_candidate = bln most = len(bln2minterms[bln]) required_blns.add( best_candidate", "and allow different iteration over the inputs. If this returned randomly, the code", "minterm = Select_Minterm(minterms2bln) most = 0 best_candidate = None # We determine the", "valid space. 
If it is valid, add the expanding indices into list if", "for correlated_minterm in minterms_correlated_to_bln: # Delete all minterms correlated to the highest-scoring boolean", "if not _continue: # We like the new dimension, and are going to", "class boolean_expression(object): def __init__(self, idx, mask): self.idx = idx self.mask = mask def", "to the highest-scoring boolean for related_bln in minterms2bln[correlated_minterm]: todelete.append((related_bln, correlated_minterm)) # Forreal, delete", "# Iterate over every boolean output # expr_per_output = list() for boolean_array in", "best candidate del bln2minterms[best_candidate] lom.append(required_blns) return lom \"\"\" This is a helper function,", "T, 7: T, } tt.append(e) return tt \"\"\" This is a helper function,", "= False # Convert the index into the representitive integer dim2int = 2**i", "a period of multiple timesteps. Each dictionary represents a boolean function. The key", "= boolean_expression(idx, 0) # Define the space of the cube space = [idx]", "and 0s into 1s, depending on the index new_index = index ^ dim2int", "expanding the cube, verify that we're expanding it into # valid space. If", "If it is valid, add the expanding indices into list if new_index in", "# We are looking at only a SINGLE minterm. # Scanning a subspace", "the Covered Minterms if boolean_array[ns] is T: covered_minterms.add(ns) if ns in minterms2bln: minterms2bln[ns].add(bln_expr)", "minterms2bln[correlated_minterm] for related_bln, correlated_minterm in todelete: bln2minterms[related_bln].remove(correlated_minterm) # The ndelete the aforementioned best", "in minterms2bln[minterm]: if len(bln2minterms[bln]) > most: best_candidate = bln most = len(bln2minterms[bln]) required_blns.add(", "in linear time. 
minterm = Select_Minterm(minterms2bln) most = 0 best_candidate = None #", "# are two dictionaries that are dually referent # in order to keep", "Arguments: LEN : integer - The dimensionality of the desired output truth table.", "correlated to the highest-scoring boolean for related_bln in minterms2bln[correlated_minterm]: todelete.append((related_bln, correlated_minterm)) # Forreal,", "def Expand(truth_table, dim): # # Iterate over every boolean output # expr_per_output =", "required_bln = set_of_blns.pop() # Now find all the minterms related to the boolean", "between boolean statements and # the minterms they cover # lom = list()", "display over a period of multiple timesteps. Each dictionary represents a boolean function.", "it into # valid space. If it is valid, add the expanding indices", "SINGLE minterm. # Scanning a subspace to decrease overall computation time # and", "bln2minterms[best_candidate] # Iterate over them todelete = list() for correlated_minterm in minterms_correlated_to_bln: #", "for i in todelete: del minterms2bln[i] # Now we get rid of booleans", "determine the \"Best candidate\" as the boolean expression # with the greatest number", "The space in the boolean array we are beginning at, where the expansion", "def __eq__(self, b): return self.idx == b.idx and self.mask == b.mask def __hash__(self):", "we are beginning at, where the expansion begins from. dim: int - The", "is specifically # True and not just Don't Care, add it to the", "WE found one! # Take it required_bln = set_of_blns.pop() # Now find all", "\"\"\" def Select_Minterm(minterms2bln): return list(minterms2bln.keys())[0] def main(dim): # # Define truth table #", "one and move on to the next # dimension. if not _continue: #", "3: T, 4: F, 5: F, 6: T, 7: T, } tt.append(e) return", "to expand through ever dimension, one at a time. 
While it does this,", "it maps the boolean expressions to minterms and it maps the minterms to", "list() for boolean_array in truth_table: bln2minterms= dict() minterms2bln = dict() for idx, bln", "idx, bln in boolean_array.items(): if bln is T: covered_minterms, bln_expr = Expand_Cube(boolean_array, idx,", "idx, dim, minterms2bln) bln2minterms[bln_expr] = covered_minterms # bln2minterms and minterms2bln # are two", "the highest-scoring boolean for related_bln in minterms2bln[correlated_minterm]: todelete.append((related_bln, correlated_minterm)) # Forreal, delete them", "= list() itr_minterms2bln = copy.deepcopy(minterms2bln) for minterm, set_of_blns in itr_minterms2bln.items(): if len(set_of_blns) ==", "self.mask)) def __str__(self): return \"boolean_expression({0}, {1})\".format(self.idx, self.mask) def __repr__(self): return self.__str__() def Expand(truth_table,", "booly-books del bln2minterms[required_bln] todelete.append(minterm) # And remember what we've done on this day,", "the cube space = [idx] covered_minterms = {idx} if idx in minterms2bln: minterms2bln[idx].add(bln_expr)", "related to the boolean minterms_correlated_to_bln = bln2minterms[required_bln] # Iterate over them for correlated_minterm", "list (dict (int, Booleanish)) - The only question here really is what is", "F, 5: F, 6: T, 7: T, } tt.append(e) return tt \"\"\" This", "boolean variables. Return: tt (truth table): list (dict (int, Booleanish)) - The only", "of the list represents a different boolean expression input -- say if we", "then a mask) that covered the aforementioned minterms. \"\"\" def Expand_Cube(boolean_array, idx, dim,", "# Convert the index into the representitive integer dim2int = 2**i # The", "- The only question here really is what is a booleanish? in addition", "index into the representitive integer dim2int = 2**i # The space being explored", "track of which minterms are related to what boolean expressions. 
Return: covered_minterms: set", "randomly, the code would no longer necessarily output predictable results. \"\"\" def _Provide_Index(dim):", "boolean expressions to minterms and it maps the minterms to the boolean expressions.", "best candidate while len(minterms2bln): # We are looking at only a SINGLE minterm.", "# Now find all the minterms related to the boolean minterms_correlated_to_bln = bln2minterms[required_bln]", "of dimensions we are operating in. # REFERENCED BY VALUE minterms2bln: dict (int,", "ndelete the aforementioned best candidate del bln2minterms[best_candidate] lom.append(required_blns) return lom \"\"\" This is", "of the cube space = [idx] covered_minterms = {idx} if idx in minterms2bln:", "we're expanding it into # valid space. If it is valid, add the", "list of dictionarys. Each element of the list represents a different boolean expression", "input -- say if we are trying to reduce something for a 7", "correlated_minterm in minterms_correlated_to_bln: # Delete all minterms correlated to the highest-scoring boolean for", "{bln_expr} # Iterate over the indices however we decide for i in _Provide_Index(dim):", "lom = list() # Iterate over every solution-set per output itr_list_of_maps = copy.deepcopy(list_of_maps)", "T, 4: F, 5: F, 6: T, 7: T, } tt.append(e) return tt", "== b.idx and self.mask == b.mask def __hash__(self): return hash((self.idx, self.mask)) def __str__(self):", "We need to turn 1s into 0s and 0s into 1s, depending on", "the boolean expressions to minterms and it maps the minterms to the boolean", "= list() for index in space: # MAGIC LINE # We need to", "allow different iteration over the inputs. If this returned randomly, the code would", "the inputs for the algorithm. A list of dictionarys. Each element of the", "to the Covered Minterms if boolean_array[ns] is T: covered_minterms.add(ns) if ns in minterms2bln:", "would no longer necessarily output predictable results. 
\"\"\" def _Provide_Index(dim): return range(0, dim)", "integer dim2int = 2**i # The space being explored new_space = list() for", "if ns in minterms2bln: minterms2bln[ns].add(bln_expr) else: minterms2bln[ns] = {bln_expr} # Allow the Mask", "# in order to keep computations fast. expr_per_output.append((bln2minterms, minterms2bln)) return expr_per_output def Intersect(", "set() todelete = list() itr_minterms2bln = copy.deepcopy(minterms2bln) for minterm, set_of_blns in itr_minterms2bln.items(): if", "in minterms2bln: minterms2bln[idx].add(bln_expr) else: minterms2bln[idx] = {bln_expr} # Iterate over the indices however", "- The space in the boolean array we are beginning at, where the", "which minterms are related to what boolean expressions. Return: covered_minterms: set - The", "yet supported. Do whatever you want in here. Arguments: LEN : integer -", "\"\"\" def Expand_Cube(boolean_array, idx, dim, minterms2bln): bln_expr = boolean_expression(idx, 0) # Define the", "index _continue = True break # We don't want to extend into the", "for ns in new_space: # If the value at the boolean array is", "boolean_expression(object): def __init__(self, idx, mask): self.idx = idx self.mask = mask def __eq__(self,", "in itr_minterms2bln.items(): if len(set_of_blns) == 1: # WE found one! # Take it", "Select_Minterm(minterms2bln) most = 0 best_candidate = None # We determine the \"Best candidate\"", "a time. While it does this, it maps the boolean expressions to minterms", ") # Now find all the minterms related to the boolean minterms_correlated_to_bln =", "the minterms related to the boolean minterms_correlated_to_bln = bln2minterms[best_candidate] # Iterate over them", "add the expanding indices into list if new_index in boolean_array and boolean_array[new_index]: new_space.append(new_index)", "is abused heavily. \"\"\" def Get_Truth_Table(LEN): tt = list() e = dict() e", "True and not just Don't Care, add it to the Covered Minterms if", "just covered. 
bln_expr.mask += dim2int return covered_minterms, bln_expr class boolean_expression(object): def __init__(self, idx,", "minterms_correlated_to_bln: # Delete all minterms correlated to the highest-scoring boolean for related_bln in", "bln2minterms[related_bln].remove(correlated_minterm) # The ndelete the aforementioned best candidate del bln2minterms[best_candidate] lom.append(required_blns) return lom", "Mar 20 22:08:31 2019 @author: iaricanli \"\"\" import copy T = True F", "them for correlated_minterm in minterms_correlated_to_bln: # and remove the boolean from their knowledge", "keeps track of which minterms are related to what boolean expressions. Return: covered_minterms:", "= True break # We don't want to extend into the space of", "look for any case where a minterm # maps to only one boolean", "= list() e = dict() e = { 0: T, 1: T, 2:", "regarding the results found. Arguments: boolean array: dict (int, truth) - The key", "required_bln ) for i in todelete: del minterms2bln[i] # Now we get rid", "\"\"\" Created on Wed Mar 20 22:08:31 2019 @author: iaricanli \"\"\" import copy", "Allow the Mask to contain the information regarding the dimension # that was", "- The value points to whether that is mapped to True, False, or", "aforementioned minterms. \"\"\" def Expand_Cube(boolean_array, idx, dim, minterms2bln): bln_expr = boolean_expression(idx, 0) #", "here. Arguments: LEN : integer - The dimensionality of the desired output truth", "\"\"\" def _Provide_Index(dim): return range(0, dim) \"\"\" Performs the expansion of a cube", "True, False, or Don't Care. Reading from a file not yet supported. Do", "space.extend(new_space) for ns in new_space: # If the value at the boolean array", "= bln2minterms[required_bln] # Iterate over them for correlated_minterm in minterms_correlated_to_bln: # and remove", "# Then delete the entire boolean from the booly-books del bln2minterms[required_bln] todelete.append(minterm) #", "predictable results. 
\"\"\" def Select_Minterm(minterms2bln): return list(minterms2bln.keys())[0] def main(dim): # # Define truth", "If the value at the boolean array is specifically # True and not", "False D = \"_\" \"\"\" Generate the inputs for the algorithm. A list", "# Define truth table # truth_table = Get_Truth_Table(dim) # Perform the Expand operation", "the new # elements into it. space.extend(new_space) for ns in new_space: # If", "the inputs - The value points to whether that is mapped to True,", "in boolean_array.items(): if bln is T: covered_minterms, bln_expr = Expand_Cube(boolean_array, idx, dim, minterms2bln)", "by the boolean expression bln_expr: boolean_expression - The boolean expression (defined by a", "else: minterms2bln[idx] = {bln_expr} # Iterate over the indices however we decide for", "move on to the next # dimension. if not _continue: # We like", "2**i # The space being explored new_space = list() for index in space:", "period of multiple timesteps. Each dictionary represents a boolean function. The key is", "trying to reduce something for a 7 segment display over a period of", "value is either True, False, or Don't Care. Reading from a file not", "= bln2minterms[best_candidate] # Iterate over them todelete = list() for correlated_minterm in minterms_correlated_to_bln:", "of booleans as we determine that they are \"the best candidate while len(minterms2bln):", "Created on Wed Mar 20 22:08:31 2019 @author: iaricanli \"\"\" import copy T", "them del minterms2bln[correlated_minterm] for related_bln, correlated_minterm in todelete: bln2minterms[related_bln].remove(correlated_minterm) # The ndelete the", "abused heavily. \"\"\" def Get_Truth_Table(LEN): tt = list() e = dict() e =", "into 0s and 0s into 1s, depending on the index new_index = index", "the cube, verify that we're expanding it into # valid space. If it", "and it maps the minterms to the boolean expressions. 
Thus providing the program", "-- same as in the idx we see above), and keeps track of", "keep everything in linear time. minterm = Select_Minterm(minterms2bln) most = 0 best_candidate =", "they are \"the best candidate while len(minterms2bln): # We are looking at only", "= {idx} if idx in minterms2bln: minterms2bln[idx].add(bln_expr) else: minterms2bln[idx] = {bln_expr} # Iterate", "minterm. # Scanning a subspace to decrease overall computation time # and keep", "output predictable results. \"\"\" def Select_Minterm(minterms2bln): return list(minterms2bln.keys())[0] def main(dim): # # Define", "is a third concept of Don'tCare which is being represented here as \"_\".", "in space: # MAGIC LINE # We need to turn 1s into 0s", "output predictable results. \"\"\" def _Provide_Index(dim): return range(0, dim) \"\"\" Performs the expansion", "points to whether that is mapped to True, False, or DC idx: int", "def __init__(self, idx, mask): self.idx = idx self.mask = mask def __eq__(self, b):", "bln2minterms[bln_expr] = covered_minterms # bln2minterms and minterms2bln # are two dictionaries that are", "Covered Minterms if boolean_array[ns] is T: covered_minterms.add(ns) if ns in minterms2bln: minterms2bln[ns].add(bln_expr) else:", "While it does this, it maps the boolean expressions to minterms and it", "in new_space: # If the value at the boolean array is specifically #", "self.idx == b.idx and self.mask == b.mask def __hash__(self): return hash((self.idx, self.mask)) def", "b.idx and self.mask == b.mask def __hash__(self): return hash((self.idx, self.mask)) def __str__(self): return", "depending on the index new_index = index ^ dim2int # We're expanding the", "True, False, or DC idx: int - The space in the boolean array", "being explored new_space = list() for index in space: # MAGIC LINE #", "We don't want to extend into the space of the selected index #", "dim): # # Iterate over every boolean output # expr_per_output = list() for", "tt \"\"\" This is a helper function, 
existant in case I want to", "dim: int - The total number of dimensions we are operating in. #", "expression # with the greatest number of related minterms for bln in minterms2bln[minterm]:", "True F = False D = \"_\" \"\"\" Generate the inputs for the", "expr_per_output.append((bln2minterms, minterms2bln)) return expr_per_output def Intersect( list_of_maps ): # # Finds intersections between", "boolean_expression(idx, 0) # Define the space of the cube space = [idx] covered_minterms", "0s and 0s into 1s, depending on the index new_index = index ^", "no longer necessarily output predictable results. \"\"\" def _Provide_Index(dim): return range(0, dim) \"\"\"", "minterms (tracked by integer -- same as in the idx we see above),", "to what boolean expressions. Return: covered_minterms: set - The defined set of minterms", "of a cube through the Nspace Attempting to expand through ever dimension, one", "supported. Do whatever you want in here. Arguments: LEN : integer - The", "e = dict() e = { 0: T, 1: T, 2: D, 3:", "len(set_of_blns) == 1: # WE found one! # Take it required_bln = set_of_blns.pop()", "= len(bln2minterms[bln]) required_blns.add( best_candidate ) # Now find all the minterms related to", "# Iterate over every solution-set per output itr_list_of_maps = copy.deepcopy(list_of_maps) for bln2minterms, minterms2bln", "boolean expressions. Return: covered_minterms: set - The defined set of minterms covered by", "to only one boolean expression. required_blns = set() todelete = list() itr_minterms2bln =", "minterms are related to what boolean expressions. Return: covered_minterms: set - The defined", "0: T, 1: T, 2: D, 3: T, 4: F, 5: F, 6:", "no longer necessarily output predictable results. \"\"\" def Select_Minterm(minterms2bln): return list(minterms2bln.keys())[0] def main(dim):", "the expansion begins from. 
dim: int - The total number of dimensions we", "them todelete = list() for correlated_minterm in minterms_correlated_to_bln: # Delete all minterms correlated", "their knowledge minterms2bln[correlated_minterm].remove(required_bln) # Then delete the entire boolean from the booly-books del", "while len(minterms2bln): # We are looking at only a SINGLE minterm. # Scanning", "is what is a booleanish? in addition to True and False there is", "bln_expr.mask += dim2int return covered_minterms, bln_expr class boolean_expression(object): def __init__(self, idx, mask): self.idx", "only a SINGLE minterm. # Scanning a subspace to decrease overall computation time", "a file not yet supported. Do whatever you want in here. Arguments: LEN", "for bln2minterms, minterms2bln in itr_list_of_maps: # First we're going to look for any", "going to look for any case where a minterm # maps to only", "boolean expression # with the greatest number of related minterms for bln in", "valid, add the expanding indices into list if new_index in boolean_array and boolean_array[new_index]:", "in itr_list_of_maps: # First we're going to look for any case where a", "range(0, dim) \"\"\" Performs the expansion of a cube through the Nspace Attempting", "First we're going to look for any case where a minterm # maps", "are operating in. # REFERENCED BY VALUE minterms2bln: dict (int, boolean_expression) - Maps", "Forreal, delete them del minterms2bln[correlated_minterm] for related_bln, correlated_minterm in todelete: bln2minterms[related_bln].remove(correlated_minterm) # The", "knowledge minterms2bln[correlated_minterm].remove(required_bln) # Then delete the entire boolean from the booly-books del bln2minterms[required_bln]", "expressions. Thus providing the program a quick view of the rows and columns", "operating in. 
# REFERENCED BY VALUE minterms2bln: dict (int, boolean_expression) - Maps the", "list() for index in space: # MAGIC LINE # We need to turn", "# Scanning a subspace to decrease overall computation time # and keep everything", "tt (truth table): list (dict (int, Booleanish)) - The only question here really", "the boolean array we are beginning at, where the expansion begins from. dim:", "of boolean variables. Return: tt (truth table): list (dict (int, Booleanish)) - The", "True break # We don't want to extend into the space of the", "minterms2bln in itr_list_of_maps: # First we're going to look for any case where", "different boolean expression input -- say if we are trying to reduce something", "Each element of the list represents a different boolean expression input -- say", "helper function, existant in case I want to expand the code and allow", "the expanding indices into list if new_index in boolean_array and boolean_array[new_index]: new_space.append(new_index) else:", "[idx] covered_minterms = {idx} if idx in minterms2bln: minterms2bln[idx].add(bln_expr) else: minterms2bln[idx] = {bln_expr}", "for bln in minterms2bln[minterm]: if len(bln2minterms[bln]) > most: best_candidate = bln most =", "= Expand_Cube(boolean_array, idx, dim, minterms2bln) bln2minterms[bln_expr] = covered_minterms # bln2minterms and minterms2bln #", "list() itr_minterms2bln = copy.deepcopy(minterms2bln) for minterm, set_of_blns in itr_minterms2bln.items(): if len(set_of_blns) == 1:", "Scanning a subspace to decrease overall computation time # and keep everything in", "Don't Care, add it to the Covered Minterms if boolean_array[ns] is T: covered_minterms.add(ns)", "by integer -- same as in the idx we see above), and keeps", "related_bln in minterms2bln[correlated_minterm]: todelete.append((related_bln, correlated_minterm)) # Forreal, delete them del minterms2bln[correlated_minterm] for related_bln,", "# The ndelete the aforementioned best candidate del bln2minterms[best_candidate] 
lom.append(required_blns) return lom \"\"\"", "as \"_\". It fails \"is True\" but passes \"== True\". this is abused", "itr_minterms2bln.items(): if len(set_of_blns) == 1: # WE found one! # Take it required_bln", "linear time. minterm = Select_Minterm(minterms2bln) most = 0 best_candidate = None # We", "minterms2bln: minterms2bln[idx].add(bln_expr) else: minterms2bln[idx] = {bln_expr} # Iterate over the indices however we", "skip this one and move on to the next # dimension. if not", "D = \"_\" \"\"\" Generate the inputs for the algorithm. A list of", "self.idx = idx self.mask = mask def __eq__(self, b): return self.idx == b.idx", "of related minterms for bln in minterms2bln[minterm]: if len(bln2minterms[bln]) > most: best_candidate =", "and remove the boolean from their knowledge minterms2bln[correlated_minterm].remove(required_bln) # Then delete the entire", "inputs for the algorithm. A list of dictionarys. Each element of the list", "next index _continue = True break # We don't want to extend into", "= bln most = len(bln2minterms[bln]) required_blns.add( best_candidate ) # Now find all the", "only question here really is what is a booleanish? in addition to True", "value points to whether that is mapped to True, False, or DC idx:", "to True and False there is a third concept of Don'tCare which is", "a minterm # maps to only one boolean expression. required_blns = set() todelete", "7 segment display over a period of multiple timesteps. Each dictionary represents a", "T: covered_minterms, bln_expr = Expand_Cube(boolean_array, idx, dim, minterms2bln) bln2minterms[bln_expr] = covered_minterms # bln2minterms", "variables. 
Return: tt (truth table): list (dict (int, Booleanish)) - The only question", "VALUE minterms2bln: dict (int, boolean_expression) - Maps the minterms (tracked by integer --", "if idx in minterms2bln: minterms2bln[idx].add(bln_expr) else: minterms2bln[idx] = {bln_expr} # Iterate over the", "todelete: bln2minterms[related_bln].remove(correlated_minterm) # The ndelete the aforementioned best candidate del bln2minterms[best_candidate] lom.append(required_blns) return", "\"\"\" import copy T = True F = False D = \"_\" \"\"\"", "but passes \"== True\". this is abused heavily. \"\"\" def Get_Truth_Table(LEN): tt =", "del minterms2bln[correlated_minterm] for related_bln, correlated_minterm in todelete: bln2minterms[related_bln].remove(correlated_minterm) # The ndelete the aforementioned", "for correlated_minterm in minterms_correlated_to_bln: # and remove the boolean from their knowledge minterms2bln[correlated_minterm].remove(required_bln)", "minterms2bln # are two dictionaries that are dually referent # in order to", "1001 -> 9) and the value is either True, False, or Don't Care.", "it maps the minterms to the boolean expressions. Thus providing the program a", "5: F, 6: T, 7: T, } tt.append(e) return tt \"\"\" This is", "for the algorithm. A list of dictionarys. Each element of the list represents", "number of related minterms for bln in minterms2bln[minterm]: if len(bln2minterms[bln]) > most: best_candidate", "REFERENCED BY VALUE minterms2bln: dict (int, boolean_expression) - Maps the minterms (tracked by", "return self.idx == b.idx and self.mask == b.mask def __hash__(self): return hash((self.idx, self.mask))", "in. # REFERENCED BY VALUE minterms2bln: dict (int, boolean_expression) - Maps the minterms", "the space of the selected index # if it didn't pan out. 
So", "= None # We determine the \"Best candidate\" as the boolean expression #", "= 2**i # The space being explored new_space = list() for index in", "covered by the boolean expression bln_expr: boolean_expression - The boolean expression (defined by", "= 0 best_candidate = None # We determine the \"Best candidate\" as the", "D, 3: T, 4: F, 5: F, 6: T, 7: T, } tt.append(e)", "minterms covered by the boolean expression bln_expr: boolean_expression - The boolean expression (defined", "# dimension. if not _continue: # We like the new dimension, and are", "def Get_Truth_Table(LEN): tt = list() e = dict() e = { 0: T,", "for any case where a minterm # maps to only one boolean expression.", "extend into the space of the selected index # if it didn't pan", "mask def __eq__(self, b): return self.idx == b.idx and self.mask == b.mask def", "referent # in order to keep computations fast. expr_per_output.append((bln2minterms, minterms2bln)) return expr_per_output def", "to the boolean minterms_correlated_to_bln = bln2minterms[best_candidate] # Iterate over them todelete = list()", "True\". this is abused heavily. \"\"\" def Get_Truth_Table(LEN): tt = list() e =", "expression bln_expr: boolean_expression - The boolean expression (defined by a point and then", "__repr__(self): return self.__str__() def Expand(truth_table, dim): # # Iterate over every boolean output", "The total number of dimensions we are operating in. # REFERENCED BY VALUE", "(int, Booleanish)) - The only question here really is what is a booleanish?", "output itr_list_of_maps = copy.deepcopy(list_of_maps) for bln2minterms, minterms2bln in itr_list_of_maps: # First we're going", "A list of dictionarys. 
Each element of the list represents a different boolean", "== b.mask def __hash__(self): return hash((self.idx, self.mask)) def __str__(self): return \"boolean_expression({0}, {1})\".format(self.idx, self.mask)", "the minterms related to the boolean minterms_correlated_to_bln = bln2minterms[required_bln] # Iterate over them", "len(minterms2bln): # We are looking at only a SINGLE minterm. # Scanning a", "randomly, the code would no longer necessarily output predictable results. \"\"\" def Select_Minterm(minterms2bln):", "into the representitive integer dim2int = 2**i # The space being explored new_space", "as the boolean expression # with the greatest number of related minterms for", "_continue: # We like the new dimension, and are going to cover all", "T, 1: T, 2: D, 3: T, 4: F, 5: F, 6: T,", "best_candidate = bln most = len(bln2minterms[bln]) required_blns.add( best_candidate ) # Now find all", "todelete.append((related_bln, correlated_minterm)) # Forreal, delete them del minterms2bln[correlated_minterm] for related_bln, correlated_minterm in todelete:", "and False there is a third concept of Don'tCare which is being represented", "add it to the Covered Minterms if boolean_array[ns] is T: covered_minterms.add(ns) if ns", "todelete = list() itr_minterms2bln = copy.deepcopy(minterms2bln) for minterm, set_of_blns in itr_minterms2bln.items(): if len(set_of_blns)", "the code would no longer necessarily output predictable results. 
\"\"\" def _Provide_Index(dim): return", "correlated_minterm)) # Forreal, delete them del minterms2bln[correlated_minterm] for related_bln, correlated_minterm in todelete: bln2minterms[related_bln].remove(correlated_minterm)", "Mask to contain the information regarding the dimension # that was just covered.", "minterms_correlated_to_bln: # and remove the boolean from their knowledge minterms2bln[correlated_minterm].remove(required_bln) # Then delete", "subspace to decrease overall computation time # and keep everything in linear time.", "of the rows and columns regarding the results found. Arguments: boolean array: dict", "Wed Mar 20 22:08:31 2019 @author: iaricanli \"\"\" import copy T = True", "related to the boolean minterms_correlated_to_bln = bln2minterms[best_candidate] # Iterate over them todelete =", "a different boolean expression input -- say if we are trying to reduce", "and columns regarding the results found. Arguments: boolean array: dict (int, truth) -", "# Iterate over the indices however we decide for i in _Provide_Index(dim): #", "the boolean expression bln_expr: boolean_expression - The boolean expression (defined by a point", "# Control variable to exit a loop _continue = False # Convert the", "the list represents a different boolean expression input -- say if we are", "desired output truth table. AKA, the number of boolean variables. Return: tt (truth", "MAGIC LINE # We need to turn 1s into 0s and 0s into", "return \"boolean_expression({0}, {1})\".format(self.idx, self.mask) def __repr__(self): return self.__str__() def Expand(truth_table, dim): # #", "results found. 
Arguments: boolean array: dict (int, truth) - The key maps to", "bln is T: covered_minterms, bln_expr = Expand_Cube(boolean_array, idx, dim, minterms2bln) bln2minterms[bln_expr] = covered_minterms", "the booly-books del bln2minterms[required_bln] todelete.append(minterm) # And remember what we've done on this", "the value at the boolean array is specifically # True and not just", "which is being represented here as \"_\". It fails \"is True\" but passes", "Booleanish)) - The only question here really is what is a booleanish? in", "minterms and it maps the minterms to the boolean expressions. Thus providing the", "= index ^ dim2int # We're expanding the cube, verify that we're expanding", "in minterms2bln[correlated_minterm]: todelete.append((related_bln, correlated_minterm)) # Forreal, delete them del minterms2bln[correlated_minterm] for related_bln, correlated_minterm", "best_candidate ) # Now find all the minterms related to the boolean minterms_correlated_to_bln", "Then delete the entire boolean from the booly-books del bln2minterms[required_bln] todelete.append(minterm) # And", "addition to True and False there is a third concept of Don'tCare which", "BY VALUE minterms2bln: dict (int, boolean_expression) - Maps the minterms (tracked by integer", "Performs the expansion of a cube through the Nspace Attempting to expand through", "Return: covered_minterms: set - The defined set of minterms covered by the boolean", "the boolean input represented in integer form (aka A^!B^!C^D -> 1001 -> 9)", "it didn't pan out. 
So skip this one and move on to the", "del bln2minterms[best_candidate] lom.append(required_blns) return lom \"\"\" This is a helper function, existant in", "dict (int, boolean_expression) - Maps the minterms (tracked by integer -- same as", "boolean_array in truth_table: bln2minterms= dict() minterms2bln = dict() for idx, bln in boolean_array.items():", "the new space doesn't pan out _perfectly_, keep going to # the the", "# Now we get rid of booleans as we determine that they are", "Iterate over the indices however we decide for i in _Provide_Index(dim): # Control", "utf-8 -*- \"\"\" Created on Wed Mar 20 22:08:31 2019 @author: iaricanli \"\"\"", "existant in case I want to expand the code and allow different iteration", "the dimension # that was just covered. bln_expr.mask += dim2int return covered_minterms, bln_expr", "in the boolean array we are beginning at, where the expansion begins from.", "to the boolean minterms_correlated_to_bln = bln2minterms[required_bln] # Iterate over them for correlated_minterm in", "(int, truth) - The key maps to the integer representation of the inputs", "want to extend into the space of the selected index # if it", "Each dictionary represents a boolean function. The key is the boolean input represented", "covered_minterms = {idx} if idx in minterms2bln: minterms2bln[idx].add(bln_expr) else: minterms2bln[idx] = {bln_expr} #", "space doesn't pan out _perfectly_, keep going to # the the next index", "if boolean_array[ns] is T: covered_minterms.add(ns) if ns in minterms2bln: minterms2bln[ns].add(bln_expr) else: minterms2bln[ns] =", "output # expr_per_output = list() for boolean_array in truth_table: bln2minterms= dict() minterms2bln =", "fails \"is True\" but passes \"== True\". this is abused heavily. 
\"\"\" def", "The space being explored new_space = list() for index in space: # MAGIC", "the greatest number of related minterms for bln in minterms2bln[minterm]: if len(bln2minterms[bln]) >", "boolean input represented in integer form (aka A^!B^!C^D -> 1001 -> 9) and", "to # the the next index _continue = True break # We don't", "# the the next index _continue = True break # We don't want", "array is specifically # True and not just Don't Care, add it to", "We're expanding the cube, verify that we're expanding it into # valid space.", "lom.append(required_blns) return lom \"\"\" This is a helper function, existant in case I", "explored new_space = list() for index in space: # MAGIC LINE # We", "correlated_minterm in todelete: bln2minterms[related_bln].remove(correlated_minterm) # The ndelete the aforementioned best candidate del bln2minterms[best_candidate]", "= {bln_expr} # Allow the Mask to contain the information regarding the dimension", "that we're expanding it into # valid space. If it is valid, add", "time # and keep everything in linear time. minterm = Select_Minterm(minterms2bln) most =", "expr_per_output def Intersect( list_of_maps ): # # Finds intersections between boolean statements and", "bln_expr class boolean_expression(object): def __init__(self, idx, mask): self.idx = idx self.mask = mask", "= True F = False D = \"_\" \"\"\" Generate the inputs for", "#!/usr/bin/env python3 # -*- coding: utf-8 -*- \"\"\" Created on Wed Mar 20", ": integer - The dimensionality of the desired output truth table. AKA, the", "this is abused heavily. \"\"\" def Get_Truth_Table(LEN): tt = list() e = dict()", "= [idx] covered_minterms = {idx} if idx in minterms2bln: minterms2bln[idx].add(bln_expr) else: minterms2bln[idx] =", "break # We don't want to extend into the space of the selected", "AKA, the number of boolean variables. Return: tt (truth table): list (dict (int,", "and keeps track of which minterms are related to what boolean expressions. 
Return:", "defined set of minterms covered by the boolean expression bln_expr: boolean_expression - The", "expanding it into # valid space. If it is valid, add the expanding", "self.__str__() def Expand(truth_table, dim): # # Iterate over every boolean output # expr_per_output", "found one! # Take it required_bln = set_of_blns.pop() # Now find all the", "the boolean minterms_correlated_to_bln = bln2minterms[required_bln] # Iterate over them for correlated_minterm in minterms_correlated_to_bln:", "at, where the expansion begins from. dim: int - The total number of", "array: dict (int, truth) - The key maps to the integer representation of", "): # # Finds intersections between boolean statements and # the minterms they", "value at the boolean array is specifically # True and not just Don't", "return range(0, dim) \"\"\" Performs the expansion of a cube through the Nspace", "truth) - The key maps to the integer representation of the inputs -", "to look for any case where a minterm # maps to only one", "= set() todelete = list() itr_minterms2bln = copy.deepcopy(minterms2bln) for minterm, set_of_blns in itr_minterms2bln.items():", "0) # Define the space of the cube space = [idx] covered_minterms =", "minterms2bln) bln2minterms[bln_expr] = covered_minterms # bln2minterms and minterms2bln # are two dictionaries that", "however we decide for i in _Provide_Index(dim): # Control variable to exit a", "what we've done on this day, this evil day. 
required_blns.add( required_bln ) for", "represented in integer form (aka A^!B^!C^D -> 1001 -> 9) and the value", "most: best_candidate = bln most = len(bln2minterms[bln]) required_blns.add( best_candidate ) # Now find", "\"the best candidate while len(minterms2bln): # We are looking at only a SINGLE", "- The key maps to the integer representation of the inputs - The", "solution-set per output itr_list_of_maps = copy.deepcopy(list_of_maps) for bln2minterms, minterms2bln in itr_list_of_maps: # First", "the Expand operation on every output set list_of_maps = Expand(truth_table, dim) list_of_covering_blns =", "of multiple timesteps. Each dictionary represents a boolean function. The key is the", "input represented in integer form (aka A^!B^!C^D -> 1001 -> 9) and the", "the Mask to contain the information regarding the dimension # that was just", "going to # the the next index _continue = True break # We", "0 best_candidate = None # We determine the \"Best candidate\" as the boolean", "minterms2bln): bln_expr = boolean_expression(idx, 0) # Define the space of the cube space", "__init__(self, idx, mask): self.idx = idx self.mask = mask def __eq__(self, b): return", "form (aka A^!B^!C^D -> 1001 -> 9) and the value is either True,", "covered_minterms: set - The defined set of minterms covered by the boolean expression", "i in todelete: del minterms2bln[i] # Now we get rid of booleans as", "bln2minterms, minterms2bln in itr_list_of_maps: # First we're going to look for any case", "the number of boolean variables. Return: tt (truth table): list (dict (int, Booleanish))", "code would no longer necessarily output predictable results. \"\"\" def Select_Minterm(minterms2bln): return list(minterms2bln.keys())[0]", "a third concept of Don'tCare which is being represented here as \"_\". It", "on this day, this evil day. required_blns.add( required_bln ) for i in todelete:", "the information regarding the dimension # that was just covered. 
bln_expr.mask += dim2int", "or DC idx: int - The space in the boolean array we are", "# -*- coding: utf-8 -*- \"\"\" Created on Wed Mar 20 22:08:31 2019", "integer -- same as in the idx we see above), and keeps track", "algorithm. A list of dictionarys. Each element of the list represents a different", "represents a different boolean expression input -- say if we are trying to", "in addition to True and False there is a third concept of Don'tCare", "in minterms_correlated_to_bln: # and remove the boolean from their knowledge minterms2bln[correlated_minterm].remove(required_bln) # Then", "bln2minterms[best_candidate] lom.append(required_blns) return lom \"\"\" This is a helper function, existant in case", "function, existant in case I want to expand the code and allow different", "dict (int, truth) - The key maps to the integer representation of the", "on the index new_index = index ^ dim2int # We're expanding the cube,", "We like the new dimension, and are going to cover all the new", "they cover # lom = list() # Iterate over every solution-set per output", "\"_\" \"\"\" Generate the inputs for the algorithm. A list of dictionarys. Each", "and then a mask) that covered the aforementioned minterms. \"\"\" def Expand_Cube(boolean_array, idx,", "= {bln_expr} # Iterate over the indices however we decide for i in", "= covered_minterms # bln2minterms and minterms2bln # are two dictionaries that are dually", "dimension. if not _continue: # We like the new dimension, and are going", "are going to cover all the new # elements into it. space.extend(new_space) for", "new_index in boolean_array and boolean_array[new_index]: new_space.append(new_index) else: # If the new space doesn't", "to decrease overall computation time # and keep everything in linear time. 
minterm", "# truth_table = Get_Truth_Table(dim) # Perform the Expand operation on every output set", "bln2minterms[required_bln] todelete.append(minterm) # And remember what we've done on this day, this evil", "decrease overall computation time # and keep everything in linear time. minterm =", "for index in space: # MAGIC LINE # We need to turn 1s", "So skip this one and move on to the next # dimension. if", "space of the selected index # if it didn't pan out. So skip", "number of boolean variables. Return: tt (truth table): list (dict (int, Booleanish)) -", "set of minterms covered by the boolean expression bln_expr: boolean_expression - The boolean", "pan out _perfectly_, keep going to # the the next index _continue =", "Expand_Cube(boolean_array, idx, dim, minterms2bln): bln_expr = boolean_expression(idx, 0) # Define the space of", "False, or Don't Care. Reading from a file not yet supported. Do whatever", "this, it maps the boolean expressions to minterms and it maps the minterms", "table): list (dict (int, Booleanish)) - The only question here really is what", "Expand(truth_table, dim): # # Iterate over every boolean output # expr_per_output = list()", "remove the boolean from their knowledge minterms2bln[correlated_minterm].remove(required_bln) # Then delete the entire boolean", "key maps to the integer representation of the inputs - The value points", "# First we're going to look for any case where a minterm #", "the minterms (tracked by integer -- same as in the idx we see", "maps the boolean expressions to minterms and it maps the minterms to the", "(int, boolean_expression) - Maps the minterms (tracked by integer -- same as in", "# We determine the \"Best candidate\" as the boolean expression # with the", "expression input -- say if we are trying to reduce something for a", "minterms2bln[ns] = {bln_expr} # Allow the Mask to contain the information regarding the", "number of dimensions we are operating in. 
# REFERENCED BY VALUE minterms2bln: dict", "= copy.deepcopy(minterms2bln) for minterm, set_of_blns in itr_minterms2bln.items(): if len(set_of_blns) == 1: # WE", "expand through ever dimension, one at a time. While it does this, it", "a boolean function. The key is the boolean input represented in integer form", "b): return self.idx == b.idx and self.mask == b.mask def __hash__(self): return hash((self.idx,", "where a minterm # maps to only one boolean expression. required_blns = set()", "minterms related to the boolean minterms_correlated_to_bln = bln2minterms[best_candidate] # Iterate over them todelete", "the aforementioned best candidate del bln2minterms[best_candidate] lom.append(required_blns) return lom \"\"\" This is a", "list() # Iterate over every solution-set per output itr_list_of_maps = copy.deepcopy(list_of_maps) for bln2minterms,", "-- say if we are trying to reduce something for a 7 segment", "# the minterms they cover # lom = list() # Iterate over every", "program a quick view of the rows and columns regarding the results found.", "maps the minterms to the boolean expressions. Thus providing the program a quick", "that is mapped to True, False, or DC idx: int - The space", "- The total number of dimensions we are operating in. # REFERENCED BY", "didn't pan out. So skip this one and move on to the next", "only one boolean expression. required_blns = set() todelete = list() itr_minterms2bln = copy.deepcopy(minterms2bln)", "expression (defined by a point and then a mask) that covered the aforementioned", "boolean minterms_correlated_to_bln = bln2minterms[required_bln] # Iterate over them for correlated_minterm in minterms_correlated_to_bln: #", "of Don'tCare which is being represented here as \"_\". It fails \"is True\"", "int - The total number of dimensions we are operating in. 
# REFERENCED", "= Get_Truth_Table(dim) # Perform the Expand operation on every output set list_of_maps =", "two dictionaries that are dually referent # in order to keep computations fast.", "tt.append(e) return tt \"\"\" This is a helper function, existant in case I", "of which minterms are related to what boolean expressions. Return: covered_minterms: set -", "# Forreal, delete them del minterms2bln[correlated_minterm] for related_bln, correlated_minterm in todelete: bln2minterms[related_bln].remove(correlated_minterm) #", "The defined set of minterms covered by the boolean expression bln_expr: boolean_expression -", "we determine that they are \"the best candidate while len(minterms2bln): # We are", "greatest number of related minterms for bln in minterms2bln[minterm]: if len(bln2minterms[bln]) > most:", "necessarily output predictable results. \"\"\" def Select_Minterm(minterms2bln): return list(minterms2bln.keys())[0] def main(dim): # #", "here really is what is a booleanish? in addition to True and False", "and self.mask == b.mask def __hash__(self): return hash((self.idx, self.mask)) def __str__(self): return \"boolean_expression({0},", "# Perform the Expand operation on every output set list_of_maps = Expand(truth_table, dim)", "# We need to turn 1s into 0s and 0s into 1s, depending", "selected index # if it didn't pan out. 
So skip this one and", "= list() for boolean_array in truth_table: bln2minterms= dict() minterms2bln = dict() for idx,", "for i in _Provide_Index(dim): # Control variable to exit a loop _continue =", "bln_expr = Expand_Cube(boolean_array, idx, dim, minterms2bln) bln2minterms[bln_expr] = covered_minterms # bln2minterms and minterms2bln", "Iterate over every boolean output # expr_per_output = list() for boolean_array in truth_table:", "boolean from the booly-books del bln2minterms[required_bln] todelete.append(minterm) # And remember what we've done", "Delete all minterms correlated to the highest-scoring boolean for related_bln in minterms2bln[correlated_minterm]: todelete.append((related_bln,", "minterms2bln: minterms2bln[ns].add(bln_expr) else: minterms2bln[ns] = {bln_expr} # Allow the Mask to contain the", "we're going to look for any case where a minterm # maps to", "represents a boolean function. The key is the boolean input represented in integer", "aforementioned best candidate del bln2minterms[best_candidate] lom.append(required_blns) return lom \"\"\" This is a helper", "I want to expand the code and allow different iteration over the inputs.", "minterms. \"\"\" def Expand_Cube(boolean_array, idx, dim, minterms2bln): bln_expr = boolean_expression(idx, 0) # Define", "regarding the dimension # that was just covered. bln_expr.mask += dim2int return covered_minterms,", "ever dimension, one at a time. 
While it does this, it maps the", "indices into list if new_index in boolean_array and boolean_array[new_index]: new_space.append(new_index) else: # If", "and boolean_array[new_index]: new_space.append(new_index) else: # If the new space doesn't pan out _perfectly_,", "is T: covered_minterms.add(ns) if ns in minterms2bln: minterms2bln[ns].add(bln_expr) else: minterms2bln[ns] = {bln_expr} #", "bln2minterms[required_bln] # Iterate over them for correlated_minterm in minterms_correlated_to_bln: # and remove the", "are dually referent # in order to keep computations fast. expr_per_output.append((bln2minterms, minterms2bln)) return", "boolean from their knowledge minterms2bln[correlated_minterm].remove(required_bln) # Then delete the entire boolean from the", "minterms2bln)) return expr_per_output def Intersect( list_of_maps ): # # Finds intersections between boolean", "that was just covered. bln_expr.mask += dim2int return covered_minterms, bln_expr class boolean_expression(object): def", "= list() for correlated_minterm in minterms_correlated_to_bln: # Delete all minterms correlated to the", "# We don't want to extend into the space of the selected index", "done on this day, this evil day. required_blns.add( required_bln ) for i in", "minterms correlated to the highest-scoring boolean for related_bln in minterms2bln[correlated_minterm]: todelete.append((related_bln, correlated_minterm)) #", "(dict (int, Booleanish)) - The only question here really is what is a", "7: T, } tt.append(e) return tt \"\"\" This is a helper function, existant", "It fails \"is True\" but passes \"== True\". this is abused heavily. \"\"\"", "of the desired output truth table. AKA, the number of boolean variables. Return:", "are related to what boolean expressions. 
Return: covered_minterms: set - The defined set", "set_of_blns.pop() # Now find all the minterms related to the boolean minterms_correlated_to_bln =", "} tt.append(e) return tt \"\"\" This is a helper function, existant in case", "are two dictionaries that are dually referent # in order to keep computations", "Iterate over them for correlated_minterm in minterms_correlated_to_bln: # and remove the boolean from", "Get_Truth_Table(LEN): tt = list() e = dict() e = { 0: T, 1:", "bln most = len(bln2minterms[bln]) required_blns.add( best_candidate ) # Now find all the minterms", "The only question here really is what is a booleanish? in addition to", "None # We determine the \"Best candidate\" as the boolean expression # with", "Don't Care. Reading from a file not yet supported. Do whatever you want", "see above), and keeps track of which minterms are related to what boolean", "a loop _continue = False # Convert the index into the representitive integer", "it required_bln = set_of_blns.pop() # Now find all the minterms related to the", "__hash__(self): return hash((self.idx, self.mask)) def __str__(self): return \"boolean_expression({0}, {1})\".format(self.idx, self.mask) def __repr__(self): return", "return expr_per_output def Intersect( list_of_maps ): # # Finds intersections between boolean statements", "the results found. Arguments: boolean array: dict (int, truth) - The key maps", "\"\"\" Generate the inputs for the algorithm. A list of dictionarys. Each element", "Maps the minterms (tracked by integer -- same as in the idx we", "one boolean expression. 
required_blns = set() todelete = list() itr_minterms2bln = copy.deepcopy(minterms2bln) for", "of the inputs - The value points to whether that is mapped to", "decide for i in _Provide_Index(dim): # Control variable to exit a loop _continue", "in todelete: del minterms2bln[i] # Now we get rid of booleans as we", "_continue = True break # We don't want to extend into the space", "is mapped to True, False, or DC idx: int - The space in", "booleans as we determine that they are \"the best candidate while len(minterms2bln): #", "index new_index = index ^ dim2int # We're expanding the cube, verify that", "and not just Don't Care, add it to the Covered Minterms if boolean_array[ns]", "if new_index in boolean_array and boolean_array[new_index]: new_space.append(new_index) else: # If the new space", "one! # Take it required_bln = set_of_blns.pop() # Now find all the minterms", "20 22:08:31 2019 @author: iaricanli \"\"\" import copy T = True F =", "output truth table. AKA, the number of boolean variables. Return: tt (truth table):", "there is a third concept of Don'tCare which is being represented here as", "statements and # the minterms they cover # lom = list() # Iterate", "code and allow different iteration over the inputs. If this returned randomly, the", "dict() e = { 0: T, 1: T, 2: D, 3: T, 4:", "__str__(self): return \"boolean_expression({0}, {1})\".format(self.idx, self.mask) def __repr__(self): return self.__str__() def Expand(truth_table, dim): #", "keep computations fast. expr_per_output.append((bln2minterms, minterms2bln)) return expr_per_output def Intersect( list_of_maps ): # #", "Control variable to exit a loop _continue = False # Convert the index", "list() e = dict() e = { 0: T, 1: T, 2: D,", "through the Nspace Attempting to expand through ever dimension, one at a time.", "the code would no longer necessarily output predictable results. 
\"\"\" def Select_Minterm(minterms2bln): return", "at the boolean array is specifically # True and not just Don't Care,", "del minterms2bln[i] # Now we get rid of booleans as we determine that", "would no longer necessarily output predictable results. \"\"\" def Select_Minterm(minterms2bln): return list(minterms2bln.keys())[0] def", "- The dimensionality of the desired output truth table. AKA, the number of", "def Expand_Cube(boolean_array, idx, dim, minterms2bln): bln_expr = boolean_expression(idx, 0) # Define the space", "The dimensionality of the desired output truth table. AKA, the number of boolean", "1: T, 2: D, 3: T, 4: F, 5: F, 6: T, 7:", "and the value is either True, False, or Don't Care. Reading from a", "The value points to whether that is mapped to True, False, or DC", "new_space.append(new_index) else: # If the new space doesn't pan out _perfectly_, keep going", "every solution-set per output itr_list_of_maps = copy.deepcopy(list_of_maps) for bln2minterms, minterms2bln in itr_list_of_maps: #", "elements into it. space.extend(new_space) for ns in new_space: # If the value at", "mask): self.idx = idx self.mask = mask def __eq__(self, b): return self.idx ==", "list() for correlated_minterm in minterms_correlated_to_bln: # Delete all minterms correlated to the highest-scoring", "-*- coding: utf-8 -*- \"\"\" Created on Wed Mar 20 22:08:31 2019 @author:", "reduce something for a 7 segment display over a period of multiple timesteps.", "F, 6: T, 7: T, } tt.append(e) return tt \"\"\" This is a", "todelete.append(minterm) # And remember what we've done on this day, this evil day.", "the boolean array is specifically # True and not just Don't Care, add", "necessarily output predictable results. \"\"\" def _Provide_Index(dim): return range(0, dim) \"\"\" Performs the", "DC idx: int - The space in the boolean array we are beginning", "array we are beginning at, where the expansion begins from. 
dim: int -", "# Finds intersections between boolean statements and # the minterms they cover #", "lom \"\"\" This is a helper function, existant in case I want to", "that they are \"the best candidate while len(minterms2bln): # We are looking at", "return covered_minterms, bln_expr class boolean_expression(object): def __init__(self, idx, mask): self.idx = idx self.mask", "find all the minterms related to the boolean minterms_correlated_to_bln = bln2minterms[required_bln] # Iterate", "Care, add it to the Covered Minterms if boolean_array[ns] is T: covered_minterms.add(ns) if", "is valid, add the expanding indices into list if new_index in boolean_array and", "evil day. required_blns.add( required_bln ) for i in todelete: del minterms2bln[i] # Now", "are looking at only a SINGLE minterm. # Scanning a subspace to decrease", "results. \"\"\" def Select_Minterm(minterms2bln): return list(minterms2bln.keys())[0] def main(dim): # # Define truth table", "covered_minterms.add(ns) if ns in minterms2bln: minterms2bln[ns].add(bln_expr) else: minterms2bln[ns] = {bln_expr} # Allow the", "boolean for related_bln in minterms2bln[correlated_minterm]: todelete.append((related_bln, correlated_minterm)) # Forreal, delete them del minterms2bln[correlated_minterm]", "for related_bln in minterms2bln[correlated_minterm]: todelete.append((related_bln, correlated_minterm)) # Forreal, delete them del minterms2bln[correlated_minterm] for", "related minterms for bln in minterms2bln[minterm]: if len(bln2minterms[bln]) > most: best_candidate = bln", "key is the boolean input represented in integer form (aka A^!B^!C^D -> 1001", "Expand_Cube(boolean_array, idx, dim, minterms2bln) bln2minterms[bln_expr] = covered_minterms # bln2minterms and minterms2bln # are", "False there is a third concept of Don'tCare which is being represented here", "the selected index # if it didn't pan out. So skip this one", "to minterms and it maps the minterms to the boolean expressions. 
Thus providing", "the boolean expression # with the greatest number of related minterms for bln", "in the idx we see above), and keeps track of which minterms are", "related_bln, correlated_minterm in todelete: bln2minterms[related_bln].remove(correlated_minterm) # The ndelete the aforementioned best candidate del", "Intersect( list_of_maps ): # # Finds intersections between boolean statements and # the", "dim2int # We're expanding the cube, verify that we're expanding it into #", "begins from. dim: int - The total number of dimensions we are operating", "contain the information regarding the dimension # that was just covered. bln_expr.mask +=", "timesteps. Each dictionary represents a boolean function. The key is the boolean input", "columns regarding the results found. Arguments: boolean array: dict (int, truth) - The", "= mask def __eq__(self, b): return self.idx == b.idx and self.mask == b.mask", "space = [idx] covered_minterms = {idx} if idx in minterms2bln: minterms2bln[idx].add(bln_expr) else: minterms2bln[idx]", "is the boolean input represented in integer form (aka A^!B^!C^D -> 1001 ->", "the code and allow different iteration over the inputs. 
If this returned randomly,", "{bln_expr} # Allow the Mask to contain the information regarding the dimension #", "- The defined set of minterms covered by the boolean expression bln_expr: boolean_expression", "most = 0 best_candidate = None # We determine the \"Best candidate\" as", "in todelete: bln2minterms[related_bln].remove(correlated_minterm) # The ndelete the aforementioned best candidate del bln2minterms[best_candidate] lom.append(required_blns)", "T, } tt.append(e) return tt \"\"\" This is a helper function, existant in", "__eq__(self, b): return self.idx == b.idx and self.mask == b.mask def __hash__(self): return", "out _perfectly_, keep going to # the the next index _continue = True", "idx self.mask = mask def __eq__(self, b): return self.idx == b.idx and self.mask", "covered_minterms, bln_expr = Expand_Cube(boolean_array, idx, dim, minterms2bln) bln2minterms[bln_expr] = covered_minterms # bln2minterms and", "truth_table = Get_Truth_Table(dim) # Perform the Expand operation on every output set list_of_maps", "either True, False, or Don't Care. Reading from a file not yet supported.", "boolean_array[new_index]: new_space.append(new_index) else: # If the new space doesn't pan out _perfectly_, keep", "to the next # dimension. if not _continue: # We like the new", "new # elements into it. space.extend(new_space) for ns in new_space: # If the", "found. Arguments: boolean array: dict (int, truth) - The key maps to the", "the inputs. If this returned randomly, the code would no longer necessarily output", "minterm, set_of_blns in itr_minterms2bln.items(): if len(set_of_blns) == 1: # WE found one! #", "a point and then a mask) that covered the aforementioned minterms. \"\"\" def", "space. 
If it is valid, add the expanding indices into list if new_index", "# # Finds intersections between boolean statements and # the minterms they cover", "\"boolean_expression({0}, {1})\".format(self.idx, self.mask) def __repr__(self): return self.__str__() def Expand(truth_table, dim): # # Iterate", "expressions. Return: covered_minterms: set - The defined set of minterms covered by the", "delete the entire boolean from the booly-books del bln2minterms[required_bln] todelete.append(minterm) # And remember", "truth table. AKA, the number of boolean variables. Return: tt (truth table): list", "the value is either True, False, or Don't Care. Reading from a file", "specifically # True and not just Don't Care, add it to the Covered", "= copy.deepcopy(list_of_maps) for bln2minterms, minterms2bln in itr_list_of_maps: # First we're going to look", "we get rid of booleans as we determine that they are \"the best", ") for i in todelete: del minterms2bln[i] # Now we get rid of", "9) and the value is either True, False, or Don't Care. Reading from", "Finds intersections between boolean statements and # the minterms they cover # lom", "# and keep everything in linear time. minterm = Select_Minterm(minterms2bln) most = 0", "if we are trying to reduce something for a 7 segment display over", "boolean minterms_correlated_to_bln = bln2minterms[best_candidate] # Iterate over them todelete = list() for correlated_minterm", "dimensionality of the desired output truth table. AKA, the number of boolean variables.", "len(bln2minterms[bln]) > most: best_candidate = bln most = len(bln2minterms[bln]) required_blns.add( best_candidate ) #", "the next # dimension. if not _continue: # We like the new dimension,", "\"is True\" but passes \"== True\". this is abused heavily. \"\"\" def Get_Truth_Table(LEN):", "or Don't Care. Reading from a file not yet supported. Do whatever you", "don't want to extend into the space of the selected index # if", "from a file not yet supported. 
Do whatever you want in here. Arguments:", "above), and keeps track of which minterms are related to what boolean expressions.", "ns in minterms2bln: minterms2bln[ns].add(bln_expr) else: minterms2bln[ns] = {bln_expr} # Allow the Mask to", "table # truth_table = Get_Truth_Table(dim) # Perform the Expand operation on every output", "computations fast. expr_per_output.append((bln2minterms, minterms2bln)) return expr_per_output def Intersect( list_of_maps ): # # Finds", "True and False there is a third concept of Don'tCare which is being", "dictionaries that are dually referent # in order to keep computations fast. expr_per_output.append((bln2minterms,", "minterms2bln[idx] = {bln_expr} # Iterate over the indices however we decide for i", "to contain the information regarding the dimension # that was just covered. bln_expr.mask", "passes \"== True\". this is abused heavily. \"\"\" def Get_Truth_Table(LEN): tt = list()", "the boolean minterms_correlated_to_bln = bln2minterms[best_candidate] # Iterate over them todelete = list() for", "longer necessarily output predictable results. \"\"\" def _Provide_Index(dim): return range(0, dim) \"\"\" Performs", "index in space: # MAGIC LINE # We need to turn 1s into", "index # if it didn't pan out. So skip this one and move", "list(minterms2bln.keys())[0] def main(dim): # # Define truth table # truth_table = Get_Truth_Table(dim) #", "- The boolean expression (defined by a point and then a mask) that", "required_blns.add( required_bln ) for i in todelete: del minterms2bln[i] # Now we get", "minterms2bln[ns].add(bln_expr) else: minterms2bln[ns] = {bln_expr} # Allow the Mask to contain the information", "a helper function, existant in case I want to expand the code and", "# If the value at the boolean array is specifically # True and", "boolean output # expr_per_output = list() for boolean_array in truth_table: bln2minterms= dict() minterms2bln", "out. 
So skip this one and move on to the next # dimension.", "to True, False, or DC idx: int - The space in the boolean", "third concept of Don'tCare which is being represented here as \"_\". It fails", "6: T, 7: T, } tt.append(e) return tt \"\"\" This is a helper", "the minterms to the boolean expressions. Thus providing the program a quick view", "cube through the Nspace Attempting to expand through ever dimension, one at a", "minterms to the boolean expressions. Thus providing the program a quick view of", "whether that is mapped to True, False, or DC idx: int - The", "indices however we decide for i in _Provide_Index(dim): # Control variable to exit", "i in _Provide_Index(dim): # Control variable to exit a loop _continue = False", "expr_per_output = list() for boolean_array in truth_table: bln2minterms= dict() minterms2bln = dict() for", "bln2minterms and minterms2bln # are two dictionaries that are dually referent # in", "to keep computations fast. expr_per_output.append((bln2minterms, minterms2bln)) return expr_per_output def Intersect( list_of_maps ): #", "copy.deepcopy(list_of_maps) for bln2minterms, minterms2bln in itr_list_of_maps: # First we're going to look for", "variable to exit a loop _continue = False # Convert the index into", "if len(set_of_blns) == 1: # WE found one! # Take it required_bln =", "related to what boolean expressions. Return: covered_minterms: set - The defined set of", "from the booly-books del bln2minterms[required_bln] todelete.append(minterm) # And remember what we've done on", "boolean array: dict (int, truth) - The key maps to the integer representation", "dictionary represents a boolean function. The key is the boolean input represented in", "a quick view of the rows and columns regarding the results found. Arguments:", "over a period of multiple timesteps. Each dictionary represents a boolean function. The", "function. The key is the boolean input represented in integer form (aka A^!B^!C^D", "being represented here as \"_\". 
It fails \"is True\" but passes \"== True\".", "Nspace Attempting to expand through ever dimension, one at a time. While it", "minterms related to the boolean minterms_correlated_to_bln = bln2minterms[required_bln] # Iterate over them for", "Generate the inputs for the algorithm. A list of dictionarys. Each element of", "and minterms2bln # are two dictionaries that are dually referent # in order", "correlated_minterm in minterms_correlated_to_bln: # and remove the boolean from their knowledge minterms2bln[correlated_minterm].remove(required_bln) #", "to expand the code and allow different iteration over the inputs. If this", "not yet supported. Do whatever you want in here. Arguments: LEN : integer", "like the new dimension, and are going to cover all the new #", "from. dim: int - The total number of dimensions we are operating in.", "is T: covered_minterms, bln_expr = Expand_Cube(boolean_array, idx, dim, minterms2bln) bln2minterms[bln_expr] = covered_minterms #", "T, 2: D, 3: T, 4: F, 5: F, 6: T, 7: T,", "b.mask def __hash__(self): return hash((self.idx, self.mask)) def __str__(self): return \"boolean_expression({0}, {1})\".format(self.idx, self.mask) def", "just Don't Care, add it to the Covered Minterms if boolean_array[ns] is T:", "of dictionarys. Each element of the list represents a different boolean expression input", "list represents a different boolean expression input -- say if we are trying", "in here. Arguments: LEN : integer - The dimensionality of the desired output", "representation of the inputs - The value points to whether that is mapped", "-> 1001 -> 9) and the value is either True, False, or Don't", "per output itr_list_of_maps = copy.deepcopy(list_of_maps) for bln2minterms, minterms2bln in itr_list_of_maps: # First we're", "what is a booleanish? in addition to True and False there is a", "day, this evil day. required_blns.add( required_bln ) for i in todelete: del minterms2bln[i]", "1: # WE found one! 
# Take it required_bln = set_of_blns.pop() # Now", "cover # lom = list() # Iterate over every solution-set per output itr_list_of_maps", "minterms they cover # lom = list() # Iterate over every solution-set per", "in order to keep computations fast. expr_per_output.append((bln2minterms, minterms2bln)) return expr_per_output def Intersect( list_of_maps", "LEN : integer - The dimensionality of the desired output truth table. AKA,", "_Provide_Index(dim): return range(0, dim) \"\"\" Performs the expansion of a cube through the", "inputs. If this returned randomly, the code would no longer necessarily output predictable", "it does this, it maps the boolean expressions to minterms and it maps", "If the new space doesn't pan out _perfectly_, keep going to # the", "Take it required_bln = set_of_blns.pop() # Now find all the minterms related to", "-> 9) and the value is either True, False, or Don't Care. Reading", "into list if new_index in boolean_array and boolean_array[new_index]: new_space.append(new_index) else: # If the", "is a booleanish? in addition to True and False there is a third", "Care. Reading from a file not yet supported. Do whatever you want in", "the the next index _continue = True break # We don't want to", "list if new_index in boolean_array and boolean_array[new_index]: new_space.append(new_index) else: # If the new", "bln2minterms= dict() minterms2bln = dict() for idx, bln in boolean_array.items(): if bln is", "A^!B^!C^D -> 1001 -> 9) and the value is either True, False, or", "boolean expression. 
required_blns = set() todelete = list() itr_minterms2bln = copy.deepcopy(minterms2bln) for minterm,", "return lom \"\"\" This is a helper function, existant in case I want", "tt = list() e = dict() e = { 0: T, 1: T,", "len(bln2minterms[bln]) required_blns.add( best_candidate ) # Now find all the minterms related to the", "This is a helper function, existant in case I want to expand the", "does this, it maps the boolean expressions to minterms and it maps the", "a mask) that covered the aforementioned minterms. \"\"\" def Expand_Cube(boolean_array, idx, dim, minterms2bln):", "# Define the space of the cube space = [idx] covered_minterms = {idx}", "a cube through the Nspace Attempting to expand through ever dimension, one at", "False, or DC idx: int - The space in the boolean array we", "# # Iterate over every boolean output # expr_per_output = list() for boolean_array", "over them todelete = list() for correlated_minterm in minterms_correlated_to_bln: # Delete all minterms", "Expand operation on every output set list_of_maps = Expand(truth_table, dim) list_of_covering_blns = Intersect(list_of_maps)", "22:08:31 2019 @author: iaricanli \"\"\" import copy T = True F = False", "same as in the idx we see above), and keeps track of which", "dim, minterms2bln): bln_expr = boolean_expression(idx, 0) # Define the space of the cube", "# And remember what we've done on this day, this evil day. required_blns.add(", "this returned randomly, the code would no longer necessarily output predictable results. \"\"\"", "space of the cube space = [idx] covered_minterms = {idx} if idx in", "boolean expression bln_expr: boolean_expression - The boolean expression (defined by a point and", "and move on to the next # dimension. 
if not _continue: # We", "If this returned randomly, the code would no longer necessarily output predictable results.", "inputs - The value points to whether that is mapped to True, False,", "doesn't pan out _perfectly_, keep going to # the the next index _continue", "everything in linear time. minterm = Select_Minterm(minterms2bln) most = 0 best_candidate = None", "Iterate over them todelete = list() for correlated_minterm in minterms_correlated_to_bln: # Delete all", "covered_minterms, bln_expr class boolean_expression(object): def __init__(self, idx, mask): self.idx = idx self.mask =", "covered. bln_expr.mask += dim2int return covered_minterms, bln_expr class boolean_expression(object): def __init__(self, idx, mask):", "file not yet supported. Do whatever you want in here. Arguments: LEN :", "candidate\" as the boolean expression # with the greatest number of related minterms", "= set_of_blns.pop() # Now find all the minterms related to the boolean minterms_correlated_to_bln", "we are operating in. # REFERENCED BY VALUE minterms2bln: dict (int, boolean_expression) -", "Now we get rid of booleans as we determine that they are \"the", "in minterms2bln: minterms2bln[ns].add(bln_expr) else: minterms2bln[ns] = {bln_expr} # Allow the Mask to contain", "on every output set list_of_maps = Expand(truth_table, dim) list_of_covering_blns = Intersect(list_of_maps) return list_of_covering_blns", "to reduce something for a 7 segment display over a period of multiple", "multiple timesteps. Each dictionary represents a boolean function. The key is the boolean", "# lom = list() # Iterate over every solution-set per output itr_list_of_maps =", "maps to only one boolean expression. 
required_blns = set() todelete = list() itr_minterms2bln", "Thus providing the program a quick view of the rows and columns regarding", "dict() minterms2bln = dict() for idx, bln in boolean_array.items(): if bln is T:", "the boolean from their knowledge minterms2bln[correlated_minterm].remove(required_bln) # Then delete the entire boolean from", "what boolean expressions. Return: covered_minterms: set - The defined set of minterms covered", "over every boolean output # expr_per_output = list() for boolean_array in truth_table: bln2minterms=", "0s into 1s, depending on the index new_index = index ^ dim2int #", "dictionarys. Each element of the list represents a different boolean expression input --", "# if it didn't pan out. So skip this one and move on", "are trying to reduce something for a 7 segment display over a period", "Attempting to expand through ever dimension, one at a time. While it does", "Iterate over every solution-set per output itr_list_of_maps = copy.deepcopy(list_of_maps) for bln2minterms, minterms2bln in", "all the minterms related to the boolean minterms_correlated_to_bln = bln2minterms[best_candidate] # Iterate over", "we decide for i in _Provide_Index(dim): # Control variable to exit a loop", "through ever dimension, one at a time. While it does this, it maps", "the index new_index = index ^ dim2int # We're expanding the cube, verify", "- Maps the minterms (tracked by integer -- same as in the idx", "boolean_expression - The boolean expression (defined by a point and then a mask)", "self.mask) def __repr__(self): return self.__str__() def Expand(truth_table, dim): # # Iterate over every", "one at a time. While it does this, it maps the boolean expressions", "in _Provide_Index(dim): # Control variable to exit a loop _continue = False #", "new_index = index ^ dim2int # We're expanding the cube, verify that we're", "and are going to cover all the new # elements into it. space.extend(new_space)", "True\" but passes \"== True\". 
this is abused heavily. \"\"\" def Get_Truth_Table(LEN): tt", "space: # MAGIC LINE # We need to turn 1s into 0s and", "dimension # that was just covered. bln_expr.mask += dim2int return covered_minterms, bln_expr class", "the new dimension, and are going to cover all the new # elements", "expression. required_blns = set() todelete = list() itr_minterms2bln = copy.deepcopy(minterms2bln) for minterm, set_of_blns", "to turn 1s into 0s and 0s into 1s, depending on the index", "# MAGIC LINE # We need to turn 1s into 0s and 0s", "truth_table: bln2minterms= dict() minterms2bln = dict() for idx, bln in boolean_array.items(): if bln", "def __str__(self): return \"boolean_expression({0}, {1})\".format(self.idx, self.mask) def __repr__(self): return self.__str__() def Expand(truth_table, dim):", "with the greatest number of related minterms for bln in minterms2bln[minterm]: if len(bln2minterms[bln])", "# expr_per_output = list() for boolean_array in truth_table: bln2minterms= dict() minterms2bln = dict()", "the \"Best candidate\" as the boolean expression # with the greatest number of", "{1})\".format(self.idx, self.mask) def __repr__(self): return self.__str__() def Expand(truth_table, dim): # # Iterate over", "whatever you want in here. Arguments: LEN : integer - The dimensionality of", "are \"the best candidate while len(minterms2bln): # We are looking at only a", "== 1: # WE found one! 
# Take it required_bln = set_of_blns.pop() #", "\"\"\" This is a helper function, existant in case I want to expand", "@author: iaricanli \"\"\" import copy T = True F = False D =", "boolean_expression) - Maps the minterms (tracked by integer -- same as in the", "dim2int return covered_minterms, bln_expr class boolean_expression(object): def __init__(self, idx, mask): self.idx = idx", "for minterm, set_of_blns in itr_minterms2bln.items(): if len(set_of_blns) == 1: # WE found one!", "main(dim): # # Define truth table # truth_table = Get_Truth_Table(dim) # Perform the", "segment display over a period of multiple timesteps. Each dictionary represents a boolean", "_continue = False # Convert the index into the representitive integer dim2int =", "for a 7 segment display over a period of multiple timesteps. Each dictionary", "results. \"\"\" def _Provide_Index(dim): return range(0, dim) \"\"\" Performs the expansion of a", "Now find all the minterms related to the boolean minterms_correlated_to_bln = bln2minterms[required_bln] #", "computation time # and keep everything in linear time. minterm = Select_Minterm(minterms2bln) most", "not just Don't Care, add it to the Covered Minterms if boolean_array[ns] is", "into # valid space. If it is valid, add the expanding indices into", "return tt \"\"\" This is a helper function, existant in case I want", "concept of Don'tCare which is being represented here as \"_\". It fails \"is", "iaricanli \"\"\" import copy T = True F = False D = \"_\"", "dim) \"\"\" Performs the expansion of a cube through the Nspace Attempting to", "to whether that is mapped to True, False, or DC idx: int -", "exit a loop _continue = False # Convert the index into the representitive", "into the space of the selected index # if it didn't pan out.", "the desired output truth table. AKA, the number of boolean variables. 
Return: tt", "2: D, 3: T, 4: F, 5: F, 6: T, 7: T, }", "return self.__str__() def Expand(truth_table, dim): # # Iterate over every boolean output #", "order to keep computations fast. expr_per_output.append((bln2minterms, minterms2bln)) return expr_per_output def Intersect( list_of_maps ):", "space being explored new_space = list() for index in space: # MAGIC LINE", "cube, verify that we're expanding it into # valid space. If it is", "remember what we've done on this day, this evil day. required_blns.add( required_bln )", "todelete = list() for correlated_minterm in minterms_correlated_to_bln: # Delete all minterms correlated to", "predictable results. \"\"\" def _Provide_Index(dim): return range(0, dim) \"\"\" Performs the expansion of", "idx, mask): self.idx = idx self.mask = mask def __eq__(self, b): return self.idx", "in case I want to expand the code and allow different iteration over", "itr_list_of_maps = copy.deepcopy(list_of_maps) for bln2minterms, minterms2bln in itr_list_of_maps: # First we're going to", "integer form (aka A^!B^!C^D -> 1001 -> 9) and the value is either", "verify that we're expanding it into # valid space. If it is valid,", "def main(dim): # # Define truth table # truth_table = Get_Truth_Table(dim) # Perform", "into it. space.extend(new_space) for ns in new_space: # If the value at the", "Get_Truth_Table(dim) # Perform the Expand operation on every output set list_of_maps = Expand(truth_table,", "that are dually referent # in order to keep computations fast. expr_per_output.append((bln2minterms, minterms2bln))", "point and then a mask) that covered the aforementioned minterms. 
\"\"\" def Expand_Cube(boolean_array,", "idx in minterms2bln: minterms2bln[idx].add(bln_expr) else: minterms2bln[idx] = {bln_expr} # Iterate over the indices", "it to the Covered Minterms if boolean_array[ns] is T: covered_minterms.add(ns) if ns in", "list_of_maps ): # # Finds intersections between boolean statements and # the minterms", "for idx, bln in boolean_array.items(): if bln is T: covered_minterms, bln_expr = Expand_Cube(boolean_array,", "the program a quick view of the rows and columns regarding the results", "self.mask == b.mask def __hash__(self): return hash((self.idx, self.mask)) def __str__(self): return \"boolean_expression({0}, {1})\".format(self.idx,", "Select_Minterm(minterms2bln): return list(minterms2bln.keys())[0] def main(dim): # # Define truth table # truth_table =", "{ 0: T, 1: T, 2: D, 3: T, 4: F, 5: F,", "minterms2bln = dict() for idx, bln in boolean_array.items(): if bln is T: covered_minterms,", "case I want to expand the code and allow different iteration over the", "We determine the \"Best candidate\" as the boolean expression # with the greatest", "to extend into the space of the selected index # if it didn't", "the representitive integer dim2int = 2**i # The space being explored new_space =", "delete them del minterms2bln[correlated_minterm] for related_bln, correlated_minterm in todelete: bln2minterms[related_bln].remove(correlated_minterm) # The ndelete", "# We like the new dimension, and are going to cover all the", "information regarding the dimension # that was just covered. bln_expr.mask += dim2int return", "And remember what we've done on this day, this evil day. required_blns.add( required_bln", "candidate del bln2minterms[best_candidate] lom.append(required_blns) return lom \"\"\" This is a helper function, existant", "copy T = True F = False D = \"_\" \"\"\" Generate the", "we are trying to reduce something for a 7 segment display over a", "\"_\". It fails \"is True\" but passes \"== True\". 
this is abused heavily.", "# Iterate over them for correlated_minterm in minterms_correlated_to_bln: # and remove the boolean", "index ^ dim2int # We're expanding the cube, verify that we're expanding it", "= idx self.mask = mask def __eq__(self, b): return self.idx == b.idx and", "idx we see above), and keeps track of which minterms are related to", "boolean array we are beginning at, where the expansion begins from. dim: int", "the rows and columns regarding the results found. Arguments: boolean array: dict (int,", "this day, this evil day. required_blns.add( required_bln ) for i in todelete: del", "boolean expression input -- say if we are trying to reduce something for", "set_of_blns in itr_minterms2bln.items(): if len(set_of_blns) == 1: # WE found one! # Take", "all minterms correlated to the highest-scoring boolean for related_bln in minterms2bln[correlated_minterm]: todelete.append((related_bln, correlated_minterm))", "booleanish? in addition to True and False there is a third concept of", "candidate while len(minterms2bln): # We are looking at only a SINGLE minterm. #", "def __hash__(self): return hash((self.idx, self.mask)) def __str__(self): return \"boolean_expression({0}, {1})\".format(self.idx, self.mask) def __repr__(self):", "get rid of booleans as we determine that they are \"the best candidate", "minterms for bln in minterms2bln[minterm]: if len(bln2minterms[bln]) > most: best_candidate = bln most", "def _Provide_Index(dim): return range(0, dim) \"\"\" Performs the expansion of a cube through", "the integer representation of the inputs - The value points to whether that", "minterm # maps to only one boolean expression. required_blns = set() todelete =", "# that was just covered. 
bln_expr.mask += dim2int return covered_minterms, bln_expr class boolean_expression(object):", "most = len(bln2minterms[bln]) required_blns.add( best_candidate ) # Now find all the minterms related", "over every solution-set per output itr_list_of_maps = copy.deepcopy(list_of_maps) for bln2minterms, minterms2bln in itr_list_of_maps:", "Now find all the minterms related to the boolean minterms_correlated_to_bln = bln2minterms[best_candidate] #", "the algorithm. A list of dictionarys. Each element of the list represents a", "T: covered_minterms.add(ns) if ns in minterms2bln: minterms2bln[ns].add(bln_expr) else: minterms2bln[ns] = {bln_expr} # Allow", "into 1s, depending on the index new_index = index ^ dim2int # We're", "heavily. \"\"\" def Get_Truth_Table(LEN): tt = list() e = dict() e = {", "boolean statements and # the minterms they cover # lom = list() #", "minterms_correlated_to_bln = bln2minterms[best_candidate] # Iterate over them todelete = list() for correlated_minterm in", "is either True, False, or Don't Care. Reading from a file not yet", "Minterms if boolean_array[ns] is T: covered_minterms.add(ns) if ns in minterms2bln: minterms2bln[ns].add(bln_expr) else: minterms2bln[ns]", "of minterms covered by the boolean expression bln_expr: boolean_expression - The boolean expression", "case where a minterm # maps to only one boolean expression. required_blns =", "total number of dimensions we are operating in. # REFERENCED BY VALUE minterms2bln:", "covered the aforementioned minterms. \"\"\" def Expand_Cube(boolean_array, idx, dim, minterms2bln): bln_expr = boolean_expression(idx,", "to the boolean expressions. Thus providing the program a quick view of the", "rid of booleans as we determine that they are \"the best candidate while", "= dict() e = { 0: T, 1: T, 2: D, 3: T,", "expressions to minterms and it maps the minterms to the boolean expressions. 
Thus", "integer representation of the inputs - The value points to whether that is", "\"\"\" def Get_Truth_Table(LEN): tt = list() e = dict() e = { 0:", "not _continue: # We like the new dimension, and are going to cover", "2019 @author: iaricanli \"\"\" import copy T = True F = False D", "to the integer representation of the inputs - The value points to whether", "as in the idx we see above), and keeps track of which minterms", "\"\"\" Performs the expansion of a cube through the Nspace Attempting to expand", "The boolean expression (defined by a point and then a mask) that covered", "it. space.extend(new_space) for ns in new_space: # If the value at the boolean", "the minterms they cover # lom = list() # Iterate over every solution-set", "dim2int = 2**i # The space being explored new_space = list() for index", "turn 1s into 0s and 0s into 1s, depending on the index new_index", "bln_expr = boolean_expression(idx, 0) # Define the space of the cube space =", "in integer form (aka A^!B^!C^D -> 1001 -> 9) and the value is", "as we determine that they are \"the best candidate while len(minterms2bln): # We", "minterms2bln[i] # Now we get rid of booleans as we determine that they", "else: # If the new space doesn't pan out _perfectly_, keep going to", "and # the minterms they cover # lom = list() # Iterate over", "boolean_array[ns] is T: covered_minterms.add(ns) if ns in minterms2bln: minterms2bln[ns].add(bln_expr) else: minterms2bln[ns] = {bln_expr}", "# Iterate over them todelete = list() for correlated_minterm in minterms_correlated_to_bln: # Delete", "= \"_\" \"\"\" Generate the inputs for the algorithm. 
A list of dictionarys.", "itr_minterms2bln = copy.deepcopy(minterms2bln) for minterm, set_of_blns in itr_minterms2bln.items(): if len(set_of_blns) == 1: #", "highest-scoring boolean for related_bln in minterms2bln[correlated_minterm]: todelete.append((related_bln, correlated_minterm)) # Forreal, delete them del", "return hash((self.idx, self.mask)) def __str__(self): return \"boolean_expression({0}, {1})\".format(self.idx, self.mask) def __repr__(self): return self.__str__()", "operation on every output set list_of_maps = Expand(truth_table, dim) list_of_covering_blns = Intersect(list_of_maps) return", "Arguments: boolean array: dict (int, truth) - The key maps to the integer", "expansion of a cube through the Nspace Attempting to expand through ever dimension,", "minterms2bln[idx].add(bln_expr) else: minterms2bln[idx] = {bln_expr} # Iterate over the indices however we decide", "the expansion of a cube through the Nspace Attempting to expand through ever", "intersections between boolean statements and # the minterms they cover # lom =", "covered_minterms # bln2minterms and minterms2bln # are two dictionaries that are dually referent", "# We're expanding the cube, verify that we're expanding it into # valid", "the next index _continue = True break # We don't want to extend", "# Allow the Mask to contain the information regarding the dimension # that", "dict() for idx, bln in boolean_array.items(): if bln is T: covered_minterms, bln_expr =", "say if we are trying to reduce something for a 7 segment display", "need to turn 1s into 0s and 0s into 1s, depending on the", "to cover all the new # elements into it. space.extend(new_space) for ns in", "boolean expressions. 
Thus providing the program a quick view of the rows and", "we see above), and keeps track of which minterms are related to what", "bln in minterms2bln[minterm]: if len(bln2minterms[bln]) > most: best_candidate = bln most = len(bln2minterms[bln])", "mapped to True, False, or DC idx: int - The space in the", "del bln2minterms[required_bln] todelete.append(minterm) # And remember what we've done on this day, this", "# Take it required_bln = set_of_blns.pop() # Now find all the minterms related", "# If the new space doesn't pan out _perfectly_, keep going to #", "for related_bln, correlated_minterm in todelete: bln2minterms[related_bln].remove(correlated_minterm) # The ndelete the aforementioned best candidate", "dimension, and are going to cover all the new # elements into it.", "quick view of the rows and columns regarding the results found. Arguments: boolean", "and keep everything in linear time. minterm = Select_Minterm(minterms2bln) most = 0 best_candidate", "# maps to only one boolean expression. required_blns = set() todelete = list()", "next # dimension. if not _continue: # We like the new dimension, and", "Don'tCare which is being represented here as \"_\". It fails \"is True\" but", "Define truth table # truth_table = Get_Truth_Table(dim) # Perform the Expand operation on", "bln_expr: boolean_expression - The boolean expression (defined by a point and then a", "every boolean output # expr_per_output = list() for boolean_array in truth_table: bln2minterms= dict()", "The key is the boolean input represented in integer form (aka A^!B^!C^D ->", "else: minterms2bln[ns] = {bln_expr} # Allow the Mask to contain the information regarding", "was just covered. bln_expr.mask += dim2int return covered_minterms, bln_expr class boolean_expression(object): def __init__(self,", "view of the rows and columns regarding the results found. Arguments: boolean array:", "if it didn't pan out. So skip this one and move on to", "the boolean expressions. 
Thus providing the program a quick view of the rows", "F = False D = \"_\" \"\"\" Generate the inputs for the algorithm.", "really is what is a booleanish? in addition to True and False there", "(aka A^!B^!C^D -> 1001 -> 9) and the value is either True, False,", "Return: tt (truth table): list (dict (int, Booleanish)) - The only question here", "maps to the integer representation of the inputs - The value points to", "any case where a minterm # maps to only one boolean expression. required_blns", "beginning at, where the expansion begins from. dim: int - The total number", "e = { 0: T, 1: T, 2: D, 3: T, 4: F,", "a booleanish? in addition to True and False there is a third concept", "represented here as \"_\". It fails \"is True\" but passes \"== True\". this", "# True and not just Don't Care, add it to the Covered Minterms", "overall computation time # and keep everything in linear time. minterm = Select_Minterm(minterms2bln)", "going to cover all the new # elements into it. space.extend(new_space) for ns", "on Wed Mar 20 22:08:31 2019 @author: iaricanli \"\"\" import copy T =", "on to the next # dimension. if not _continue: # We like the", "# with the greatest number of related minterms for bln in minterms2bln[minterm]: if", "find all the minterms related to the boolean minterms_correlated_to_bln = bln2minterms[best_candidate] # Iterate", "Convert the index into the representitive integer dim2int = 2**i # The space", "= { 0: T, 1: T, 2: D, 3: T, 4: F, 5:", "# The space being explored new_space = list() for index in space: #", "a 7 segment display over a period of multiple timesteps. Each dictionary represents", "this evil day. required_blns.add( required_bln ) for i in todelete: del minterms2bln[i] #", "# Delete all minterms correlated to the highest-scoring boolean for related_bln in minterms2bln[correlated_minterm]:", "fast. 
expr_per_output.append((bln2minterms, minterms2bln)) return expr_per_output def Intersect( list_of_maps ): # # Finds intersections", "# # Define truth table # truth_table = Get_Truth_Table(dim) # Perform the Expand", "is a helper function, existant in case I want to expand the code", "iteration over the inputs. If this returned randomly, the code would no longer", "a SINGLE minterm. # Scanning a subspace to decrease overall computation time #", "expanding indices into list if new_index in boolean_array and boolean_array[new_index]: new_space.append(new_index) else: #", "expand the code and allow different iteration over the inputs. If this returned", "minterms2bln[correlated_minterm].remove(required_bln) # Then delete the entire boolean from the booly-books del bln2minterms[required_bln] todelete.append(minterm)", "longer necessarily output predictable results. \"\"\" def Select_Minterm(minterms2bln): return list(minterms2bln.keys())[0] def main(dim): #", "want in here. Arguments: LEN : integer - The dimensionality of the desired", "{idx} if idx in minterms2bln: minterms2bln[idx].add(bln_expr) else: minterms2bln[idx] = {bln_expr} # Iterate over", "over the indices however we decide for i in _Provide_Index(dim): # Control variable", "minterms_correlated_to_bln = bln2minterms[required_bln] # Iterate over them for correlated_minterm in minterms_correlated_to_bln: # and", "todelete: del minterms2bln[i] # Now we get rid of booleans as we determine", "truth table # truth_table = Get_Truth_Table(dim) # Perform the Expand operation on every", "(truth table): list (dict (int, Booleanish)) - The only question here really is", "The key maps to the integer representation of the inputs - The value", "mask) that covered the aforementioned minterms. 
\"\"\" def Expand_Cube(boolean_array, idx, dim, minterms2bln): bln_expr", "1s into 0s and 0s into 1s, depending on the index new_index =", "determine that they are \"the best candidate while len(minterms2bln): # We are looking", "boolean_array.items(): if bln is T: covered_minterms, bln_expr = Expand_Cube(boolean_array, idx, dim, minterms2bln) bln2minterms[bln_expr]", "dimensions we are operating in. # REFERENCED BY VALUE minterms2bln: dict (int, boolean_expression)", "1s, depending on the index new_index = index ^ dim2int # We're expanding", "= list() # Iterate over every solution-set per output itr_list_of_maps = copy.deepcopy(list_of_maps) for", "The ndelete the aforementioned best candidate del bln2minterms[best_candidate] lom.append(required_blns) return lom \"\"\" This", "you want in here. Arguments: LEN : integer - The dimensionality of the", "minterms2bln: dict (int, boolean_expression) - Maps the minterms (tracked by integer -- same", "all the new # elements into it. space.extend(new_space) for ns in new_space: #", "pan out. So skip this one and move on to the next #", "new_space = list() for index in space: # MAGIC LINE # We need", "(defined by a point and then a mask) that covered the aforementioned minterms.", "self.mask = mask def __eq__(self, b): return self.idx == b.idx and self.mask ==", "\"Best candidate\" as the boolean expression # with the greatest number of related", "loop _continue = False # Convert the index into the representitive integer dim2int", "> most: best_candidate = bln most = len(bln2minterms[bln]) required_blns.add( best_candidate ) # Now", "at only a SINGLE minterm. # Scanning a subspace to decrease overall computation", "representitive integer dim2int = 2**i # The space being explored new_space = list()", "the aforementioned minterms. 
\"\"\" def Expand_Cube(boolean_array, idx, dim, minterms2bln): bln_expr = boolean_expression(idx, 0)", "the indices however we decide for i in _Provide_Index(dim): # Control variable to", "Define the space of the cube space = [idx] covered_minterms = {idx} if", "providing the program a quick view of the rows and columns regarding the", "_Provide_Index(dim): # Control variable to exit a loop _continue = False # Convert", "the entire boolean from the booly-books del bln2minterms[required_bln] todelete.append(minterm) # And remember what", "We are looking at only a SINGLE minterm. # Scanning a subspace to", "dimension, one at a time. While it does this, it maps the boolean", "new space doesn't pan out _perfectly_, keep going to # the the next", "time. minterm = Select_Minterm(minterms2bln) most = 0 best_candidate = None # We determine", "best_candidate = None # We determine the \"Best candidate\" as the boolean expression", "= Select_Minterm(minterms2bln) most = 0 best_candidate = None # We determine the \"Best", "# and remove the boolean from their knowledge minterms2bln[correlated_minterm].remove(required_bln) # Then delete the", "required_blns.add( best_candidate ) # Now find all the minterms related to the boolean", "# Now find all the minterms related to the boolean minterms_correlated_to_bln = bln2minterms[best_candidate]", "where the expansion begins from. dim: int - The total number of dimensions", "it is valid, add the expanding indices into list if new_index in boolean_array", "day. required_blns.add( required_bln ) for i in todelete: del minterms2bln[i] # Now we", "# valid space. If it is valid, add the expanding indices into list", "boolean_array and boolean_array[new_index]: new_space.append(new_index) else: # If the new space doesn't pan out", "code would no longer necessarily output predictable results. 
\"\"\" def _Provide_Index(dim): return range(0,", "new dimension, and are going to cover all the new # elements into", "coding: utf-8 -*- \"\"\" Created on Wed Mar 20 22:08:31 2019 @author: iaricanli", "T = True F = False D = \"_\" \"\"\" Generate the inputs", "Reading from a file not yet supported. Do whatever you want in here.", "\"== True\". this is abused heavily. \"\"\" def Get_Truth_Table(LEN): tt = list() e", "False # Convert the index into the representitive integer dim2int = 2**i #", "from their knowledge minterms2bln[correlated_minterm].remove(required_bln) # Then delete the entire boolean from the booly-books", "over the inputs. If this returned randomly, the code would no longer necessarily", "the index into the representitive integer dim2int = 2**i # The space being", "we've done on this day, this evil day. required_blns.add( required_bln ) for i", "element of the list represents a different boolean expression input -- say if", "-*- \"\"\" Created on Wed Mar 20 22:08:31 2019 @author: iaricanli \"\"\" import", "LINE # We need to turn 1s into 0s and 0s into 1s,", "dually referent # in order to keep computations fast. expr_per_output.append((bln2minterms, minterms2bln)) return expr_per_output", "itr_list_of_maps: # First we're going to look for any case where a minterm", "at a time. While it does this, it maps the boolean expressions to", "integer - The dimensionality of the desired output truth table. AKA, the number", "dim, minterms2bln) bln2minterms[bln_expr] = covered_minterms # bln2minterms and minterms2bln # are two dictionaries", "this one and move on to the next # dimension. if not _continue:", "4: F, 5: F, 6: T, 7: T, } tt.append(e) return tt \"\"\"", "# bln2minterms and minterms2bln # are two dictionaries that are dually referent #", "cover all the new # elements into it. space.extend(new_space) for ns in new_space:", "set - The defined set of minterms covered by the boolean expression bln_expr:", "Do whatever you want in here. 
Arguments: LEN : integer - The dimensionality", "the Nspace Attempting to expand through ever dimension, one at a time. While", "int - The space in the boolean array we are beginning at, where", "boolean expression (defined by a point and then a mask) that covered the", "minterms2bln[correlated_minterm]: todelete.append((related_bln, correlated_minterm)) # Forreal, delete them del minterms2bln[correlated_minterm] for related_bln, correlated_minterm in", "idx: int - The space in the boolean array we are beginning at,", "is being represented here as \"_\". It fails \"is True\" but passes \"==", "are beginning at, where the expansion begins from. dim: int - The total", "rows and columns regarding the results found. Arguments: boolean array: dict (int, truth)", "table. AKA, the number of boolean variables. Return: tt (truth table): list (dict", "bln in boolean_array.items(): if bln is T: covered_minterms, bln_expr = Expand_Cube(boolean_array, idx, dim,", "entire boolean from the booly-books del bln2minterms[required_bln] todelete.append(minterm) # And remember what we've", "def Select_Minterm(minterms2bln): return list(minterms2bln.keys())[0] def main(dim): # # Define truth table # truth_table", "Perform the Expand operation on every output set list_of_maps = Expand(truth_table, dim) list_of_covering_blns", "^ dim2int # We're expanding the cube, verify that we're expanding it into", "_perfectly_, keep going to # the the next index _continue = True break", "return list(minterms2bln.keys())[0] def main(dim): # # Define truth table # truth_table = Get_Truth_Table(dim)", "ns in new_space: # If the value at the boolean array is specifically", "boolean array is specifically # True and not just Don't Care, add it", "boolean function. 
The key is the boolean input represented in integer form (aka", "def Intersect( list_of_maps ): # # Finds intersections between boolean statements and #", "new_space: # If the value at the boolean array is specifically # True", "if len(bln2minterms[bln]) > most: best_candidate = bln most = len(bln2minterms[bln]) required_blns.add( best_candidate )", "keep going to # the the next index _continue = True break #", "import copy T = True F = False D = \"_\" \"\"\" Generate", "time. While it does this, it maps the boolean expressions to minterms and", "idx, dim, minterms2bln): bln_expr = boolean_expression(idx, 0) # Define the space of the", "space in the boolean array we are beginning at, where the expansion begins", "required_blns = set() todelete = list() itr_minterms2bln = copy.deepcopy(minterms2bln) for minterm, set_of_blns in", "copy.deepcopy(minterms2bln) for minterm, set_of_blns in itr_minterms2bln.items(): if len(set_of_blns) == 1: # WE found", "cube space = [idx] covered_minterms = {idx} if idx in minterms2bln: minterms2bln[idx].add(bln_expr) else:", "hash((self.idx, self.mask)) def __str__(self): return \"boolean_expression({0}, {1})\".format(self.idx, self.mask) def __repr__(self): return self.__str__() def", "in truth_table: bln2minterms= dict() minterms2bln = dict() for idx, bln in boolean_array.items(): if", "expansion begins from. dim: int - The total number of dimensions we are", "in minterms_correlated_to_bln: # Delete all minterms correlated to the highest-scoring boolean for related_bln", "here as \"_\". It fails \"is True\" but passes \"== True\". this is", "of the selected index # if it didn't pan out. So skip this", "in boolean_array and boolean_array[new_index]: new_space.append(new_index) else: # If the new space doesn't pan", "all the minterms related to the boolean minterms_correlated_to_bln = bln2minterms[required_bln] # Iterate over", "looking at only a SINGLE minterm. 
# Scanning a subspace to decrease overall", "a subspace to decrease overall computation time # and keep everything in linear", "+= dim2int return covered_minterms, bln_expr class boolean_expression(object): def __init__(self, idx, mask): self.idx =", "(tracked by integer -- same as in the idx we see above), and", "= dict() for idx, bln in boolean_array.items(): if bln is T: covered_minterms, bln_expr", "# REFERENCED BY VALUE minterms2bln: dict (int, boolean_expression) - Maps the minterms (tracked", "something for a 7 segment display over a period of multiple timesteps. Each", "that covered the aforementioned minterms. \"\"\" def Expand_Cube(boolean_array, idx, dim, minterms2bln): bln_expr =" ]
[ "\"<KEY>\", \"pizda_tebe\": \"<KEY>\", \"xui\": \"<KEY>\", \"net_xua\": \"<KEY>\" }) BASE_DIR = Path(__file__).parent.parent LOCALES_DIR =", "self.config = yaml.full_load(file.read()) except Exception: self.config = {} self.environ = environ def get(self,", "default=[ \"ru\", \"en\", \"sv\", \"de\", \"ce\", \"tt\", \"ba\", \"pl\", \"uk\", \"be\", \"es\", \"he\",", "encoding=\"UTF-8\") as file: self.config = yaml.full_load(file.read()) except Exception: self.config = {} self.environ =", "yaml.full_load(file.read()) except Exception: self.config = {} self.environ = environ def get(self, param, default=None):", "file_path=\"config.yml\"): try: with open(file_path, encoding=\"UTF-8\") as file: self.config = yaml.full_load(file.read()) except Exception: self.config", "from os import environ class Config: def __init__(self, file_path=\"config.yml\"): try: with open(file_path, encoding=\"UTF-8\")", "\"ru\", \"en\", \"sv\", \"de\", \"ce\", \"tt\", \"ba\", \"pl\", \"uk\", \"be\", \"es\", \"he\", \"xh\",", "]) config.get(\"eggs\", default=[ {\"commands\": [\"java1\"], \"audio\": \"java.ogg\"}, {\"commands\": [\"cool_music\"], \"audio\": \"music.ogg\"}, {\"commands\": [\"cum\"],", "= ( self.environ.get(param.upper()) or self.config.get(param, default)) config = Config() config.get(\"db_path\", default=\"jdanbot.db\") config.get(\"delay\", default=30)", "config.get(\"vk_channels\", default=()) config.get(\"access_token\", default=\"\") config.get(\"katz_bots\", default=False) config.get(\"youtube\", default=False) config.get(\"youtube_channels\", default=()) config.get(\"youtube_key\", default=None) config.get(\"langs_list\",", "def __init__(self, file_path=\"config.yml\"): try: with open(file_path, encoding=\"UTF-8\") as file: self.config = yaml.full_load(file.read()) except", "\"en\", \"sv\", \"de\", \"ce\", \"tt\", \"ba\", \"pl\", \"uk\", \"be\", \"es\", \"he\", \"xh\", \"ab\"])", "\"he\", \"xh\", \"ab\"]) config.get(\"unique_commands\", default={ \"ru\": [\"wikiru2\", 
\"w\", \"wiki\"], \"en\": [\"van\", \"wen\", \"v\"],", "\"java.ogg\"}, {\"commands\": [\"cool_music\"], \"audio\": \"music.ogg\"}, {\"commands\": [\"cum\"], \"audio\": \"cum.ogg\"}, {\"commands\": [\"longcum\"], \"audio\": \"longcum.ogg\"},", "config.get(\"db_path\", default=\"jdanbot.db\") config.get(\"delay\", default=30) config.get(\"rss_feeds\", default=[]) config.get(\"rss\", default=False) config.get(\"image_path\", default=\"bot/cache/{image}.jpg\") config.get(\"token\") config.get(\"status\", default=\"unknown\")", "config.get(\"youtube_channels\", default=()) config.get(\"youtube_key\", default=None) config.get(\"langs_list\", default=[ \"ru\", \"en\", \"sv\", \"de\", \"ce\", \"tt\", \"ba\",", "\"uk\", \"be\", \"es\", \"he\", \"xh\", \"ab\"]) config.get(\"unique_commands\", default={ \"ru\": [\"wikiru2\", \"w\", \"wiki\"], \"en\":", "except Exception: self.config = {} self.environ = environ def get(self, param, default=None): globals()[param.upper()]", "self.environ.get(param.upper()) or self.config.get(param, default)) config = Config() config.get(\"db_path\", default=\"jdanbot.db\") config.get(\"delay\", default=30) config.get(\"rss_feeds\", default=[])", "= {} self.environ = environ def get(self, param, default=None): globals()[param.upper()] = ( self.environ.get(param.upper())", "default=\"unknown\") config.get(\"vk\", default=False) config.get(\"vk_channels\", default=()) config.get(\"access_token\", default=\"\") config.get(\"katz_bots\", default=False) config.get(\"youtube\", default=False) config.get(\"youtube_channels\", default=())", "\"audio\": \"longcum.ogg\"}, {\"commands\": [\"frog\"], \"audio\": \"lyagushka.ogg\"}]) config.get(\"stickers\", { \"pizda\": \"<KEY>\", \"net_pizdy\": \"<KEY>\", \"pizda_tebe\":", "config.get(\"youtube\", default=False) config.get(\"youtube_channels\", default=()) config.get(\"youtube_key\", default=None) config.get(\"langs_list\", default=[ \"ru\", \"en\", \"sv\", \"de\", \"ce\",", "config = Config() 
config.get(\"db_path\", default=\"jdanbot.db\") config.get(\"delay\", default=30) config.get(\"rss_feeds\", default=[]) config.get(\"rss\", default=False) config.get(\"image_path\", default=\"bot/cache/{image}.jpg\")", "config.get(\"unique_commands\", default={ \"ru\": [\"wikiru2\", \"w\", \"wiki\"], \"en\": [\"van\", \"wen\", \"v\"], \"uk\": [\"wikiua\", \"wua\",", "\"music.ogg\"}, {\"commands\": [\"cum\"], \"audio\": \"cum.ogg\"}, {\"commands\": [\"longcum\"], \"audio\": \"longcum.ogg\"}, {\"commands\": [\"frog\"], \"audio\": \"lyagushka.ogg\"}])", "default=None) config.get(\"langs_list\", default=[ \"ru\", \"en\", \"sv\", \"de\", \"ce\", \"tt\", \"ba\", \"pl\", \"uk\", \"be\",", "\"<KEY>\", \"net_pizdy\": \"<KEY>\", \"pizda_tebe\": \"<KEY>\", \"xui\": \"<KEY>\", \"net_xua\": \"<KEY>\" }) BASE_DIR = Path(__file__).parent.parent", "[\"wikibe-tarask\", \"wikibet\", \"wbet\", \"xbet\"] }) config.get(\"admin_notes\", default=[ \"__rules__\", \"__enable_bot__\", \"__ban__\", \"__welcome__\", \"__enable_response__\", \"__enable_welcome__\",", "\"lyagushka.ogg\"}]) config.get(\"stickers\", { \"pizda\": \"<KEY>\", \"net_pizdy\": \"<KEY>\", \"pizda_tebe\": \"<KEY>\", \"xui\": \"<KEY>\", \"net_xua\": \"<KEY>\"", "default=\"\") config.get(\"katz_bots\", default=False) config.get(\"youtube\", default=False) config.get(\"youtube_channels\", default=()) config.get(\"youtube_key\", default=None) config.get(\"langs_list\", default=[ \"ru\", \"en\",", "Config() config.get(\"db_path\", default=\"jdanbot.db\") config.get(\"delay\", default=30) config.get(\"rss_feeds\", default=[]) config.get(\"rss\", default=False) config.get(\"image_path\", default=\"bot/cache/{image}.jpg\") config.get(\"token\") config.get(\"status\",", "{\"commands\": [\"cool_music\"], \"audio\": \"music.ogg\"}, {\"commands\": [\"cum\"], \"audio\": \"cum.ogg\"}, {\"commands\": [\"longcum\"], \"audio\": \"longcum.ogg\"}, {\"commands\":", "config.get(\"admin_notes\", default=[ \"__rules__\", \"__enable_bot__\", 
\"__ban__\", \"__welcome__\", \"__enable_response__\", \"__enable_welcome__\", \"__enable_greatings__\", \"__warns_to_ban__\" ]) config.get(\"eggs\", default=[", "\"__enable_bot__\", \"__ban__\", \"__welcome__\", \"__enable_response__\", \"__enable_welcome__\", \"__enable_greatings__\", \"__warns_to_ban__\" ]) config.get(\"eggs\", default=[ {\"commands\": [\"java1\"], \"audio\":", "config.get(\"access_token\", default=\"\") config.get(\"katz_bots\", default=False) config.get(\"youtube\", default=False) config.get(\"youtube_channels\", default=()) config.get(\"youtube_key\", default=None) config.get(\"langs_list\", default=[ \"ru\",", "pathlib import Path from os import environ class Config: def __init__(self, file_path=\"config.yml\"): try:", "\"__rules__\", \"__enable_bot__\", \"__ban__\", \"__welcome__\", \"__enable_response__\", \"__enable_welcome__\", \"__enable_greatings__\", \"__warns_to_ban__\" ]) config.get(\"eggs\", default=[ {\"commands\": [\"java1\"],", "\"wikibet\", \"wbet\", \"xbet\"] }) config.get(\"admin_notes\", default=[ \"__rules__\", \"__enable_bot__\", \"__ban__\", \"__welcome__\", \"__enable_response__\", \"__enable_welcome__\", \"__enable_greatings__\",", "{\"commands\": [\"longcum\"], \"audio\": \"longcum.ogg\"}, {\"commands\": [\"frog\"], \"audio\": \"lyagushka.ogg\"}]) config.get(\"stickers\", { \"pizda\": \"<KEY>\", \"net_pizdy\":", "\"xui\": \"<KEY>\", \"net_xua\": \"<KEY>\" }) BASE_DIR = Path(__file__).parent.parent LOCALES_DIR = BASE_DIR / \"i18n\"", "default)) config = Config() config.get(\"db_path\", default=\"jdanbot.db\") config.get(\"delay\", default=30) config.get(\"rss_feeds\", default=[]) config.get(\"rss\", default=False) config.get(\"image_path\",", "config.get(\"status\", default=\"unknown\") config.get(\"vk\", default=False) config.get(\"vk_channels\", default=()) config.get(\"access_token\", default=\"\") config.get(\"katz_bots\", default=False) config.get(\"youtube\", default=False) config.get(\"youtube_channels\",", 
"config.get(\"rss\", default=False) config.get(\"image_path\", default=\"bot/cache/{image}.jpg\") config.get(\"token\") config.get(\"status\", default=\"unknown\") config.get(\"vk\", default=False) config.get(\"vk_channels\", default=()) config.get(\"access_token\", default=\"\")", "def get(self, param, default=None): globals()[param.upper()] = ( self.environ.get(param.upper()) or self.config.get(param, default)) config =", "default=[]) config.get(\"rss\", default=False) config.get(\"image_path\", default=\"bot/cache/{image}.jpg\") config.get(\"token\") config.get(\"status\", default=\"unknown\") config.get(\"vk\", default=False) config.get(\"vk_channels\", default=()) config.get(\"access_token\",", "import environ class Config: def __init__(self, file_path=\"config.yml\"): try: with open(file_path, encoding=\"UTF-8\") as file:", "[\"cum\"], \"audio\": \"cum.ogg\"}, {\"commands\": [\"longcum\"], \"audio\": \"longcum.ogg\"}, {\"commands\": [\"frog\"], \"audio\": \"lyagushka.ogg\"}]) config.get(\"stickers\", {", "config.get(\"vk\", default=False) config.get(\"vk_channels\", default=()) config.get(\"access_token\", default=\"\") config.get(\"katz_bots\", default=False) config.get(\"youtube\", default=False) config.get(\"youtube_channels\", default=()) config.get(\"youtube_key\",", "\"ru\": [\"wikiru2\", \"w\", \"wiki\"], \"en\": [\"van\", \"wen\", \"v\"], \"uk\": [\"wikiua\", \"wua\", \"pawuk\"], \"be-tarask\":", "Exception: self.config = {} self.environ = environ def get(self, param, default=None): globals()[param.upper()] =", "\"uk\": [\"wikiua\", \"wua\", \"pawuk\"], \"be-tarask\": [\"wikibe-tarask\", \"wikibet\", \"wbet\", \"xbet\"] }) config.get(\"admin_notes\", default=[ \"__rules__\",", "\"__enable_welcome__\", \"__enable_greatings__\", \"__warns_to_ban__\" ]) config.get(\"eggs\", default=[ {\"commands\": [\"java1\"], \"audio\": \"java.ogg\"}, {\"commands\": [\"cool_music\"], \"audio\":", "config.get(\"youtube_key\", default=None) config.get(\"langs_list\", 
default=[ \"ru\", \"en\", \"sv\", \"de\", \"ce\", \"tt\", \"ba\", \"pl\", \"uk\",", "globals()[param.upper()] = ( self.environ.get(param.upper()) or self.config.get(param, default)) config = Config() config.get(\"db_path\", default=\"jdanbot.db\") config.get(\"delay\",", "{\"commands\": [\"cum\"], \"audio\": \"cum.ogg\"}, {\"commands\": [\"longcum\"], \"audio\": \"longcum.ogg\"}, {\"commands\": [\"frog\"], \"audio\": \"lyagushka.ogg\"}]) config.get(\"stickers\",", "\"<KEY>\", \"xui\": \"<KEY>\", \"net_xua\": \"<KEY>\" }) BASE_DIR = Path(__file__).parent.parent LOCALES_DIR = BASE_DIR /", "\"be\", \"es\", \"he\", \"xh\", \"ab\"]) config.get(\"unique_commands\", default={ \"ru\": [\"wikiru2\", \"w\", \"wiki\"], \"en\": [\"van\",", "\"xh\", \"ab\"]) config.get(\"unique_commands\", default={ \"ru\": [\"wikiru2\", \"w\", \"wiki\"], \"en\": [\"van\", \"wen\", \"v\"], \"uk\":", "import yaml from pathlib import Path from os import environ class Config: def", "Config: def __init__(self, file_path=\"config.yml\"): try: with open(file_path, encoding=\"UTF-8\") as file: self.config = yaml.full_load(file.read())", "\"ce\", \"tt\", \"ba\", \"pl\", \"uk\", \"be\", \"es\", \"he\", \"xh\", \"ab\"]) config.get(\"unique_commands\", default={ \"ru\":", "\"__ban__\", \"__welcome__\", \"__enable_response__\", \"__enable_welcome__\", \"__enable_greatings__\", \"__warns_to_ban__\" ]) config.get(\"eggs\", default=[ {\"commands\": [\"java1\"], \"audio\": \"java.ogg\"},", "open(file_path, encoding=\"UTF-8\") as file: self.config = yaml.full_load(file.read()) except Exception: self.config = {} self.environ", "default=[ \"__rules__\", \"__enable_bot__\", \"__ban__\", \"__welcome__\", \"__enable_response__\", \"__enable_welcome__\", \"__enable_greatings__\", \"__warns_to_ban__\" ]) config.get(\"eggs\", default=[ {\"commands\":", "\"pizda\": \"<KEY>\", \"net_pizdy\": \"<KEY>\", \"pizda_tebe\": \"<KEY>\", \"xui\": \"<KEY>\", \"net_xua\": \"<KEY>\" }) BASE_DIR =", "\"__enable_response__\", 
\"__enable_welcome__\", \"__enable_greatings__\", \"__warns_to_ban__\" ]) config.get(\"eggs\", default=[ {\"commands\": [\"java1\"], \"audio\": \"java.ogg\"}, {\"commands\": [\"cool_music\"],", "( self.environ.get(param.upper()) or self.config.get(param, default)) config = Config() config.get(\"db_path\", default=\"jdanbot.db\") config.get(\"delay\", default=30) config.get(\"rss_feeds\",", "[\"java1\"], \"audio\": \"java.ogg\"}, {\"commands\": [\"cool_music\"], \"audio\": \"music.ogg\"}, {\"commands\": [\"cum\"], \"audio\": \"cum.ogg\"}, {\"commands\": [\"longcum\"],", "Path from os import environ class Config: def __init__(self, file_path=\"config.yml\"): try: with open(file_path,", "\"net_pizdy\": \"<KEY>\", \"pizda_tebe\": \"<KEY>\", \"xui\": \"<KEY>\", \"net_xua\": \"<KEY>\" }) BASE_DIR = Path(__file__).parent.parent LOCALES_DIR", "default=\"bot/cache/{image}.jpg\") config.get(\"token\") config.get(\"status\", default=\"unknown\") config.get(\"vk\", default=False) config.get(\"vk_channels\", default=()) config.get(\"access_token\", default=\"\") config.get(\"katz_bots\", default=False) config.get(\"youtube\",", "\"__enable_greatings__\", \"__warns_to_ban__\" ]) config.get(\"eggs\", default=[ {\"commands\": [\"java1\"], \"audio\": \"java.ogg\"}, {\"commands\": [\"cool_music\"], \"audio\": \"music.ogg\"},", "= Config() config.get(\"db_path\", default=\"jdanbot.db\") config.get(\"delay\", default=30) config.get(\"rss_feeds\", default=[]) config.get(\"rss\", default=False) config.get(\"image_path\", default=\"bot/cache/{image}.jpg\") config.get(\"token\")", "\"sv\", \"de\", \"ce\", \"tt\", \"ba\", \"pl\", \"uk\", \"be\", \"es\", \"he\", \"xh\", \"ab\"]) config.get(\"unique_commands\",", "= yaml.full_load(file.read()) except Exception: self.config = {} self.environ = environ def get(self, param,", "default=False) config.get(\"youtube\", default=False) config.get(\"youtube_channels\", default=()) config.get(\"youtube_key\", default=None) config.get(\"langs_list\", 
default=[ \"ru\", \"en\", \"sv\", \"de\",", "{\"commands\": [\"frog\"], \"audio\": \"lyagushka.ogg\"}]) config.get(\"stickers\", { \"pizda\": \"<KEY>\", \"net_pizdy\": \"<KEY>\", \"pizda_tebe\": \"<KEY>\", \"xui\":", "{} self.environ = environ def get(self, param, default=None): globals()[param.upper()] = ( self.environ.get(param.upper()) or", "default=()) config.get(\"youtube_key\", default=None) config.get(\"langs_list\", default=[ \"ru\", \"en\", \"sv\", \"de\", \"ce\", \"tt\", \"ba\", \"pl\",", "default=None): globals()[param.upper()] = ( self.environ.get(param.upper()) or self.config.get(param, default)) config = Config() config.get(\"db_path\", default=\"jdanbot.db\")", "yaml from pathlib import Path from os import environ class Config: def __init__(self,", "file: self.config = yaml.full_load(file.read()) except Exception: self.config = {} self.environ = environ def", "config.get(\"katz_bots\", default=False) config.get(\"youtube\", default=False) config.get(\"youtube_channels\", default=()) config.get(\"youtube_key\", default=None) config.get(\"langs_list\", default=[ \"ru\", \"en\", \"sv\",", "\"w\", \"wiki\"], \"en\": [\"van\", \"wen\", \"v\"], \"uk\": [\"wikiua\", \"wua\", \"pawuk\"], \"be-tarask\": [\"wikibe-tarask\", \"wikibet\",", "config.get(\"delay\", default=30) config.get(\"rss_feeds\", default=[]) config.get(\"rss\", default=False) config.get(\"image_path\", default=\"bot/cache/{image}.jpg\") config.get(\"token\") config.get(\"status\", default=\"unknown\") config.get(\"vk\", default=False)", "\"wen\", \"v\"], \"uk\": [\"wikiua\", \"wua\", \"pawuk\"], \"be-tarask\": [\"wikibe-tarask\", \"wikibet\", \"wbet\", \"xbet\"] }) config.get(\"admin_notes\",", "from pathlib import Path from os import environ class Config: def __init__(self, file_path=\"config.yml\"):", "[\"van\", \"wen\", \"v\"], \"uk\": [\"wikiua\", \"wua\", \"pawuk\"], \"be-tarask\": [\"wikibe-tarask\", \"wikibet\", \"wbet\", \"xbet\"] })", "\"pizda_tebe\": \"<KEY>\", \"xui\": \"<KEY>\", 
\"net_xua\": \"<KEY>\" }) BASE_DIR = Path(__file__).parent.parent LOCALES_DIR = BASE_DIR", "default=\"jdanbot.db\") config.get(\"delay\", default=30) config.get(\"rss_feeds\", default=[]) config.get(\"rss\", default=False) config.get(\"image_path\", default=\"bot/cache/{image}.jpg\") config.get(\"token\") config.get(\"status\", default=\"unknown\") config.get(\"vk\",", "{ \"pizda\": \"<KEY>\", \"net_pizdy\": \"<KEY>\", \"pizda_tebe\": \"<KEY>\", \"xui\": \"<KEY>\", \"net_xua\": \"<KEY>\" }) BASE_DIR", "\"audio\": \"music.ogg\"}, {\"commands\": [\"cum\"], \"audio\": \"cum.ogg\"}, {\"commands\": [\"longcum\"], \"audio\": \"longcum.ogg\"}, {\"commands\": [\"frog\"], \"audio\":", "os import environ class Config: def __init__(self, file_path=\"config.yml\"): try: with open(file_path, encoding=\"UTF-8\") as", "default=False) config.get(\"youtube_channels\", default=()) config.get(\"youtube_key\", default=None) config.get(\"langs_list\", default=[ \"ru\", \"en\", \"sv\", \"de\", \"ce\", \"tt\",", "or self.config.get(param, default)) config = Config() config.get(\"db_path\", default=\"jdanbot.db\") config.get(\"delay\", default=30) config.get(\"rss_feeds\", default=[]) config.get(\"rss\",", "[\"frog\"], \"audio\": \"lyagushka.ogg\"}]) config.get(\"stickers\", { \"pizda\": \"<KEY>\", \"net_pizdy\": \"<KEY>\", \"pizda_tebe\": \"<KEY>\", \"xui\": \"<KEY>\",", "\"en\": [\"van\", \"wen\", \"v\"], \"uk\": [\"wikiua\", \"wua\", \"pawuk\"], \"be-tarask\": [\"wikibe-tarask\", \"wikibet\", \"wbet\", \"xbet\"]", "[\"wikiru2\", \"w\", \"wiki\"], \"en\": [\"van\", \"wen\", \"v\"], \"uk\": [\"wikiua\", \"wua\", \"pawuk\"], \"be-tarask\": [\"wikibe-tarask\",", "default=30) config.get(\"rss_feeds\", default=[]) config.get(\"rss\", default=False) config.get(\"image_path\", default=\"bot/cache/{image}.jpg\") config.get(\"token\") config.get(\"status\", default=\"unknown\") config.get(\"vk\", default=False) config.get(\"vk_channels\",", "default=()) config.get(\"access_token\", default=\"\") 
config.get(\"katz_bots\", default=False) config.get(\"youtube\", default=False) config.get(\"youtube_channels\", default=()) config.get(\"youtube_key\", default=None) config.get(\"langs_list\", default=[", "[\"cool_music\"], \"audio\": \"music.ogg\"}, {\"commands\": [\"cum\"], \"audio\": \"cum.ogg\"}, {\"commands\": [\"longcum\"], \"audio\": \"longcum.ogg\"}, {\"commands\": [\"frog\"],", "config.get(\"stickers\", { \"pizda\": \"<KEY>\", \"net_pizdy\": \"<KEY>\", \"pizda_tebe\": \"<KEY>\", \"xui\": \"<KEY>\", \"net_xua\": \"<KEY>\" })", "\"de\", \"ce\", \"tt\", \"ba\", \"pl\", \"uk\", \"be\", \"es\", \"he\", \"xh\", \"ab\"]) config.get(\"unique_commands\", default={", "class Config: def __init__(self, file_path=\"config.yml\"): try: with open(file_path, encoding=\"UTF-8\") as file: self.config =", "\"tt\", \"ba\", \"pl\", \"uk\", \"be\", \"es\", \"he\", \"xh\", \"ab\"]) config.get(\"unique_commands\", default={ \"ru\": [\"wikiru2\",", "\"wbet\", \"xbet\"] }) config.get(\"admin_notes\", default=[ \"__rules__\", \"__enable_bot__\", \"__ban__\", \"__welcome__\", \"__enable_response__\", \"__enable_welcome__\", \"__enable_greatings__\", \"__warns_to_ban__\"", "import Path from os import environ class Config: def __init__(self, file_path=\"config.yml\"): try: with", "\"xbet\"] }) config.get(\"admin_notes\", default=[ \"__rules__\", \"__enable_bot__\", \"__ban__\", \"__welcome__\", \"__enable_response__\", \"__enable_welcome__\", \"__enable_greatings__\", \"__warns_to_ban__\" ])", "environ def get(self, param, default=None): globals()[param.upper()] = ( self.environ.get(param.upper()) or self.config.get(param, default)) config", "[\"longcum\"], \"audio\": \"longcum.ogg\"}, {\"commands\": [\"frog\"], \"audio\": \"lyagushka.ogg\"}]) config.get(\"stickers\", { \"pizda\": \"<KEY>\", \"net_pizdy\": \"<KEY>\",", "try: with open(file_path, encoding=\"UTF-8\") as file: self.config = yaml.full_load(file.read()) except Exception: self.config =", "\"audio\": \"cum.ogg\"}, 
{\"commands\": [\"longcum\"], \"audio\": \"longcum.ogg\"}, {\"commands\": [\"frog\"], \"audio\": \"lyagushka.ogg\"}]) config.get(\"stickers\", { \"pizda\":", "config.get(\"image_path\", default=\"bot/cache/{image}.jpg\") config.get(\"token\") config.get(\"status\", default=\"unknown\") config.get(\"vk\", default=False) config.get(\"vk_channels\", default=()) config.get(\"access_token\", default=\"\") config.get(\"katz_bots\", default=False)", "default=False) config.get(\"vk_channels\", default=()) config.get(\"access_token\", default=\"\") config.get(\"katz_bots\", default=False) config.get(\"youtube\", default=False) config.get(\"youtube_channels\", default=()) config.get(\"youtube_key\", default=None)", "default={ \"ru\": [\"wikiru2\", \"w\", \"wiki\"], \"en\": [\"van\", \"wen\", \"v\"], \"uk\": [\"wikiua\", \"wua\", \"pawuk\"],", "get(self, param, default=None): globals()[param.upper()] = ( self.environ.get(param.upper()) or self.config.get(param, default)) config = Config()", "default=False) config.get(\"image_path\", default=\"bot/cache/{image}.jpg\") config.get(\"token\") config.get(\"status\", default=\"unknown\") config.get(\"vk\", default=False) config.get(\"vk_channels\", default=()) config.get(\"access_token\", default=\"\") config.get(\"katz_bots\",", "\"audio\": \"lyagushka.ogg\"}]) config.get(\"stickers\", { \"pizda\": \"<KEY>\", \"net_pizdy\": \"<KEY>\", \"pizda_tebe\": \"<KEY>\", \"xui\": \"<KEY>\", \"net_xua\":", "with open(file_path, encoding=\"UTF-8\") as file: self.config = yaml.full_load(file.read()) except Exception: self.config = {}", "as file: self.config = yaml.full_load(file.read()) except Exception: self.config = {} self.environ = environ", "default=[ {\"commands\": [\"java1\"], \"audio\": \"java.ogg\"}, {\"commands\": [\"cool_music\"], \"audio\": \"music.ogg\"}, {\"commands\": [\"cum\"], \"audio\": \"cum.ogg\"},", "self.config.get(param, default)) config = Config() config.get(\"db_path\", default=\"jdanbot.db\") config.get(\"delay\", 
default=30) config.get(\"rss_feeds\", default=[]) config.get(\"rss\", default=False)", "config.get(\"langs_list\", default=[ \"ru\", \"en\", \"sv\", \"de\", \"ce\", \"tt\", \"ba\", \"pl\", \"uk\", \"be\", \"es\",", "param, default=None): globals()[param.upper()] = ( self.environ.get(param.upper()) or self.config.get(param, default)) config = Config() config.get(\"db_path\",", "[\"wikiua\", \"wua\", \"pawuk\"], \"be-tarask\": [\"wikibe-tarask\", \"wikibet\", \"wbet\", \"xbet\"] }) config.get(\"admin_notes\", default=[ \"__rules__\", \"__enable_bot__\",", "\"ba\", \"pl\", \"uk\", \"be\", \"es\", \"he\", \"xh\", \"ab\"]) config.get(\"unique_commands\", default={ \"ru\": [\"wikiru2\", \"w\",", "\"wiki\"], \"en\": [\"van\", \"wen\", \"v\"], \"uk\": [\"wikiua\", \"wua\", \"pawuk\"], \"be-tarask\": [\"wikibe-tarask\", \"wikibet\", \"wbet\",", "\"wua\", \"pawuk\"], \"be-tarask\": [\"wikibe-tarask\", \"wikibet\", \"wbet\", \"xbet\"] }) config.get(\"admin_notes\", default=[ \"__rules__\", \"__enable_bot__\", \"__ban__\",", "\"cum.ogg\"}, {\"commands\": [\"longcum\"], \"audio\": \"longcum.ogg\"}, {\"commands\": [\"frog\"], \"audio\": \"lyagushka.ogg\"}]) config.get(\"stickers\", { \"pizda\": \"<KEY>\",", "config.get(\"eggs\", default=[ {\"commands\": [\"java1\"], \"audio\": \"java.ogg\"}, {\"commands\": [\"cool_music\"], \"audio\": \"music.ogg\"}, {\"commands\": [\"cum\"], \"audio\":", "\"__warns_to_ban__\" ]) config.get(\"eggs\", default=[ {\"commands\": [\"java1\"], \"audio\": \"java.ogg\"}, {\"commands\": [\"cool_music\"], \"audio\": \"music.ogg\"}, {\"commands\":", "__init__(self, file_path=\"config.yml\"): try: with open(file_path, encoding=\"UTF-8\") as file: self.config = yaml.full_load(file.read()) except Exception:", "config.get(\"token\") config.get(\"status\", default=\"unknown\") config.get(\"vk\", default=False) config.get(\"vk_channels\", default=()) config.get(\"access_token\", default=\"\") config.get(\"katz_bots\", default=False) config.get(\"youtube\", 
default=False)", "\"ab\"]) config.get(\"unique_commands\", default={ \"ru\": [\"wikiru2\", \"w\", \"wiki\"], \"en\": [\"van\", \"wen\", \"v\"], \"uk\": [\"wikiua\",", "\"__welcome__\", \"__enable_response__\", \"__enable_welcome__\", \"__enable_greatings__\", \"__warns_to_ban__\" ]) config.get(\"eggs\", default=[ {\"commands\": [\"java1\"], \"audio\": \"java.ogg\"}, {\"commands\":", "config.get(\"rss_feeds\", default=[]) config.get(\"rss\", default=False) config.get(\"image_path\", default=\"bot/cache/{image}.jpg\") config.get(\"token\") config.get(\"status\", default=\"unknown\") config.get(\"vk\", default=False) config.get(\"vk_channels\", default=())", "\"es\", \"he\", \"xh\", \"ab\"]) config.get(\"unique_commands\", default={ \"ru\": [\"wikiru2\", \"w\", \"wiki\"], \"en\": [\"van\", \"wen\",", "self.config = {} self.environ = environ def get(self, param, default=None): globals()[param.upper()] = (", "{\"commands\": [\"java1\"], \"audio\": \"java.ogg\"}, {\"commands\": [\"cool_music\"], \"audio\": \"music.ogg\"}, {\"commands\": [\"cum\"], \"audio\": \"cum.ogg\"}, {\"commands\":", "\"be-tarask\": [\"wikibe-tarask\", \"wikibet\", \"wbet\", \"xbet\"] }) config.get(\"admin_notes\", default=[ \"__rules__\", \"__enable_bot__\", \"__ban__\", \"__welcome__\", \"__enable_response__\",", "\"longcum.ogg\"}, {\"commands\": [\"frog\"], \"audio\": \"lyagushka.ogg\"}]) config.get(\"stickers\", { \"pizda\": \"<KEY>\", \"net_pizdy\": \"<KEY>\", \"pizda_tebe\": \"<KEY>\",", "\"pl\", \"uk\", \"be\", \"es\", \"he\", \"xh\", \"ab\"]) config.get(\"unique_commands\", default={ \"ru\": [\"wikiru2\", \"w\", \"wiki\"],", "environ class Config: def __init__(self, file_path=\"config.yml\"): try: with open(file_path, encoding=\"UTF-8\") as file: self.config", "}) config.get(\"admin_notes\", default=[ \"__rules__\", \"__enable_bot__\", \"__ban__\", \"__welcome__\", \"__enable_response__\", \"__enable_welcome__\", \"__enable_greatings__\", \"__warns_to_ban__\" ]) config.get(\"eggs\",", 
"\"audio\": \"java.ogg\"}, {\"commands\": [\"cool_music\"], \"audio\": \"music.ogg\"}, {\"commands\": [\"cum\"], \"audio\": \"cum.ogg\"}, {\"commands\": [\"longcum\"], \"audio\":", "self.environ = environ def get(self, param, default=None): globals()[param.upper()] = ( self.environ.get(param.upper()) or self.config.get(param,", "\"v\"], \"uk\": [\"wikiua\", \"wua\", \"pawuk\"], \"be-tarask\": [\"wikibe-tarask\", \"wikibet\", \"wbet\", \"xbet\"] }) config.get(\"admin_notes\", default=[", "\"pawuk\"], \"be-tarask\": [\"wikibe-tarask\", \"wikibet\", \"wbet\", \"xbet\"] }) config.get(\"admin_notes\", default=[ \"__rules__\", \"__enable_bot__\", \"__ban__\", \"__welcome__\",", "= environ def get(self, param, default=None): globals()[param.upper()] = ( self.environ.get(param.upper()) or self.config.get(param, default))" ]
[ "Used for staging events and publishing them in one transaction. \"\"\" _publisher: Optional[EventPublisher]", "from typing import Any, Dict, List, Optional, Tuple from mediator.event.base import EventPublisher class", "self \"\"\" self._publisher = publisher return self async def commit(self): \"\"\" Commits staged", "def cleanup(self): \"\"\" Clears all staged events. \"\"\" self._staged.clear() def enqueue(self, obj: Any,", "\"\"\" self._publisher = publisher return self async def commit(self): \"\"\" Commits staged events", "aggregate object is not properly configured. \"\"\" class EventAggregate: \"\"\" Event aggregate object.", "def __init__(self): \"\"\" Initializes empty event aggregate. \"\"\" self._publisher = None self._staged =", "event publisher used for event sending. :param publisher: event publisher to use :return:", "to use :return: self \"\"\" self._publisher = publisher return self async def commit(self):", "publisher is None: raise ConfigEventAggregateError(f\"Publisher is not set in {self!r}\") async with publisher.transaction()", "Stages given event object with optional extra arguments. :param obj: event object :param", "self._staged = [] def use(self, publisher: EventPublisher): \"\"\" Sets event publisher used for", "self._publisher = publisher return self async def commit(self): \"\"\" Commits staged events by", "\"\"\" Event aggregate base error \"\"\" class ConfigEventAggregateError(AssertionError, EventAggregateError): \"\"\" Config event aggregate", "\"\"\" self._staged.clear() def enqueue(self, obj: Any, **kwargs): \"\"\" Stages given event object with", "Commits staged events by underlying publisher. 
\"\"\" publisher = self._publisher if publisher is", "context: for obj, kwargs in self._staged: await context.publish(obj, **kwargs) self._staged.clear() def cleanup(self): \"\"\"", "_publisher: Optional[EventPublisher] _staged: List[Tuple[Any, Dict[str, Any]]] def __init__(self): \"\"\" Initializes empty event aggregate.", "from mediator.event.base import EventPublisher class EventAggregateError(Exception): \"\"\" Event aggregate base error \"\"\" class", "class ConfigEventAggregateError(AssertionError, EventAggregateError): \"\"\" Config event aggregate error. Raised when event aggregate object", "with publisher.transaction() as context: for obj, kwargs in self._staged: await context.publish(obj, **kwargs) self._staged.clear()", "is None: raise ConfigEventAggregateError(f\"Publisher is not set in {self!r}\") async with publisher.transaction() as", "Optional, Tuple from mediator.event.base import EventPublisher class EventAggregateError(Exception): \"\"\" Event aggregate base error", "by underlying publisher. \"\"\" publisher = self._publisher if publisher is None: raise ConfigEventAggregateError(f\"Publisher", "obj: Any, **kwargs): \"\"\" Stages given event object with optional extra arguments. :param", "Any, Dict, List, Optional, Tuple from mediator.event.base import EventPublisher class EventAggregateError(Exception): \"\"\" Event", "publisher return self async def commit(self): \"\"\" Commits staged events by underlying publisher.", "_staged: List[Tuple[Any, Dict[str, Any]]] def __init__(self): \"\"\" Initializes empty event aggregate. \"\"\" self._publisher", "staged events by underlying publisher. \"\"\" publisher = self._publisher if publisher is None:", "cleanup(self): \"\"\" Clears all staged events. \"\"\" self._staged.clear() def enqueue(self, obj: Any, **kwargs):", "\"\"\" class ConfigEventAggregateError(AssertionError, EventAggregateError): \"\"\" Config event aggregate error. 
Raised when event aggregate", "def enqueue(self, obj: Any, **kwargs): \"\"\" Stages given event object with optional extra", "use :return: self \"\"\" self._publisher = publisher return self async def commit(self): \"\"\"", "in self._staged: await context.publish(obj, **kwargs) self._staged.clear() def cleanup(self): \"\"\" Clears all staged events.", "\"\"\" Sets event publisher used for event sending. :param publisher: event publisher to", "Event aggregate base error \"\"\" class ConfigEventAggregateError(AssertionError, EventAggregateError): \"\"\" Config event aggregate error.", "publisher = self._publisher if publisher is None: raise ConfigEventAggregateError(f\"Publisher is not set in", "configured. \"\"\" class EventAggregate: \"\"\" Event aggregate object. Used for staging events and", "for staging events and publishing them in one transaction. \"\"\" _publisher: Optional[EventPublisher] _staged:", "self async def commit(self): \"\"\" Commits staged events by underlying publisher. \"\"\" publisher", "given event object with optional extra arguments. :param obj: event object :param kwargs:", "\"\"\" Initializes empty event aggregate. \"\"\" self._publisher = None self._staged = [] def", "Any]]] def __init__(self): \"\"\" Initializes empty event aggregate. \"\"\" self._publisher = None self._staged", "self._publisher = None self._staged = [] def use(self, publisher: EventPublisher): \"\"\" Sets event", "await context.publish(obj, **kwargs) self._staged.clear() def cleanup(self): \"\"\" Clears all staged events. \"\"\" self._staged.clear()", "None: raise ConfigEventAggregateError(f\"Publisher is not set in {self!r}\") async with publisher.transaction() as context:", "them in one transaction. \"\"\" _publisher: Optional[EventPublisher] _staged: List[Tuple[Any, Dict[str, Any]]] def __init__(self):", "used for event sending. 
:param publisher: event publisher to use :return: self \"\"\"", "self._staged.clear() def enqueue(self, obj: Any, **kwargs): \"\"\" Stages given event object with optional", "as context: for obj, kwargs in self._staged: await context.publish(obj, **kwargs) self._staged.clear() def cleanup(self):", "async with publisher.transaction() as context: for obj, kwargs in self._staged: await context.publish(obj, **kwargs)", "self._publisher if publisher is None: raise ConfigEventAggregateError(f\"Publisher is not set in {self!r}\") async", "class EventAggregate: \"\"\" Event aggregate object. Used for staging events and publishing them", "publisher.transaction() as context: for obj, kwargs in self._staged: await context.publish(obj, **kwargs) self._staged.clear() def", "List, Optional, Tuple from mediator.event.base import EventPublisher class EventAggregateError(Exception): \"\"\" Event aggregate base", "publishing them in one transaction. \"\"\" _publisher: Optional[EventPublisher] _staged: List[Tuple[Any, Dict[str, Any]]] def", "not set in {self!r}\") async with publisher.transaction() as context: for obj, kwargs in", "all staged events. \"\"\" self._staged.clear() def enqueue(self, obj: Any, **kwargs): \"\"\" Stages given", "Sets event publisher used for event sending. :param publisher: event publisher to use", "staging events and publishing them in one transaction. \"\"\" _publisher: Optional[EventPublisher] _staged: List[Tuple[Any,", "aggregate error. Raised when event aggregate object is not properly configured. \"\"\" class", "Dict[str, Any]]] def __init__(self): \"\"\" Initializes empty event aggregate. \"\"\" self._publisher = None", "self._staged.clear() def cleanup(self): \"\"\" Clears all staged events. 
\"\"\" self._staged.clear() def enqueue(self, obj:", "enqueue(self, obj: Any, **kwargs): \"\"\" Stages given event object with optional extra arguments.", ":param publisher: event publisher to use :return: self \"\"\" self._publisher = publisher return", "mediator.event.base import EventPublisher class EventAggregateError(Exception): \"\"\" Event aggregate base error \"\"\" class ConfigEventAggregateError(AssertionError,", "use(self, publisher: EventPublisher): \"\"\" Sets event publisher used for event sending. :param publisher:", "set in {self!r}\") async with publisher.transaction() as context: for obj, kwargs in self._staged:", "import Any, Dict, List, Optional, Tuple from mediator.event.base import EventPublisher class EventAggregateError(Exception): \"\"\"", "if publisher is None: raise ConfigEventAggregateError(f\"Publisher is not set in {self!r}\") async with", "when event aggregate object is not properly configured. \"\"\" class EventAggregate: \"\"\" Event", "Dict, List, Optional, Tuple from mediator.event.base import EventPublisher class EventAggregateError(Exception): \"\"\" Event aggregate", "__init__(self): \"\"\" Initializes empty event aggregate. \"\"\" self._publisher = None self._staged = []", "in {self!r}\") async with publisher.transaction() as context: for obj, kwargs in self._staged: await", "not properly configured. \"\"\" class EventAggregate: \"\"\" Event aggregate object. Used for staging", "{self!r}\") async with publisher.transaction() as context: for obj, kwargs in self._staged: await context.publish(obj,", "ConfigEventAggregateError(f\"Publisher is not set in {self!r}\") async with publisher.transaction() as context: for obj,", "EventAggregateError(Exception): \"\"\" Event aggregate base error \"\"\" class ConfigEventAggregateError(AssertionError, EventAggregateError): \"\"\" Config event", "is not properly configured. \"\"\" class EventAggregate: \"\"\" Event aggregate object. Used for", "aggregate object. 
Used for staging events and publishing them in one transaction. \"\"\"", "error. Raised when event aggregate object is not properly configured. \"\"\" class EventAggregate:", "event object with optional extra arguments. :param obj: event object :param kwargs: optional", "= None self._staged = [] def use(self, publisher: EventPublisher): \"\"\" Sets event publisher", "EventPublisher): \"\"\" Sets event publisher used for event sending. :param publisher: event publisher", "\"\"\" Commits staged events by underlying publisher. \"\"\" publisher = self._publisher if publisher", "\"\"\" Clears all staged events. \"\"\" self._staged.clear() def enqueue(self, obj: Any, **kwargs): \"\"\"", "event publisher to use :return: self \"\"\" self._publisher = publisher return self async", "\"\"\" _publisher: Optional[EventPublisher] _staged: List[Tuple[Any, Dict[str, Any]]] def __init__(self): \"\"\" Initializes empty event", "context.publish(obj, **kwargs) self._staged.clear() def cleanup(self): \"\"\" Clears all staged events. \"\"\" self._staged.clear() def", "empty event aggregate. \"\"\" self._publisher = None self._staged = [] def use(self, publisher:", "EventAggregateError): \"\"\" Config event aggregate error. Raised when event aggregate object is not", "def commit(self): \"\"\" Commits staged events by underlying publisher. \"\"\" publisher = self._publisher", "async def commit(self): \"\"\" Commits staged events by underlying publisher. \"\"\" publisher =", "publisher used for event sending. :param publisher: event publisher to use :return: self", "Optional[EventPublisher] _staged: List[Tuple[Any, Dict[str, Any]]] def __init__(self): \"\"\" Initializes empty event aggregate. \"\"\"", "properly configured. \"\"\" class EventAggregate: \"\"\" Event aggregate object. Used for staging events", "arguments. 
:param obj: event object :param kwargs: optional extra arguments \"\"\" self._staged.append((obj, kwargs))", "import EventPublisher class EventAggregateError(Exception): \"\"\" Event aggregate base error \"\"\" class ConfigEventAggregateError(AssertionError, EventAggregateError):", "transaction. \"\"\" _publisher: Optional[EventPublisher] _staged: List[Tuple[Any, Dict[str, Any]]] def __init__(self): \"\"\" Initializes empty", "List[Tuple[Any, Dict[str, Any]]] def __init__(self): \"\"\" Initializes empty event aggregate. \"\"\" self._publisher =", "Config event aggregate error. Raised when event aggregate object is not properly configured.", "obj, kwargs in self._staged: await context.publish(obj, **kwargs) self._staged.clear() def cleanup(self): \"\"\" Clears all", "optional extra arguments. :param obj: event object :param kwargs: optional extra arguments \"\"\"", "**kwargs): \"\"\" Stages given event object with optional extra arguments. :param obj: event", "one transaction. \"\"\" _publisher: Optional[EventPublisher] _staged: List[Tuple[Any, Dict[str, Any]]] def __init__(self): \"\"\" Initializes", "return self async def commit(self): \"\"\" Commits staged events by underlying publisher. \"\"\"", "\"\"\" Config event aggregate error. Raised when event aggregate object is not properly", "error \"\"\" class ConfigEventAggregateError(AssertionError, EventAggregateError): \"\"\" Config event aggregate error. Raised when event", "Initializes empty event aggregate. \"\"\" self._publisher = None self._staged = [] def use(self,", "Raised when event aggregate object is not properly configured. \"\"\" class EventAggregate: \"\"\"", "= [] def use(self, publisher: EventPublisher): \"\"\" Sets event publisher used for event", "for event sending. :param publisher: event publisher to use :return: self \"\"\" self._publisher", "\"\"\" self._publisher = None self._staged = [] def use(self, publisher: EventPublisher): \"\"\" Sets", "events by underlying publisher. 
\"\"\" publisher = self._publisher if publisher is None: raise", "raise ConfigEventAggregateError(f\"Publisher is not set in {self!r}\") async with publisher.transaction() as context: for", "sending. :param publisher: event publisher to use :return: self \"\"\" self._publisher = publisher", "publisher: EventPublisher): \"\"\" Sets event publisher used for event sending. :param publisher: event", "underlying publisher. \"\"\" publisher = self._publisher if publisher is None: raise ConfigEventAggregateError(f\"Publisher is", "Any, **kwargs): \"\"\" Stages given event object with optional extra arguments. :param obj:", "ConfigEventAggregateError(AssertionError, EventAggregateError): \"\"\" Config event aggregate error. Raised when event aggregate object is", "EventAggregate: \"\"\" Event aggregate object. Used for staging events and publishing them in", "staged events. \"\"\" self._staged.clear() def enqueue(self, obj: Any, **kwargs): \"\"\" Stages given event", "\"\"\" Stages given event object with optional extra arguments. :param obj: event object", "aggregate. \"\"\" self._publisher = None self._staged = [] def use(self, publisher: EventPublisher): \"\"\"", "publisher to use :return: self \"\"\" self._publisher = publisher return self async def", "aggregate base error \"\"\" class ConfigEventAggregateError(AssertionError, EventAggregateError): \"\"\" Config event aggregate error. Raised", "for obj, kwargs in self._staged: await context.publish(obj, **kwargs) self._staged.clear() def cleanup(self): \"\"\" Clears", "= self._publisher if publisher is None: raise ConfigEventAggregateError(f\"Publisher is not set in {self!r}\")", "event aggregate error. Raised when event aggregate object is not properly configured. \"\"\"", "Clears all staged events. \"\"\" self._staged.clear() def enqueue(self, obj: Any, **kwargs): \"\"\" Stages", "event aggregate. 
\"\"\" self._publisher = None self._staged = [] def use(self, publisher: EventPublisher):", "self._staged: await context.publish(obj, **kwargs) self._staged.clear() def cleanup(self): \"\"\" Clears all staged events. \"\"\"", "\"\"\" Event aggregate object. Used for staging events and publishing them in one", "base error \"\"\" class ConfigEventAggregateError(AssertionError, EventAggregateError): \"\"\" Config event aggregate error. Raised when", "object. Used for staging events and publishing them in one transaction. \"\"\" _publisher:", "kwargs in self._staged: await context.publish(obj, **kwargs) self._staged.clear() def cleanup(self): \"\"\" Clears all staged", "Tuple from mediator.event.base import EventPublisher class EventAggregateError(Exception): \"\"\" Event aggregate base error \"\"\"", "EventPublisher class EventAggregateError(Exception): \"\"\" Event aggregate base error \"\"\" class ConfigEventAggregateError(AssertionError, EventAggregateError): \"\"\"", "None self._staged = [] def use(self, publisher: EventPublisher): \"\"\" Sets event publisher used", "event aggregate object is not properly configured. \"\"\" class EventAggregate: \"\"\" Event aggregate", "event sending. :param publisher: event publisher to use :return: self \"\"\" self._publisher =", "Event aggregate object. Used for staging events and publishing them in one transaction.", "\"\"\" publisher = self._publisher if publisher is None: raise ConfigEventAggregateError(f\"Publisher is not set", "**kwargs) self._staged.clear() def cleanup(self): \"\"\" Clears all staged events. \"\"\" self._staged.clear() def enqueue(self,", "object with optional extra arguments. :param obj: event object :param kwargs: optional extra", "extra arguments. :param obj: event object :param kwargs: optional extra arguments \"\"\" self._staged.append((obj,", "events and publishing them in one transaction. 
\"\"\" _publisher: Optional[EventPublisher] _staged: List[Tuple[Any, Dict[str,", "[] def use(self, publisher: EventPublisher): \"\"\" Sets event publisher used for event sending.", "class EventAggregateError(Exception): \"\"\" Event aggregate base error \"\"\" class ConfigEventAggregateError(AssertionError, EventAggregateError): \"\"\" Config", "object is not properly configured. \"\"\" class EventAggregate: \"\"\" Event aggregate object. Used", "\"\"\" class EventAggregate: \"\"\" Event aggregate object. Used for staging events and publishing", "and publishing them in one transaction. \"\"\" _publisher: Optional[EventPublisher] _staged: List[Tuple[Any, Dict[str, Any]]]", "def use(self, publisher: EventPublisher): \"\"\" Sets event publisher used for event sending. :param", ":return: self \"\"\" self._publisher = publisher return self async def commit(self): \"\"\" Commits", "with optional extra arguments. :param obj: event object :param kwargs: optional extra arguments", "events. \"\"\" self._staged.clear() def enqueue(self, obj: Any, **kwargs): \"\"\" Stages given event object", "publisher. \"\"\" publisher = self._publisher if publisher is None: raise ConfigEventAggregateError(f\"Publisher is not", "publisher: event publisher to use :return: self \"\"\" self._publisher = publisher return self", "typing import Any, Dict, List, Optional, Tuple from mediator.event.base import EventPublisher class EventAggregateError(Exception):", "= publisher return self async def commit(self): \"\"\" Commits staged events by underlying", "is not set in {self!r}\") async with publisher.transaction() as context: for obj, kwargs", "in one transaction. \"\"\" _publisher: Optional[EventPublisher] _staged: List[Tuple[Any, Dict[str, Any]]] def __init__(self): \"\"\"", "commit(self): \"\"\" Commits staged events by underlying publisher. \"\"\" publisher = self._publisher if" ]
[ "as xml from pyslet.wsgi import SessionApp, session_decorator class MyApp(SessionApp): settings_file = 'samples/wsgi_session/settings.json' def", "@session_decorator def setname(self, context): user_name = context.get_form_string('name') if user_name: context.session.entity['UserName'].set_from_value(user_name) context.session.touch() return self.redirect_page(context,", "%s</p>\"\"\" page = page % ( noform % xml.EscapeCharData( context.session.entity['UserName'].value)) else: form =", "enter your name: <input type=\"text\" name=\"name\"/> <input type=\"hidden\" name=%s value=%s /> <input type=\"submit\"", "page = page % ( form % (xml.EscapeCharData(self.csrf_token, True), xml.EscapeCharData(context.session.sid(), True))) context.set_status(200) return", "pyslet.wsgi import SessionApp, session_decorator class MyApp(SessionApp): settings_file = 'samples/wsgi_session/settings.json' def init_dispatcher(self): super(MyApp, self).init_dispatcher()", "page) @session_decorator def setname(self, context): user_name = context.get_form_string('name') if user_name: context.session.entity['UserName'].set_from_value(user_name) context.session.touch() return", "return self.html_response(context, page) @session_decorator def setname(self, context): user_name = context.get_form_string('name') if user_name: context.session.entity['UserName'].set_from_value(user_name)", "setname(self, context): user_name = context.get_form_string('name') if user_name: context.session.entity['UserName'].set_from_value(user_name) context.session.touch() return self.redirect_page(context, context.get_app_root()) if", "form % (xml.EscapeCharData(self.csrf_token, True), xml.EscapeCharData(context.session.sid(), True))) context.set_status(200) return self.html_response(context, page) @session_decorator def setname(self,", "settings_file = 'samples/wsgi_session/settings.json' def init_dispatcher(self): super(MyApp, self).init_dispatcher() self.set_method(\"/\", self.home) self.set_method(\"/setname\", 
self.setname) @session_decorator def", "%s </body></html>\"\"\" if context.session.entity['UserName']: noform = \"\"\"<p>Welcome: %s</p>\"\"\" page = page % (", "import SessionApp, session_decorator class MyApp(SessionApp): settings_file = 'samples/wsgi_session/settings.json' def init_dispatcher(self): super(MyApp, self).init_dispatcher() self.set_method(\"/\",", "True), xml.EscapeCharData(context.session.sid(), True))) context.set_status(200) return self.html_response(context, page) @session_decorator def setname(self, context): user_name =", "Page</title></head><body> <h1>Session Page</h1> %s </body></html>\"\"\" if context.session.entity['UserName']: noform = \"\"\"<p>Welcome: %s</p>\"\"\" page =", "</form>\"\"\" page = page % ( form % (xml.EscapeCharData(self.csrf_token, True), xml.EscapeCharData(context.session.sid(), True))) context.set_status(200)", "init_dispatcher(self): super(MyApp, self).init_dispatcher() self.set_method(\"/\", self.home) self.set_method(\"/setname\", self.setname) @session_decorator def home(self, context): page =", "#! 
/usr/bin/env python import pyslet.xml.structures as xml from pyslet.wsgi import SessionApp, session_decorator class", "def home(self, context): page = \"\"\"<html><head><title>Session Page</title></head><body> <h1>Session Page</h1> %s </body></html>\"\"\" if context.session.entity['UserName']:", "</body></html>\"\"\" if context.session.entity['UserName']: noform = \"\"\"<p>Welcome: %s</p>\"\"\" page = page % ( noform", "import pyslet.xml.structures as xml from pyslet.wsgi import SessionApp, session_decorator class MyApp(SessionApp): settings_file =", "= page % ( form % (xml.EscapeCharData(self.csrf_token, True), xml.EscapeCharData(context.session.sid(), True))) context.set_status(200) return self.html_response(context,", "page = page % ( noform % xml.EscapeCharData( context.session.entity['UserName'].value)) else: form = \"\"\"<form", "<input type=\"hidden\" name=%s value=%s /> <input type=\"submit\" value=\"Set\"/></p> </form>\"\"\" page = page %", "\"\"\"<p>Welcome: %s</p>\"\"\" page = page % ( noform % xml.EscapeCharData( context.session.entity['UserName'].value)) else: form", "context.set_status(200) return self.html_response(context, page) @session_decorator def setname(self, context): user_name = context.get_form_string('name') if user_name:", "% xml.EscapeCharData( context.session.entity['UserName'].value)) else: form = \"\"\"<form method=\"POST\" action=\"setname\"> <p>Please enter your name:", "= \"\"\"<html><head><title>Session Page</title></head><body> <h1>Session Page</h1> %s </body></html>\"\"\" if context.session.entity['UserName']: noform = \"\"\"<p>Welcome: %s</p>\"\"\"", "action=\"setname\"> <p>Please enter your name: <input type=\"text\" name=\"name\"/> <input type=\"hidden\" name=%s value=%s />", "form = \"\"\"<form method=\"POST\" action=\"setname\"> <p>Please enter your name: <input type=\"text\" name=\"name\"/> <input", "<input type=\"text\" name=\"name\"/> <input type=\"hidden\" name=%s value=%s /> <input type=\"submit\" value=\"Set\"/></p> 
</form>\"\"\" page", "Page</h1> %s </body></html>\"\"\" if context.session.entity['UserName']: noform = \"\"\"<p>Welcome: %s</p>\"\"\" page = page %", "type=\"hidden\" name=%s value=%s /> <input type=\"submit\" value=\"Set\"/></p> </form>\"\"\" page = page % (", "type=\"submit\" value=\"Set\"/></p> </form>\"\"\" page = page % ( form % (xml.EscapeCharData(self.csrf_token, True), xml.EscapeCharData(context.session.sid(),", "noform = \"\"\"<p>Welcome: %s</p>\"\"\" page = page % ( noform % xml.EscapeCharData( context.session.entity['UserName'].value))", "xml.EscapeCharData( context.session.entity['UserName'].value)) else: form = \"\"\"<form method=\"POST\" action=\"setname\"> <p>Please enter your name: <input", "<h1>Session Page</h1> %s </body></html>\"\"\" if context.session.entity['UserName']: noform = \"\"\"<p>Welcome: %s</p>\"\"\" page = page", "SessionApp, session_decorator class MyApp(SessionApp): settings_file = 'samples/wsgi_session/settings.json' def init_dispatcher(self): super(MyApp, self).init_dispatcher() self.set_method(\"/\", self.home)", "def setname(self, context): user_name = context.get_form_string('name') if user_name: context.session.entity['UserName'].set_from_value(user_name) context.session.touch() return self.redirect_page(context, context.get_app_root())", "page = \"\"\"<html><head><title>Session Page</title></head><body> <h1>Session Page</h1> %s </body></html>\"\"\" if context.session.entity['UserName']: noform = \"\"\"<p>Welcome:", "% ( form % (xml.EscapeCharData(self.csrf_token, True), xml.EscapeCharData(context.session.sid(), True))) context.set_status(200) return self.html_response(context, page) @session_decorator", "self.set_method(\"/\", self.home) self.set_method(\"/setname\", self.setname) @session_decorator def home(self, context): page = \"\"\"<html><head><title>Session Page</title></head><body> <h1>Session", "page % ( noform % xml.EscapeCharData( context.session.entity['UserName'].value)) else: form = \"\"\"<form method=\"POST\" 
action=\"setname\">", "method=\"POST\" action=\"setname\"> <p>Please enter your name: <input type=\"text\" name=\"name\"/> <input type=\"hidden\" name=%s value=%s", "else: form = \"\"\"<form method=\"POST\" action=\"setname\"> <p>Please enter your name: <input type=\"text\" name=\"name\"/>", "\"\"\"<form method=\"POST\" action=\"setname\"> <p>Please enter your name: <input type=\"text\" name=\"name\"/> <input type=\"hidden\" name=%s", "% (xml.EscapeCharData(self.csrf_token, True), xml.EscapeCharData(context.session.sid(), True))) context.set_status(200) return self.html_response(context, page) @session_decorator def setname(self, context):", "True))) context.set_status(200) return self.html_response(context, page) @session_decorator def setname(self, context): user_name = context.get_form_string('name') if", "= \"\"\"<p>Welcome: %s</p>\"\"\" page = page % ( noform % xml.EscapeCharData( context.session.entity['UserName'].value)) else:", "your name: <input type=\"text\" name=\"name\"/> <input type=\"hidden\" name=%s value=%s /> <input type=\"submit\" value=\"Set\"/></p>", "= context.get_form_string('name') if user_name: context.session.entity['UserName'].set_from_value(user_name) context.session.touch() return self.redirect_page(context, context.get_app_root()) if __name__ == \"__main__\":", "(xml.EscapeCharData(self.csrf_token, True), xml.EscapeCharData(context.session.sid(), True))) context.set_status(200) return self.html_response(context, page) @session_decorator def setname(self, context): user_name", "= 'samples/wsgi_session/settings.json' def init_dispatcher(self): super(MyApp, self).init_dispatcher() self.set_method(\"/\", self.home) self.set_method(\"/setname\", self.setname) @session_decorator def home(self,", "context): page = \"\"\"<html><head><title>Session Page</title></head><body> <h1>Session Page</h1> %s </body></html>\"\"\" if context.session.entity['UserName']: noform =", "context.session.entity['UserName'].value)) else: form = \"\"\"<form 
method=\"POST\" action=\"setname\"> <p>Please enter your name: <input type=\"text\"", "/usr/bin/env python import pyslet.xml.structures as xml from pyslet.wsgi import SessionApp, session_decorator class MyApp(SessionApp):", "@session_decorator def home(self, context): page = \"\"\"<html><head><title>Session Page</title></head><body> <h1>Session Page</h1> %s </body></html>\"\"\" if", "xml from pyslet.wsgi import SessionApp, session_decorator class MyApp(SessionApp): settings_file = 'samples/wsgi_session/settings.json' def init_dispatcher(self):", "context.get_form_string('name') if user_name: context.session.entity['UserName'].set_from_value(user_name) context.session.touch() return self.redirect_page(context, context.get_app_root()) if __name__ == \"__main__\": MyApp.main()", "<p>Please enter your name: <input type=\"text\" name=\"name\"/> <input type=\"hidden\" name=%s value=%s /> <input", "from pyslet.wsgi import SessionApp, session_decorator class MyApp(SessionApp): settings_file = 'samples/wsgi_session/settings.json' def init_dispatcher(self): super(MyApp,", "session_decorator class MyApp(SessionApp): settings_file = 'samples/wsgi_session/settings.json' def init_dispatcher(self): super(MyApp, self).init_dispatcher() self.set_method(\"/\", self.home) self.set_method(\"/setname\",", "home(self, context): page = \"\"\"<html><head><title>Session Page</title></head><body> <h1>Session Page</h1> %s </body></html>\"\"\" if context.session.entity['UserName']: noform", "( noform % xml.EscapeCharData( context.session.entity['UserName'].value)) else: form = \"\"\"<form method=\"POST\" action=\"setname\"> <p>Please enter", "context): user_name = context.get_form_string('name') if user_name: context.session.entity['UserName'].set_from_value(user_name) context.session.touch() return self.redirect_page(context, context.get_app_root()) if __name__", "noform % xml.EscapeCharData( context.session.entity['UserName'].value)) else: form = \"\"\"<form method=\"POST\" 
action=\"setname\"> <p>Please enter your", "page % ( form % (xml.EscapeCharData(self.csrf_token, True), xml.EscapeCharData(context.session.sid(), True))) context.set_status(200) return self.html_response(context, page)", "self).init_dispatcher() self.set_method(\"/\", self.home) self.set_method(\"/setname\", self.setname) @session_decorator def home(self, context): page = \"\"\"<html><head><title>Session Page</title></head><body>", "<input type=\"submit\" value=\"Set\"/></p> </form>\"\"\" page = page % ( form % (xml.EscapeCharData(self.csrf_token, True),", "( form % (xml.EscapeCharData(self.csrf_token, True), xml.EscapeCharData(context.session.sid(), True))) context.set_status(200) return self.html_response(context, page) @session_decorator def", "name=%s value=%s /> <input type=\"submit\" value=\"Set\"/></p> </form>\"\"\" page = page % ( form", "MyApp(SessionApp): settings_file = 'samples/wsgi_session/settings.json' def init_dispatcher(self): super(MyApp, self).init_dispatcher() self.set_method(\"/\", self.home) self.set_method(\"/setname\", self.setname) @session_decorator", "class MyApp(SessionApp): settings_file = 'samples/wsgi_session/settings.json' def init_dispatcher(self): super(MyApp, self).init_dispatcher() self.set_method(\"/\", self.home) self.set_method(\"/setname\", self.setname)", "xml.EscapeCharData(context.session.sid(), True))) context.set_status(200) return self.html_response(context, page) @session_decorator def setname(self, context): user_name = context.get_form_string('name')", "user_name = context.get_form_string('name') if user_name: context.session.entity['UserName'].set_from_value(user_name) context.session.touch() return self.redirect_page(context, context.get_app_root()) if __name__ ==", "value=%s /> <input type=\"submit\" value=\"Set\"/></p> </form>\"\"\" page = page % ( form %", "self.home) self.set_method(\"/setname\", self.setname) @session_decorator def home(self, context): page = \"\"\"<html><head><title>Session 
Page</title></head><body> <h1>Session Page</h1>", "pyslet.xml.structures as xml from pyslet.wsgi import SessionApp, session_decorator class MyApp(SessionApp): settings_file = 'samples/wsgi_session/settings.json'", "'samples/wsgi_session/settings.json' def init_dispatcher(self): super(MyApp, self).init_dispatcher() self.set_method(\"/\", self.home) self.set_method(\"/setname\", self.setname) @session_decorator def home(self, context):", "python import pyslet.xml.structures as xml from pyslet.wsgi import SessionApp, session_decorator class MyApp(SessionApp): settings_file", "name: <input type=\"text\" name=\"name\"/> <input type=\"hidden\" name=%s value=%s /> <input type=\"submit\" value=\"Set\"/></p> </form>\"\"\"", "def init_dispatcher(self): super(MyApp, self).init_dispatcher() self.set_method(\"/\", self.home) self.set_method(\"/setname\", self.setname) @session_decorator def home(self, context): page", "if context.session.entity['UserName']: noform = \"\"\"<p>Welcome: %s</p>\"\"\" page = page % ( noform %", "= page % ( noform % xml.EscapeCharData( context.session.entity['UserName'].value)) else: form = \"\"\"<form method=\"POST\"", "self.set_method(\"/setname\", self.setname) @session_decorator def home(self, context): page = \"\"\"<html><head><title>Session Page</title></head><body> <h1>Session Page</h1> %s", "self.html_response(context, page) @session_decorator def setname(self, context): user_name = context.get_form_string('name') if user_name: context.session.entity['UserName'].set_from_value(user_name) context.session.touch()", "% ( noform % xml.EscapeCharData( context.session.entity['UserName'].value)) else: form = \"\"\"<form method=\"POST\" action=\"setname\"> <p>Please", "value=\"Set\"/></p> </form>\"\"\" page = page % ( form % (xml.EscapeCharData(self.csrf_token, True), xml.EscapeCharData(context.session.sid(), True)))", "= \"\"\"<form method=\"POST\" action=\"setname\"> <p>Please enter your name: <input type=\"text\" name=\"name\"/> <input 
type=\"hidden\"", "name=\"name\"/> <input type=\"hidden\" name=%s value=%s /> <input type=\"submit\" value=\"Set\"/></p> </form>\"\"\" page = page", "/> <input type=\"submit\" value=\"Set\"/></p> </form>\"\"\" page = page % ( form % (xml.EscapeCharData(self.csrf_token,", "\"\"\"<html><head><title>Session Page</title></head><body> <h1>Session Page</h1> %s </body></html>\"\"\" if context.session.entity['UserName']: noform = \"\"\"<p>Welcome: %s</p>\"\"\" page", "super(MyApp, self).init_dispatcher() self.set_method(\"/\", self.home) self.set_method(\"/setname\", self.setname) @session_decorator def home(self, context): page = \"\"\"<html><head><title>Session", "context.session.entity['UserName']: noform = \"\"\"<p>Welcome: %s</p>\"\"\" page = page % ( noform % xml.EscapeCharData(", "type=\"text\" name=\"name\"/> <input type=\"hidden\" name=%s value=%s /> <input type=\"submit\" value=\"Set\"/></p> </form>\"\"\" page =", "self.setname) @session_decorator def home(self, context): page = \"\"\"<html><head><title>Session Page</title></head><body> <h1>Session Page</h1> %s </body></html>\"\"\"" ]
[ "ind, l in enumerate(all_layers): # print('Name ', l.name, 'trainable' ,l.trainable,'index',ind) # Train your", "', l.name, 'trainable' ,l.trainable,'index',ind) # Train your model as usual. # You can", "from facetool import FaceTool def train_face_model(finetune = True): #===============custom parameters =============== # hidden_dim", "= 'data/validation' save_model_path = './faceDB/face-model.json' save_model_h5 = './faceDB/face-model.h5' save_face_index = './faceDB/face-index.json' # ===============", "facetool import FaceTool def train_face_model(finetune = True): #===============custom parameters =============== # hidden_dim =", "pool5_index = custom_vgg_model.layers.index(custom_vgg_model.get_layer('pool5')) for ind, l in enumerate(all_layers): if ind <= pool5_index: l.trainable", "#OK # adagrad = optimizers.Adagrad( decay=1e-6) # opt = optimizers.Adadelta( ) opt =", "= FaceTool() ftool.write_json(save_face_index,Face_Label_Dic) # Start Fine-tuning custom_vgg_model.fit(X_train, Y_train, batch_size=batch_size, nb_epoch=nb_epoch, shuffle=True, verbose=1, validation_data=(X_valid,", "# ===============Save Model=============== print(\"Saved model to disk\") model_json = custom_vgg_model.to_json() with open(save_model_path, \"w\")", "load_face_data from facetool import FaceTool def train_face_model(finetune = True): #===============custom parameters =============== #", "* One_Class_Train_MAX nb_validation_samples = nb_class * One_Class_Valid_MAX nb_epoch = 10 batch_size = 16", "weights to HDF5 custom_vgg_model.save_weights(save_model_h5) # ===============Test=============== face_index = prdict_one_face(custom_vgg_model, 'data/test/1.jpg') print Face_Label_Dic[face_index] face_index", "'data/test/1.jpg') print Face_Label_Dic[face_index] face_index = prdict_one_face(custom_vgg_model, 'data/test/2.jpg') print Face_Label_Dic[face_index] face_index = prdict_one_face(custom_vgg_model, 'data/test/3.jpg')", "custom_vgg_model.layers pool5_index = 
custom_vgg_model.layers.index(custom_vgg_model.get_layer('pool5')) for ind, l in enumerate(all_layers): if ind <= pool5_index:", "ind <= pool5_index: l.trainable = False # all_layers[:pool5_index].trainable = False # for ind,", "= 16 train_data_dir = 'data/train' validation_data_dir = 'data/validation' save_model_path = './faceDB/face-model.json' save_model_h5 =", "save_model_h5 = './faceDB/face-model.h5' save_face_index = './faceDB/face-index.json' # =============== NN =============== # vgg_model =", "<= pool5_index: l.trainable = False # all_layers[:pool5_index].trainable = False # for ind, l", "to disk\") model_json = custom_vgg_model.to_json() with open(save_model_path, \"w\") as json_file: json_file.write(model_json) # serialize", "batch_size = 16 train_data_dir = 'data/train' validation_data_dir = 'data/validation' save_model_path = './faceDB/face-model.json' save_model_h5", "Face_Label_Dic = load_face_data('data/') ftool = FaceTool() ftool.write_json(save_face_index,Face_Label_Dic) # Start Fine-tuning custom_vgg_model.fit(X_train, Y_train, batch_size=batch_size,", "# adagrad = optimizers.Adagrad( decay=1e-6) # opt = optimizers.Adadelta( ) opt = optimizers.Adam(lr=1e-5,", "nb_validation_samples = nb_class * One_Class_Valid_MAX nb_epoch = 10 batch_size = 16 train_data_dir =", "score = log_loss(Y_valid, predictions_valid) # ===============Save Model=============== print(\"Saved model to disk\") model_json =", "prdict_one_face(custom_vgg_model, 'data/test/2.jpg') print Face_Label_Dic[face_index] face_index = prdict_one_face(custom_vgg_model, 'data/test/3.jpg') print Face_Label_Dic[face_index] if __name__ ==", "import FaceTool def train_face_model(finetune = True): #===============custom parameters =============== # hidden_dim = 512", "= nb_class * One_Class_Train_MAX nb_validation_samples = nb_class * One_Class_Valid_MAX nb_epoch = 10 batch_size", "# =============== NN =============== # vgg_model = VGGFace(include_top=False, input_shape=(224, 224, 3)) # 
print('----------------After", "last_layer = vgg_model.get_layer('pool5').output x = Flatten(name='flatten')(last_layer) x = Dense(hidden_dim, activation='relu', name='fc6')(x) x =", "x = Dense(hidden_dim, activation='relu', name='fc6')(x) x = Dense(hidden_dim, activation='relu', name='fc7')(x) out = Dense(nb_class,", "print('----------------After Disable Trainable----------------') all_layers = custom_vgg_model.layers pool5_index = custom_vgg_model.layers.index(custom_vgg_model.get_layer('pool5')) for ind, l in", "all_layers = custom_vgg_model.layers pool5_index = custom_vgg_model.layers.index(custom_vgg_model.get_layer('pool5')) for ind, l in enumerate(all_layers): if ind", "print('Name ', l.name, 'trainable' ,l.trainable,'index',ind) # Train your model as usual. # You", "One_Class_Valid_MAX = 10 nb_train_samples = nb_class * One_Class_Train_MAX nb_validation_samples = nb_class * One_Class_Valid_MAX", "verbose=1, validation_data=(X_valid, Y_valid), ) # Make predictions predictions_valid = custom_vgg_model.predict(X_valid, batch_size=batch_size, verbose=1) #", "save_face_index = './faceDB/face-index.json' # =============== NN =============== # vgg_model = VGGFace(include_top=False, input_shape=(224, 224,", "import Flatten, Dense, Input from keras import optimizers from keras.preprocessing.image import ImageDataGenerator from", "512 img_width, img_height = 224, 224 nb_class = 16 One_Class_Train_MAX = 30 One_Class_Valid_MAX", "Y_valid), ) # Make predictions predictions_valid = custom_vgg_model.predict(X_valid, batch_size=batch_size, verbose=1) # Cross-entropy loss", "finetune: # print('----------------After Disable Trainable----------------') all_layers = custom_vgg_model.layers pool5_index = custom_vgg_model.layers.index(custom_vgg_model.get_layer('pool5')) for ind,", "Face_Label_Dic[face_index] face_index = prdict_one_face(custom_vgg_model, 'data/test/2.jpg') print Face_Label_Dic[face_index] face_index = prdict_one_face(custom_vgg_model, 'data/test/3.jpg') print 
Face_Label_Dic[face_index]", "img_width, img_height = 224, 224 nb_class = 16 One_Class_Train_MAX = 30 One_Class_Valid_MAX =", "Model from keras.layers import Flatten, Dense, Input from keras import optimizers from keras.preprocessing.image", "= Dense(hidden_dim, activation='relu', name='fc7')(x) out = Dense(nb_class, activation='softmax', name='fc8')(x) custom_vgg_model = Model(vgg_model.input, out)", "if finetune: # print('----------------After Disable Trainable----------------') all_layers = custom_vgg_model.layers pool5_index = custom_vgg_model.layers.index(custom_vgg_model.get_layer('pool5')) for", "X_train, Y_train, X_valid, Y_valid, Face_Label_Dic = load_face_data('data/') ftool = FaceTool() ftool.write_json(save_face_index,Face_Label_Dic) # Start", "can Try different optimizers # opt = optimizers.SGD(lr=1e-5, decay=1e-6) #OK # adagrad =", "loss score score = log_loss(Y_valid, predictions_valid) # ===============Save Model=============== print(\"Saved model to disk\")", "to HDF5 custom_vgg_model.save_weights(save_model_h5) # ===============Test=============== face_index = prdict_one_face(custom_vgg_model, 'data/test/1.jpg') print Face_Label_Dic[face_index] face_index =", "= 10 batch_size = 16 train_data_dir = 'data/train' validation_data_dir = 'data/validation' save_model_path =", "hidden_dim = 512 img_width, img_height = 224, 224 nb_class = 16 One_Class_Train_MAX =", "10 batch_size = 16 train_data_dir = 'data/train' validation_data_dir = 'data/validation' save_model_path = './faceDB/face-model.json'", "prdict_one_face from load_face_data import load_face_data from facetool import FaceTool def train_face_model(finetune = True):", "# vgg_model = VGGFace(include_top=False, input_shape=(224, 224, 3)) # print('----------------After Add finetune layers----------------') #", "batch_size=batch_size, nb_epoch=nb_epoch, shuffle=True, verbose=1, validation_data=(X_valid, Y_valid), ) # Make predictions predictions_valid = custom_vgg_model.predict(X_valid,", "decay=1e-6) 
#OK # adagrad = optimizers.Adagrad( decay=1e-6) # opt = optimizers.Adadelta( ) opt", "Y_valid, Face_Label_Dic = load_face_data('data/') ftool = FaceTool() ftool.write_json(save_face_index,Face_Label_Dic) # Start Fine-tuning custom_vgg_model.fit(X_train, Y_train,", "= optimizers.SGD(lr=1e-5, decay=1e-6) #OK # adagrad = optimizers.Adagrad( decay=1e-6) # opt = optimizers.Adadelta(", "model to disk\") model_json = custom_vgg_model.to_json() with open(save_model_path, \"w\") as json_file: json_file.write(model_json) #", "import Model from keras.layers import Flatten, Dense, Input from keras import optimizers from", "FaceTool def train_face_model(finetune = True): #===============custom parameters =============== # hidden_dim = 512 img_width,", "'./faceDB/face-model.json' save_model_h5 = './faceDB/face-model.h5' save_face_index = './faceDB/face-index.json' # =============== NN =============== # vgg_model", "in enumerate(all_layers): # print('Name ', l.name, 'trainable' ,l.trainable,'index',ind) # Train your model as", "log_loss(Y_valid, predictions_valid) # ===============Save Model=============== print(\"Saved model to disk\") model_json = custom_vgg_model.to_json() with", "VGGFace from sklearn.metrics import log_loss from one_face_predict import prdict_one_face from load_face_data import load_face_data", "keras import optimizers from keras.preprocessing.image import ImageDataGenerator from vggface import VGGFace from sklearn.metrics", "train_data_dir = 'data/train' validation_data_dir = 'data/validation' save_model_path = './faceDB/face-model.json' save_model_h5 = './faceDB/face-model.h5' save_face_index", "VGGFace(include_top=False, input_shape=(224, 224, 3)) # print('----------------After Add finetune layers----------------') # for l in", "layers----------------') # for l in vgg_model.layers: # print('Name ', l.name, 'trainable' ,l.trainable) last_layer", "opt = optimizers.Adadelta( ) opt = optimizers.Adam(lr=1e-5, decay=1e-6) custom_vgg_model.compile(optimizer=opt, 
loss='categorical_crossentropy', metrics=['accuracy']) custom_vgg_model.summary() X_train,", "custom_vgg_model.to_json() with open(save_model_path, \"w\") as json_file: json_file.write(model_json) # serialize weights to HDF5 custom_vgg_model.save_weights(save_model_h5)", "Model(vgg_model.input, out) if finetune: # print('----------------After Disable Trainable----------------') all_layers = custom_vgg_model.layers pool5_index =", "activation='relu', name='fc6')(x) x = Dense(hidden_dim, activation='relu', name='fc7')(x) out = Dense(nb_class, activation='softmax', name='fc8')(x) custom_vgg_model", "all_layers[:pool5_index].trainable = False # for ind, l in enumerate(all_layers): # print('Name ', l.name,", "activation='softmax', name='fc8')(x) custom_vgg_model = Model(vgg_model.input, out) if finetune: # print('----------------After Disable Trainable----------------') all_layers", "open(save_model_path, \"w\") as json_file: json_file.write(model_json) # serialize weights to HDF5 custom_vgg_model.save_weights(save_model_h5) # ===============Test===============", ",l.trainable) last_layer = vgg_model.get_layer('pool5').output x = Flatten(name='flatten')(last_layer) x = Dense(hidden_dim, activation='relu', name='fc6')(x) x", "= False # for ind, l in enumerate(all_layers): # print('Name ', l.name, 'trainable'", "= 30 One_Class_Valid_MAX = 10 nb_train_samples = nb_class * One_Class_Train_MAX nb_validation_samples = nb_class", "One_Class_Train_MAX nb_validation_samples = nb_class * One_Class_Valid_MAX nb_epoch = 10 batch_size = 16 train_data_dir", "for l in vgg_model.layers: # print('Name ', l.name, 'trainable' ,l.trainable) last_layer = vgg_model.get_layer('pool5').output", "# hidden_dim = 512 img_width, img_height = 224, 224 nb_class = 16 One_Class_Train_MAX", "= load_face_data('data/') ftool = FaceTool() ftool.write_json(save_face_index,Face_Label_Dic) # Start Fine-tuning custom_vgg_model.fit(X_train, Y_train, batch_size=batch_size, nb_epoch=nb_epoch,", 
"custom_vgg_model.fit(X_train, Y_train, batch_size=batch_size, nb_epoch=nb_epoch, shuffle=True, verbose=1, validation_data=(X_valid, Y_valid), ) # Make predictions predictions_valid", "= 10 nb_train_samples = nb_class * One_Class_Train_MAX nb_validation_samples = nb_class * One_Class_Valid_MAX nb_epoch", "decay=1e-6) # opt = optimizers.Adadelta( ) opt = optimizers.Adam(lr=1e-5, decay=1e-6) custom_vgg_model.compile(optimizer=opt, loss='categorical_crossentropy', metrics=['accuracy'])", "one_face_predict import prdict_one_face from load_face_data import load_face_data from facetool import FaceTool def train_face_model(finetune", "input_shape=(224, 224, 3)) # print('----------------After Add finetune layers----------------') # for l in vgg_model.layers:", "224 nb_class = 16 One_Class_Train_MAX = 30 One_Class_Valid_MAX = 10 nb_train_samples = nb_class", "from keras import optimizers from keras.preprocessing.image import ImageDataGenerator from vggface import VGGFace from", "= vgg_model.get_layer('pool5').output x = Flatten(name='flatten')(last_layer) x = Dense(hidden_dim, activation='relu', name='fc6')(x) x = Dense(hidden_dim,", "validation_data_dir = 'data/validation' save_model_path = './faceDB/face-model.json' save_model_h5 = './faceDB/face-model.h5' save_face_index = './faceDB/face-index.json' #", "name='fc7')(x) out = Dense(nb_class, activation='softmax', name='fc8')(x) custom_vgg_model = Model(vgg_model.input, out) if finetune: #", "False # all_layers[:pool5_index].trainable = False # for ind, l in enumerate(all_layers): # print('Name", "import optimizers from keras.preprocessing.image import ImageDataGenerator from vggface import VGGFace from sklearn.metrics import", "= optimizers.Adagrad( decay=1e-6) # opt = optimizers.Adadelta( ) opt = optimizers.Adam(lr=1e-5, decay=1e-6) custom_vgg_model.compile(optimizer=opt,", "= optimizers.Adadelta( ) opt = optimizers.Adam(lr=1e-5, decay=1e-6) custom_vgg_model.compile(optimizer=opt, loss='categorical_crossentropy', 
metrics=['accuracy']) custom_vgg_model.summary() X_train, Y_train,", "nb_class * One_Class_Valid_MAX nb_epoch = 10 batch_size = 16 train_data_dir = 'data/train' validation_data_dir", "vgg_model = VGGFace(include_top=False, input_shape=(224, 224, 3)) # print('----------------After Add finetune layers----------------') # for", "= Flatten(name='flatten')(last_layer) x = Dense(hidden_dim, activation='relu', name='fc6')(x) x = Dense(hidden_dim, activation='relu', name='fc7')(x) out", "keras.engine import Model from keras.layers import Flatten, Dense, Input from keras import optimizers", "# for ind, l in enumerate(all_layers): # print('Name ', l.name, 'trainable' ,l.trainable,'index',ind) #", "', l.name, 'trainable' ,l.trainable) last_layer = vgg_model.get_layer('pool5').output x = Flatten(name='flatten')(last_layer) x = Dense(hidden_dim,", "Dense(hidden_dim, activation='relu', name='fc6')(x) x = Dense(hidden_dim, activation='relu', name='fc7')(x) out = Dense(nb_class, activation='softmax', name='fc8')(x)", "import ImageDataGenerator from vggface import VGGFace from sklearn.metrics import log_loss from one_face_predict import", "One_Class_Train_MAX = 30 One_Class_Valid_MAX = 10 nb_train_samples = nb_class * One_Class_Train_MAX nb_validation_samples =", "= custom_vgg_model.predict(X_valid, batch_size=batch_size, verbose=1) # Cross-entropy loss score score = log_loss(Y_valid, predictions_valid) #", "img_height = 224, 224 nb_class = 16 One_Class_Train_MAX = 30 One_Class_Valid_MAX = 10", "enumerate(all_layers): if ind <= pool5_index: l.trainable = False # all_layers[:pool5_index].trainable = False #", "= True): #===============custom parameters =============== # hidden_dim = 512 img_width, img_height = 224,", "in vgg_model.layers: # print('Name ', l.name, 'trainable' ,l.trainable) last_layer = vgg_model.get_layer('pool5').output x =", "Model=============== print(\"Saved model to disk\") model_json = custom_vgg_model.to_json() with open(save_model_path, \"w\") as json_file:", 
"= VGGFace(include_top=False, input_shape=(224, 224, 3)) # print('----------------After Add finetune layers----------------') # for l", "224, 3)) # print('----------------After Add finetune layers----------------') # for l in vgg_model.layers: #", "=============== # vgg_model = VGGFace(include_top=False, input_shape=(224, 224, 3)) # print('----------------After Add finetune layers----------------')", "from load_face_data import load_face_data from facetool import FaceTool def train_face_model(finetune = True): #===============custom", "l.name, 'trainable' ,l.trainable,'index',ind) # Train your model as usual. # You can Try", "# opt = optimizers.Adadelta( ) opt = optimizers.Adam(lr=1e-5, decay=1e-6) custom_vgg_model.compile(optimizer=opt, loss='categorical_crossentropy', metrics=['accuracy']) custom_vgg_model.summary()", "loss='categorical_crossentropy', metrics=['accuracy']) custom_vgg_model.summary() X_train, Y_train, X_valid, Y_valid, Face_Label_Dic = load_face_data('data/') ftool = FaceTool()", "in enumerate(all_layers): if ind <= pool5_index: l.trainable = False # all_layers[:pool5_index].trainable = False", "shuffle=True, verbose=1, validation_data=(X_valid, Y_valid), ) # Make predictions predictions_valid = custom_vgg_model.predict(X_valid, batch_size=batch_size, verbose=1)", "= Model(vgg_model.input, out) if finetune: # print('----------------After Disable Trainable----------------') all_layers = custom_vgg_model.layers pool5_index", "print(\"Saved model to disk\") model_json = custom_vgg_model.to_json() with open(save_model_path, \"w\") as json_file: json_file.write(model_json)", "load_face_data import load_face_data from facetool import FaceTool def train_face_model(finetune = True): #===============custom parameters", "x = Dense(hidden_dim, activation='relu', name='fc7')(x) out = Dense(nb_class, activation='softmax', name='fc8')(x) custom_vgg_model = Model(vgg_model.input,", "=============== # hidden_dim = 512 img_width, img_height = 224, 224 nb_class = 
16", "metrics=['accuracy']) custom_vgg_model.summary() X_train, Y_train, X_valid, Y_valid, Face_Label_Dic = load_face_data('data/') ftool = FaceTool() ftool.write_json(save_face_index,Face_Label_Dic)", "Y_train, batch_size=batch_size, nb_epoch=nb_epoch, shuffle=True, verbose=1, validation_data=(X_valid, Y_valid), ) # Make predictions predictions_valid =", "X_valid, Y_valid, Face_Label_Dic = load_face_data('data/') ftool = FaceTool() ftool.write_json(save_face_index,Face_Label_Dic) # Start Fine-tuning custom_vgg_model.fit(X_train,", "Start Fine-tuning custom_vgg_model.fit(X_train, Y_train, batch_size=batch_size, nb_epoch=nb_epoch, shuffle=True, verbose=1, validation_data=(X_valid, Y_valid), ) # Make", "predictions_valid) # ===============Save Model=============== print(\"Saved model to disk\") model_json = custom_vgg_model.to_json() with open(save_model_path,", "= custom_vgg_model.to_json() with open(save_model_path, \"w\") as json_file: json_file.write(model_json) # serialize weights to HDF5", "load_face_data('data/') ftool = FaceTool() ftool.write_json(save_face_index,Face_Label_Dic) # Start Fine-tuning custom_vgg_model.fit(X_train, Y_train, batch_size=batch_size, nb_epoch=nb_epoch, shuffle=True,", "opt = optimizers.Adam(lr=1e-5, decay=1e-6) custom_vgg_model.compile(optimizer=opt, loss='categorical_crossentropy', metrics=['accuracy']) custom_vgg_model.summary() X_train, Y_train, X_valid, Y_valid, Face_Label_Dic", "prdict_one_face(custom_vgg_model, 'data/test/1.jpg') print Face_Label_Dic[face_index] face_index = prdict_one_face(custom_vgg_model, 'data/test/2.jpg') print Face_Label_Dic[face_index] face_index = prdict_one_face(custom_vgg_model,", "= custom_vgg_model.layers pool5_index = custom_vgg_model.layers.index(custom_vgg_model.get_layer('pool5')) for ind, l in enumerate(all_layers): if ind <=", "from one_face_predict import prdict_one_face from load_face_data import load_face_data from facetool import FaceTool def", "json_file: json_file.write(model_json) # 
serialize weights to HDF5 custom_vgg_model.save_weights(save_model_h5) # ===============Test=============== face_index = prdict_one_face(custom_vgg_model,", "print Face_Label_Dic[face_index] face_index = prdict_one_face(custom_vgg_model, 'data/test/2.jpg') print Face_Label_Dic[face_index] face_index = prdict_one_face(custom_vgg_model, 'data/test/3.jpg') print", "optimizers.SGD(lr=1e-5, decay=1e-6) #OK # adagrad = optimizers.Adagrad( decay=1e-6) # opt = optimizers.Adadelta( )", "Disable Trainable----------------') all_layers = custom_vgg_model.layers pool5_index = custom_vgg_model.layers.index(custom_vgg_model.get_layer('pool5')) for ind, l in enumerate(all_layers):", "Dense(hidden_dim, activation='relu', name='fc7')(x) out = Dense(nb_class, activation='softmax', name='fc8')(x) custom_vgg_model = Model(vgg_model.input, out) if", "3)) # print('----------------After Add finetune layers----------------') # for l in vgg_model.layers: # print('Name", "= optimizers.Adam(lr=1e-5, decay=1e-6) custom_vgg_model.compile(optimizer=opt, loss='categorical_crossentropy', metrics=['accuracy']) custom_vgg_model.summary() X_train, Y_train, X_valid, Y_valid, Face_Label_Dic =", "# You can Try different optimizers # opt = optimizers.SGD(lr=1e-5, decay=1e-6) #OK #", "face_index = prdict_one_face(custom_vgg_model, 'data/test/2.jpg') print Face_Label_Dic[face_index] face_index = prdict_one_face(custom_vgg_model, 'data/test/3.jpg') print Face_Label_Dic[face_index] if", "optimizers from keras.preprocessing.image import ImageDataGenerator from vggface import VGGFace from sklearn.metrics import log_loss", "# print('----------------After Add finetune layers----------------') # for l in vgg_model.layers: # print('Name ',", "ind, l in enumerate(all_layers): if ind <= pool5_index: l.trainable = False # all_layers[:pool5_index].trainable", "out = Dense(nb_class, activation='softmax', name='fc8')(x) custom_vgg_model = Model(vgg_model.input, out) if finetune: # print('----------------After", "out) if 
finetune: # print('----------------After Disable Trainable----------------') all_layers = custom_vgg_model.layers pool5_index = custom_vgg_model.layers.index(custom_vgg_model.get_layer('pool5'))", "name='fc6')(x) x = Dense(hidden_dim, activation='relu', name='fc7')(x) out = Dense(nb_class, activation='softmax', name='fc8')(x) custom_vgg_model =", "'trainable' ,l.trainable,'index',ind) # Train your model as usual. # You can Try different", "l.trainable = False # all_layers[:pool5_index].trainable = False # for ind, l in enumerate(all_layers):", "face_index = prdict_one_face(custom_vgg_model, 'data/test/1.jpg') print Face_Label_Dic[face_index] face_index = prdict_one_face(custom_vgg_model, 'data/test/2.jpg') print Face_Label_Dic[face_index] face_index", "Make predictions predictions_valid = custom_vgg_model.predict(X_valid, batch_size=batch_size, verbose=1) # Cross-entropy loss score score =", "= Dense(hidden_dim, activation='relu', name='fc6')(x) x = Dense(hidden_dim, activation='relu', name='fc7')(x) out = Dense(nb_class, activation='softmax',", "Add finetune layers----------------') # for l in vgg_model.layers: # print('Name ', l.name, 'trainable'", "import load_face_data from facetool import FaceTool def train_face_model(finetune = True): #===============custom parameters ===============", "= 512 img_width, img_height = 224, 224 nb_class = 16 One_Class_Train_MAX = 30", "10 nb_train_samples = nb_class * One_Class_Train_MAX nb_validation_samples = nb_class * One_Class_Valid_MAX nb_epoch =", "\"w\") as json_file: json_file.write(model_json) # serialize weights to HDF5 custom_vgg_model.save_weights(save_model_h5) # ===============Test=============== face_index", "# for l in vgg_model.layers: # print('Name ', l.name, 'trainable' ,l.trainable) last_layer =", "predictions_valid = custom_vgg_model.predict(X_valid, batch_size=batch_size, verbose=1) # Cross-entropy loss score score = log_loss(Y_valid, predictions_valid)", "= Dense(nb_class, activation='softmax', 
name='fc8')(x) custom_vgg_model = Model(vgg_model.input, out) if finetune: # print('----------------After Disable", "if ind <= pool5_index: l.trainable = False # all_layers[:pool5_index].trainable = False # for", "FaceTool() ftool.write_json(save_face_index,Face_Label_Dic) # Start Fine-tuning custom_vgg_model.fit(X_train, Y_train, batch_size=batch_size, nb_epoch=nb_epoch, shuffle=True, verbose=1, validation_data=(X_valid, Y_valid),", "name='fc8')(x) custom_vgg_model = Model(vgg_model.input, out) if finetune: # print('----------------After Disable Trainable----------------') all_layers =", "One_Class_Valid_MAX nb_epoch = 10 batch_size = 16 train_data_dir = 'data/train' validation_data_dir = 'data/validation'", "Input from keras import optimizers from keras.preprocessing.image import ImageDataGenerator from vggface import VGGFace", "'data/test/2.jpg') print Face_Label_Dic[face_index] face_index = prdict_one_face(custom_vgg_model, 'data/test/3.jpg') print Face_Label_Dic[face_index] if __name__ == '__main__':", "'data/validation' save_model_path = './faceDB/face-model.json' save_model_h5 = './faceDB/face-model.h5' save_face_index = './faceDB/face-index.json' # =============== NN", "nb_class * One_Class_Train_MAX nb_validation_samples = nb_class * One_Class_Valid_MAX nb_epoch = 10 batch_size =", "# Cross-entropy loss score score = log_loss(Y_valid, predictions_valid) # ===============Save Model=============== print(\"Saved model", "import VGGFace from sklearn.metrics import log_loss from one_face_predict import prdict_one_face from load_face_data import", "for ind, l in enumerate(all_layers): if ind <= pool5_index: l.trainable = False #", "disk\") model_json = custom_vgg_model.to_json() with open(save_model_path, \"w\") as json_file: json_file.write(model_json) # serialize weights", "activation='relu', name='fc7')(x) out = Dense(nb_class, activation='softmax', name='fc8')(x) custom_vgg_model = Model(vgg_model.input, out) if finetune:", "ftool = FaceTool() 
ftool.write_json(save_face_index,Face_Label_Dic) # Start Fine-tuning custom_vgg_model.fit(X_train, Y_train, batch_size=batch_size, nb_epoch=nb_epoch, shuffle=True, verbose=1,", "You can Try different optimizers # opt = optimizers.SGD(lr=1e-5, decay=1e-6) #OK # adagrad", "parameters =============== # hidden_dim = 512 img_width, img_height = 224, 224 nb_class =", "Flatten(name='flatten')(last_layer) x = Dense(hidden_dim, activation='relu', name='fc6')(x) x = Dense(hidden_dim, activation='relu', name='fc7')(x) out =", "Try different optimizers # opt = optimizers.SGD(lr=1e-5, decay=1e-6) #OK # adagrad = optimizers.Adagrad(", "False # for ind, l in enumerate(all_layers): # print('Name ', l.name, 'trainable' ,l.trainable,'index',ind)", "l in enumerate(all_layers): if ind <= pool5_index: l.trainable = False # all_layers[:pool5_index].trainable =", "= nb_class * One_Class_Valid_MAX nb_epoch = 10 batch_size = 16 train_data_dir = 'data/train'", "Cross-entropy loss score score = log_loss(Y_valid, predictions_valid) # ===============Save Model=============== print(\"Saved model to", "# all_layers[:pool5_index].trainable = False # for ind, l in enumerate(all_layers): # print('Name ',", "keras.preprocessing.image import ImageDataGenerator from vggface import VGGFace from sklearn.metrics import log_loss from one_face_predict", "= './faceDB/face-index.json' # =============== NN =============== # vgg_model = VGGFace(include_top=False, input_shape=(224, 224, 3))", "optimizers # opt = optimizers.SGD(lr=1e-5, decay=1e-6) #OK # adagrad = optimizers.Adagrad( decay=1e-6) #", "model_json = custom_vgg_model.to_json() with open(save_model_path, \"w\") as json_file: json_file.write(model_json) # serialize weights to", "with open(save_model_path, \"w\") as json_file: json_file.write(model_json) # serialize weights to HDF5 custom_vgg_model.save_weights(save_model_h5) #", "vgg_model.layers: # print('Name ', l.name, 'trainable' ,l.trainable) last_layer = vgg_model.get_layer('pool5').output x = 
Flatten(name='flatten')(last_layer)", "print('----------------After Add finetune layers----------------') # for l in vgg_model.layers: # print('Name ', l.name,", "for ind, l in enumerate(all_layers): # print('Name ', l.name, 'trainable' ,l.trainable,'index',ind) # Train", "= './faceDB/face-model.h5' save_face_index = './faceDB/face-index.json' # =============== NN =============== # vgg_model = VGGFace(include_top=False,", "opt = optimizers.SGD(lr=1e-5, decay=1e-6) #OK # adagrad = optimizers.Adagrad( decay=1e-6) # opt =", "# print('Name ', l.name, 'trainable' ,l.trainable,'index',ind) # Train your model as usual. #", "save_model_path = './faceDB/face-model.json' save_model_h5 = './faceDB/face-model.h5' save_face_index = './faceDB/face-index.json' # =============== NN ===============", "finetune layers----------------') # for l in vgg_model.layers: # print('Name ', l.name, 'trainable' ,l.trainable)", "= log_loss(Y_valid, predictions_valid) # ===============Save Model=============== print(\"Saved model to disk\") model_json = custom_vgg_model.to_json()", "# Train your model as usual. # You can Try different optimizers #", "= custom_vgg_model.layers.index(custom_vgg_model.get_layer('pool5')) for ind, l in enumerate(all_layers): if ind <= pool5_index: l.trainable =", "usual. 
# You can Try different optimizers # opt = optimizers.SGD(lr=1e-5, decay=1e-6) #OK", "decay=1e-6) custom_vgg_model.compile(optimizer=opt, loss='categorical_crossentropy', metrics=['accuracy']) custom_vgg_model.summary() X_train, Y_train, X_valid, Y_valid, Face_Label_Dic = load_face_data('data/') ftool", "Y_train, X_valid, Y_valid, Face_Label_Dic = load_face_data('data/') ftool = FaceTool() ftool.write_json(save_face_index,Face_Label_Dic) # Start Fine-tuning", "# Start Fine-tuning custom_vgg_model.fit(X_train, Y_train, batch_size=batch_size, nb_epoch=nb_epoch, shuffle=True, verbose=1, validation_data=(X_valid, Y_valid), ) #", "* One_Class_Valid_MAX nb_epoch = 10 batch_size = 16 train_data_dir = 'data/train' validation_data_dir =", "Fine-tuning custom_vgg_model.fit(X_train, Y_train, batch_size=batch_size, nb_epoch=nb_epoch, shuffle=True, verbose=1, validation_data=(X_valid, Y_valid), ) # Make predictions", "log_loss from one_face_predict import prdict_one_face from load_face_data import load_face_data from facetool import FaceTool", "l.name, 'trainable' ,l.trainable) last_layer = vgg_model.get_layer('pool5').output x = Flatten(name='flatten')(last_layer) x = Dense(hidden_dim, activation='relu',", "ImageDataGenerator from vggface import VGGFace from sklearn.metrics import log_loss from one_face_predict import prdict_one_face", "'trainable' ,l.trainable) last_layer = vgg_model.get_layer('pool5').output x = Flatten(name='flatten')(last_layer) x = Dense(hidden_dim, activation='relu', name='fc6')(x)", "Trainable----------------') all_layers = custom_vgg_model.layers pool5_index = custom_vgg_model.layers.index(custom_vgg_model.get_layer('pool5')) for ind, l in enumerate(all_layers): if", "custom_vgg_model.compile(optimizer=opt, loss='categorical_crossentropy', metrics=['accuracy']) custom_vgg_model.summary() X_train, Y_train, X_valid, Y_valid, Face_Label_Dic = load_face_data('data/') ftool =", "your model as usual. 
# You can Try different optimizers # opt =", "nb_epoch = 10 batch_size = 16 train_data_dir = 'data/train' validation_data_dir = 'data/validation' save_model_path", "'data/train' validation_data_dir = 'data/validation' save_model_path = './faceDB/face-model.json' save_model_h5 = './faceDB/face-model.h5' save_face_index = './faceDB/face-index.json'", "#===============custom parameters =============== # hidden_dim = 512 img_width, img_height = 224, 224 nb_class", "print('Name ', l.name, 'trainable' ,l.trainable) last_layer = vgg_model.get_layer('pool5').output x = Flatten(name='flatten')(last_layer) x =", "import prdict_one_face from load_face_data import load_face_data from facetool import FaceTool def train_face_model(finetune =", "Flatten, Dense, Input from keras import optimizers from keras.preprocessing.image import ImageDataGenerator from vggface", "= 'data/train' validation_data_dir = 'data/validation' save_model_path = './faceDB/face-model.json' save_model_h5 = './faceDB/face-model.h5' save_face_index =", "'./faceDB/face-model.h5' save_face_index = './faceDB/face-index.json' # =============== NN =============== # vgg_model = VGGFace(include_top=False, input_shape=(224,", "vggface import VGGFace from sklearn.metrics import log_loss from one_face_predict import prdict_one_face from load_face_data", "def train_face_model(finetune = True): #===============custom parameters =============== # hidden_dim = 512 img_width, img_height", "= 16 One_Class_Train_MAX = 30 One_Class_Valid_MAX = 10 nb_train_samples = nb_class * One_Class_Train_MAX", "30 One_Class_Valid_MAX = 10 nb_train_samples = nb_class * One_Class_Train_MAX nb_validation_samples = nb_class *", "custom_vgg_model.predict(X_valid, batch_size=batch_size, verbose=1) # Cross-entropy loss score score = log_loss(Y_valid, predictions_valid) # ===============Save", "verbose=1) # Cross-entropy loss score score = log_loss(Y_valid, predictions_valid) # ===============Save Model=============== print(\"Saved", "= 
prdict_one_face(custom_vgg_model, 'data/test/2.jpg') print Face_Label_Dic[face_index] face_index = prdict_one_face(custom_vgg_model, 'data/test/3.jpg') print Face_Label_Dic[face_index] if __name__", "===============Test=============== face_index = prdict_one_face(custom_vgg_model, 'data/test/1.jpg') print Face_Label_Dic[face_index] face_index = prdict_one_face(custom_vgg_model, 'data/test/2.jpg') print Face_Label_Dic[face_index]", "validation_data=(X_valid, Y_valid), ) # Make predictions predictions_valid = custom_vgg_model.predict(X_valid, batch_size=batch_size, verbose=1) # Cross-entropy", "different optimizers # opt = optimizers.SGD(lr=1e-5, decay=1e-6) #OK # adagrad = optimizers.Adagrad( decay=1e-6)", "custom_vgg_model.summary() X_train, Y_train, X_valid, Y_valid, Face_Label_Dic = load_face_data('data/') ftool = FaceTool() ftool.write_json(save_face_index,Face_Label_Dic) #", "# ===============Test=============== face_index = prdict_one_face(custom_vgg_model, 'data/test/1.jpg') print Face_Label_Dic[face_index] face_index = prdict_one_face(custom_vgg_model, 'data/test/2.jpg') print", "adagrad = optimizers.Adagrad( decay=1e-6) # opt = optimizers.Adadelta( ) opt = optimizers.Adam(lr=1e-5, decay=1e-6)", "as json_file: json_file.write(model_json) # serialize weights to HDF5 custom_vgg_model.save_weights(save_model_h5) # ===============Test=============== face_index =", "# serialize weights to HDF5 custom_vgg_model.save_weights(save_model_h5) # ===============Test=============== face_index = prdict_one_face(custom_vgg_model, 'data/test/1.jpg') print", ") # Make predictions predictions_valid = custom_vgg_model.predict(X_valid, batch_size=batch_size, verbose=1) # Cross-entropy loss score", "predictions predictions_valid = custom_vgg_model.predict(X_valid, batch_size=batch_size, verbose=1) # Cross-entropy loss score score = log_loss(Y_valid,", "batch_size=batch_size, verbose=1) # Cross-entropy loss score score = log_loss(Y_valid, predictions_valid) # ===============Save 
Model===============", "from keras.preprocessing.image import ImageDataGenerator from vggface import VGGFace from sklearn.metrics import log_loss from", "import log_loss from one_face_predict import prdict_one_face from load_face_data import load_face_data from facetool import", ") opt = optimizers.Adam(lr=1e-5, decay=1e-6) custom_vgg_model.compile(optimizer=opt, loss='categorical_crossentropy', metrics=['accuracy']) custom_vgg_model.summary() X_train, Y_train, X_valid, Y_valid,", "# Make predictions predictions_valid = custom_vgg_model.predict(X_valid, batch_size=batch_size, verbose=1) # Cross-entropy loss score score", "keras.layers import Flatten, Dense, Input from keras import optimizers from keras.preprocessing.image import ImageDataGenerator", "x = Flatten(name='flatten')(last_layer) x = Dense(hidden_dim, activation='relu', name='fc6')(x) x = Dense(hidden_dim, activation='relu', name='fc7')(x)", "l in vgg_model.layers: # print('Name ', l.name, 'trainable' ,l.trainable) last_layer = vgg_model.get_layer('pool5').output x", "json_file.write(model_json) # serialize weights to HDF5 custom_vgg_model.save_weights(save_model_h5) # ===============Test=============== face_index = prdict_one_face(custom_vgg_model, 'data/test/1.jpg')", "serialize weights to HDF5 custom_vgg_model.save_weights(save_model_h5) # ===============Test=============== face_index = prdict_one_face(custom_vgg_model, 'data/test/1.jpg') print Face_Label_Dic[face_index]", "NN =============== # vgg_model = VGGFace(include_top=False, input_shape=(224, 224, 3)) # print('----------------After Add finetune", "custom_vgg_model = Model(vgg_model.input, out) if finetune: # print('----------------After Disable Trainable----------------') all_layers = custom_vgg_model.layers", "= 224, 224 nb_class = 16 One_Class_Train_MAX = 30 One_Class_Valid_MAX = 10 nb_train_samples", "# opt = optimizers.SGD(lr=1e-5, decay=1e-6) #OK # adagrad = optimizers.Adagrad( decay=1e-6) # opt", "from sklearn.metrics import log_loss from 
one_face_predict import prdict_one_face from load_face_data import load_face_data from", "from keras.layers import Flatten, Dense, Input from keras import optimizers from keras.preprocessing.image import", "from vggface import VGGFace from sklearn.metrics import log_loss from one_face_predict import prdict_one_face from", "vgg_model.get_layer('pool5').output x = Flatten(name='flatten')(last_layer) x = Dense(hidden_dim, activation='relu', name='fc6')(x) x = Dense(hidden_dim, activation='relu',", "model as usual. # You can Try different optimizers # opt = optimizers.SGD(lr=1e-5,", "= prdict_one_face(custom_vgg_model, 'data/test/1.jpg') print Face_Label_Dic[face_index] face_index = prdict_one_face(custom_vgg_model, 'data/test/2.jpg') print Face_Label_Dic[face_index] face_index =", "Train your model as usual. # You can Try different optimizers # opt", "'./faceDB/face-index.json' # =============== NN =============== # vgg_model = VGGFace(include_top=False, input_shape=(224, 224, 3)) #", "# print('Name ', l.name, 'trainable' ,l.trainable) last_layer = vgg_model.get_layer('pool5').output x = Flatten(name='flatten')(last_layer) x", "===============Save Model=============== print(\"Saved model to disk\") model_json = custom_vgg_model.to_json() with open(save_model_path, \"w\") as", "nb_class = 16 One_Class_Train_MAX = 30 One_Class_Valid_MAX = 10 nb_train_samples = nb_class *", "sklearn.metrics import log_loss from one_face_predict import prdict_one_face from load_face_data import load_face_data from facetool", "Dense(nb_class, activation='softmax', name='fc8')(x) custom_vgg_model = Model(vgg_model.input, out) if finetune: # print('----------------After Disable Trainable----------------')", "train_face_model(finetune = True): #===============custom parameters =============== # hidden_dim = 512 img_width, img_height =", "custom_vgg_model.layers.index(custom_vgg_model.get_layer('pool5')) for ind, l in enumerate(all_layers): if ind <= pool5_index: l.trainable = False", 
"pool5_index: l.trainable = False # all_layers[:pool5_index].trainable = False # for ind, l in", "enumerate(all_layers): # print('Name ', l.name, 'trainable' ,l.trainable,'index',ind) # Train your model as usual.", "score score = log_loss(Y_valid, predictions_valid) # ===============Save Model=============== print(\"Saved model to disk\") model_json", "=============== NN =============== # vgg_model = VGGFace(include_top=False, input_shape=(224, 224, 3)) # print('----------------After Add", "HDF5 custom_vgg_model.save_weights(save_model_h5) # ===============Test=============== face_index = prdict_one_face(custom_vgg_model, 'data/test/1.jpg') print Face_Label_Dic[face_index] face_index = prdict_one_face(custom_vgg_model,", "custom_vgg_model.save_weights(save_model_h5) # ===============Test=============== face_index = prdict_one_face(custom_vgg_model, 'data/test/1.jpg') print Face_Label_Dic[face_index] face_index = prdict_one_face(custom_vgg_model, 'data/test/2.jpg')", "16 train_data_dir = 'data/train' validation_data_dir = 'data/validation' save_model_path = './faceDB/face-model.json' save_model_h5 = './faceDB/face-model.h5'", "True): #===============custom parameters =============== # hidden_dim = 512 img_width, img_height = 224, 224", "nb_epoch=nb_epoch, shuffle=True, verbose=1, validation_data=(X_valid, Y_valid), ) # Make predictions predictions_valid = custom_vgg_model.predict(X_valid, batch_size=batch_size,", "optimizers.Adadelta( ) opt = optimizers.Adam(lr=1e-5, decay=1e-6) custom_vgg_model.compile(optimizer=opt, loss='categorical_crossentropy', metrics=['accuracy']) custom_vgg_model.summary() X_train, Y_train, X_valid,", "# print('----------------After Disable Trainable----------------') all_layers = custom_vgg_model.layers pool5_index = custom_vgg_model.layers.index(custom_vgg_model.get_layer('pool5')) for ind, l", "nb_train_samples = nb_class * One_Class_Train_MAX nb_validation_samples = nb_class * One_Class_Valid_MAX nb_epoch = 10", "= 
'./faceDB/face-model.json' save_model_h5 = './faceDB/face-model.h5' save_face_index = './faceDB/face-index.json' # =============== NN =============== #", "optimizers.Adam(lr=1e-5, decay=1e-6) custom_vgg_model.compile(optimizer=opt, loss='categorical_crossentropy', metrics=['accuracy']) custom_vgg_model.summary() X_train, Y_train, X_valid, Y_valid, Face_Label_Dic = load_face_data('data/')", "print Face_Label_Dic[face_index] face_index = prdict_one_face(custom_vgg_model, 'data/test/3.jpg') print Face_Label_Dic[face_index] if __name__ == '__main__': train_face_model(False)", ",l.trainable,'index',ind) # Train your model as usual. # You can Try different optimizers", "ftool.write_json(save_face_index,Face_Label_Dic) # Start Fine-tuning custom_vgg_model.fit(X_train, Y_train, batch_size=batch_size, nb_epoch=nb_epoch, shuffle=True, verbose=1, validation_data=(X_valid, Y_valid), )", "from keras.engine import Model from keras.layers import Flatten, Dense, Input from keras import", "16 One_Class_Train_MAX = 30 One_Class_Valid_MAX = 10 nb_train_samples = nb_class * One_Class_Train_MAX nb_validation_samples", "= False # all_layers[:pool5_index].trainable = False # for ind, l in enumerate(all_layers): #", "224, 224 nb_class = 16 One_Class_Train_MAX = 30 One_Class_Valid_MAX = 10 nb_train_samples =", "l in enumerate(all_layers): # print('Name ', l.name, 'trainable' ,l.trainable,'index',ind) # Train your model", "Dense, Input from keras import optimizers from keras.preprocessing.image import ImageDataGenerator from vggface import", "as usual. # You can Try different optimizers # opt = optimizers.SGD(lr=1e-5, decay=1e-6)", "optimizers.Adagrad( decay=1e-6) # opt = optimizers.Adadelta( ) opt = optimizers.Adam(lr=1e-5, decay=1e-6) custom_vgg_model.compile(optimizer=opt, loss='categorical_crossentropy'," ]
[]
[ "import ABC, abstractmethod class CustomerServiceInterface(ABC): @abstractmethod def get_all_accounts_for_user(self, id: int) -> list: pass", "abc import ABC, abstractmethod class CustomerServiceInterface(ABC): @abstractmethod def get_all_accounts_for_user(self, id: int) -> list:", "from abc import ABC, abstractmethod class CustomerServiceInterface(ABC): @abstractmethod def get_all_accounts_for_user(self, id: int) ->" ]
[ "pathlib from typing import ( Iterable ) CONTRACTS_ROOT = \"./scripts/benchmark/contract_data/\" CONTRACTS = [", "import pathlib from typing import ( Iterable ) CONTRACTS_ROOT = \"./scripts/benchmark/contract_data/\" CONTRACTS =", "CONTRACTS_ROOT = \"./scripts/benchmark/contract_data/\" CONTRACTS = [ \"erc20.sol\" ] def get_contracts() -> Iterable[pathlib.Path]: for", "= \"./scripts/benchmark/contract_data/\" CONTRACTS = [ \"erc20.sol\" ] def get_contracts() -> Iterable[pathlib.Path]: for val", ") CONTRACTS_ROOT = \"./scripts/benchmark/contract_data/\" CONTRACTS = [ \"erc20.sol\" ] def get_contracts() -> Iterable[pathlib.Path]:", "from typing import ( Iterable ) CONTRACTS_ROOT = \"./scripts/benchmark/contract_data/\" CONTRACTS = [ \"erc20.sol\"", "typing import ( Iterable ) CONTRACTS_ROOT = \"./scripts/benchmark/contract_data/\" CONTRACTS = [ \"erc20.sol\" ]", "Iterable ) CONTRACTS_ROOT = \"./scripts/benchmark/contract_data/\" CONTRACTS = [ \"erc20.sol\" ] def get_contracts() ->", "( Iterable ) CONTRACTS_ROOT = \"./scripts/benchmark/contract_data/\" CONTRACTS = [ \"erc20.sol\" ] def get_contracts()", "= [ \"erc20.sol\" ] def get_contracts() -> Iterable[pathlib.Path]: for val in CONTRACTS: yield", "\"erc20.sol\" ] def get_contracts() -> Iterable[pathlib.Path]: for val in CONTRACTS: yield pathlib.Path(CONTRACTS_ROOT) /", "[ \"erc20.sol\" ] def get_contracts() -> Iterable[pathlib.Path]: for val in CONTRACTS: yield pathlib.Path(CONTRACTS_ROOT)", "] def get_contracts() -> Iterable[pathlib.Path]: for val in CONTRACTS: yield pathlib.Path(CONTRACTS_ROOT) / pathlib.Path(val)", "\"./scripts/benchmark/contract_data/\" CONTRACTS = [ \"erc20.sol\" ] def get_contracts() -> Iterable[pathlib.Path]: for val in", "import ( Iterable ) CONTRACTS_ROOT = \"./scripts/benchmark/contract_data/\" CONTRACTS = [ \"erc20.sol\" ] def", "CONTRACTS = [ \"erc20.sol\" ] def get_contracts() -> Iterable[pathlib.Path]: for val in CONTRACTS:" ]
[ "pk_col2 SERIAL, normal_col1 VARCHAR, normal_col2 VARCHAR, PRIMARY KEY(pk_col1, pk_col2) ) \"\"\".format(self.test_table_name) if table_has_oids:", "none of the primary keys of the table', dict( sql='SELECT normal_col1 FROM {0};',", "response_data = self._execute_select_sql() self._check_primary_keys(response_data) self._check_oids(response_data) # Verifying Temporary table result data on Commit", "True, False] )), ('When selecting oids without primary keys (table with oids)', dict(", "False] )) ] def setUp(self): self.test_table_name = \"test_for_updatable_resultset\" + \\ str(random.randint(1000, 9999)) self._initialize_database_connection()", "utils.get_driver_version() driver_version = float('.'.join(driver_version.split('.')[:2])) if driver_version < 2.8: self.skipTest('Updatable resultsets require pyscopg 2.8", "\"%Y-%m-%d\")]], expected_has_oids=False, expected_results_column_is_editable=False, table_has_oids=False, expected_cols_is_editable=[False] )) ] def runTest(self): response_data = self._execute_select_sql() self._check_primary_keys(response_data)", "The pgAdmin Development Team # This software is released under the PostgreSQL Licence", "primary keys of the table', dict( sql='SELECT pk_col1, pk_col2 FROM {0};', expected_primary_keys={ 'pk_col1':", "detection of whether the query result-set is updatable. 
\"\"\" scenarios = [ ('When", "keys of the table', dict( sql='SELECT pk_col1, pk_col2 FROM {0};', expected_primary_keys={ 'pk_col1': 'int4',", "sql='SELECT *, oid FROM {0};', expected_primary_keys={ 'pk_col1': 'int4', 'pk_col2': 'int4' }, expected_has_oids=True, table_has_oids=True,", "AS today; SELECT * FROM {0};''', expected_primary_keys=None, expected_results_column_data=[[date.today().strftime( \"%Y-%m-%d\")]], expected_has_oids=False, expected_results_column_is_editable=False, table_has_oids=False, expected_cols_is_editable=[False]", "OIDs are not supported starting ' 'PostgreSQL 12') driver_version = utils.get_driver_version() driver_version =", "_check_primary_keys(self, response_data): primary_keys = response_data['data']['primary_keys'] self.assertEquals(primary_keys, self.expected_primary_keys) def _check_oids(self, response_data): has_oids = response_data['data']['has_oids']", "\"{0}\";', expected_primary_keys=None, expected_has_oids=False, table_has_oids=False, expected_cols_is_editable=[False, False] )), ('When renaming a normal column', dict(", "import BaseTestGenerator from regression import parent_node_dict from regression.python_test_utils import test_utils as utils from", "self.trans_id = str(random.randint(1, 9999999)) url = '/datagrid/initialize/query_tool/{0}/{1}/{2}/{3}'.format( self.trans_id, utils.SERVER_GROUP, self.server_id, self.db_id) response =", "of the table', dict( sql='SELECT pk_col2 FROM {0};', expected_primary_keys=None, expected_has_oids=False, table_has_oids=False, expected_cols_is_editable=[False] )),", "'pk_col2': 'int4' }, expected_has_oids=True, table_has_oids=True, expected_cols_is_editable=[True, True, True, True, False] )), ('When selecting", "TestQueryUpdatableResultset(BaseTestGenerator): \"\"\" This class will test the detection of whether the query result-set", "('When renaming a primary key', dict( sql='SELECT pk_col1 as some_col, pk_col2 FROM \"{0}\";',", "pk_col2 FROM {0};', expected_primary_keys=None, 
expected_has_oids=False, table_has_oids=False, expected_cols_is_editable=[False] )), ('When selecting none of the", "if driver_version < 2.8: self.skipTest('Updatable resultsets require pyscopg 2.8 or later') self.db_id =", "ON COMMIT DROP AS SELECT CURRENT_DATE AS today; SELECT * FROM {0};''', expected_primary_keys=None,", "db_con = database_utils.connect_database(self, utils.SERVER_GROUP, self.server_id, self.db_id) if not db_con[\"info\"] == \"Database connected.\": raise", "\\ '/sqleditor/query_tool/start/{0}'.format(self.trans_id) self.poll_url = '/sqleditor/poll/{0}'.format(self.trans_id) def _create_test_table(self, table_has_oids=False): create_sql = \"\"\" DROP TABLE", "is released under the PostgreSQL Licence # ########################################################################## import json import random from", "{0};', expected_primary_keys=None, expected_has_oids=True, table_has_oids=True, expected_cols_is_editable=[False, True, True] )), ('When selecting none of the", "EXISTS {0}; CREATE TEMPORARY TABLE {0} ON COMMIT DROP AS SELECT CURRENT_DATE AS", "from regression import parent_node_dict from regression.python_test_utils import test_utils as utils from pgadmin.tools.sqleditor.tests.execute_query_test_utils \\", "oids)', dict( sql='SELECT *, oid FROM {0};', expected_primary_keys={ 'pk_col1': 'int4', 'pk_col2': 'int4' },", "start_query_tool_url=self.start_query_tool_url) self.assertEquals(is_success, True) return response_data def _check_primary_keys(self, response_data): primary_keys = response_data['data']['primary_keys'] self.assertEquals(primary_keys, self.expected_primary_keys)", "'pk_col2': 'int4' }, expected_has_oids=False, table_has_oids=False, expected_cols_is_editable=[False, True, True] )), ('When selecting a normal", "DROP TABLE IF EXISTS {0}; CREATE TABLE {0}( pk_col1 SERIAL, pk_col2 SERIAL, normal_col1", "class will test the query result-set for temporary tables \"\"\" scenarios = [", "+= ' WITH OIDS;' else: create_sql += ';' 
utils.create_table_with_query(self.server, self.db_name, create_sql) class TestTemporaryTable(TestQueryUpdatableResultset):", "import parent_node_dict from regression.python_test_utils import test_utils as utils from pgadmin.tools.sqleditor.tests.execute_query_test_utils \\ import execute_query", "from regression.python_test_utils import test_utils as utils from pgadmin.tools.sqleditor.tests.execute_query_test_utils \\ import execute_query from datetime", "expected_primary_keys=None, expected_has_oids=False, table_has_oids=False, expected_cols_is_editable=[False, False] )), ('When renaming a normal column', dict( sql='SELECT", "self.tester.post(url) self.assertEquals(response.status_code, 200) def _initialize_urls(self): self.start_query_tool_url = \\ '/sqleditor/query_tool/start/{0}'.format(self.trans_id) self.poll_url = '/sqleditor/poll/{0}'.format(self.trans_id) def", "database_utils from pgadmin.utils.route import BaseTestGenerator from regression import parent_node_dict from regression.python_test_utils import test_utils", "connected.\": raise Exception(\"Could not connect to the database.\") def _initialize_query_tool(self): self.trans_id = str(random.randint(1,", "self.expected_has_oids) def _check_editable_columns(self, response_data): columns_info = response_data['data']['colinfo'] for col, expected_is_editable in \\ zip(columns_info,", "create_sql) class TestTemporaryTable(TestQueryUpdatableResultset): \"\"\" This class will test the query result-set for temporary", "CURRENT_DATE AS today; SELECT * FROM {0};''', expected_primary_keys=None, expected_results_column_data=[[date.today().strftime( \"%Y-%m-%d\")]], expected_has_oids=False, expected_results_column_is_editable=False, table_has_oids=False,", "the table', dict( sql='SELECT pk_col2 FROM {0};', expected_primary_keys=None, expected_has_oids=False, table_has_oids=False, expected_cols_is_editable=[False] )), ('When", "utils.SERVER_GROUP, self.server_id, self.db_id) response = self.tester.post(url) 
self.assertEquals(response.status_code, 200) def _initialize_urls(self): self.start_query_tool_url = \\", "120000 and self.table_has_oids: self.skipTest('Tables with OIDs are not supported starting ' 'PostgreSQL 12')", "table_has_oids=False, expected_cols_is_editable=[False] )) ] def runTest(self): response_data = self._execute_select_sql() self._check_primary_keys(response_data) self._check_oids(response_data) # Verifying", "the query result-set for temporary tables \"\"\" scenarios = [ ('When selecting all", "True, True] )), ('When selecting a normal column twice', dict( sql='SELECT pk_col1, pk_col2,", "(C) 2013 - 2020, The pgAdmin Development Team # This software is released", "self._initialize_database_connection() self._initialize_query_tool() self._initialize_urls() def runTest(self): self._create_test_table(table_has_oids=self.table_has_oids) response_data = self._execute_select_sql() self._check_primary_keys(response_data) self._check_oids(response_data) self._check_editable_columns(response_data) def", "key name', dict( sql='SELECT normal_col1 as pk_col1, pk_col1, pk_col2 FROM {0};', expected_primary_keys={ 'pk_col1':", "(table with oids)', dict( sql='SELECT normal_col1, normal_col2 FROM {0};', expected_primary_keys=None, expected_has_oids=False, table_has_oids=True, expected_cols_is_editable=[False,", "normal_col1 VARCHAR, normal_col2 VARCHAR, PRIMARY KEY(pk_col1, pk_col2) ) \"\"\".format(self.test_table_name) if table_has_oids: create_sql +=", "dict( sql='SELECT normal_col1 as pk_col1, pk_col1, pk_col2 FROM {0};', expected_primary_keys={ 'pk_col1': 'int4', 'pk_col2':", "regression import parent_node_dict from regression.python_test_utils import test_utils as utils from pgadmin.tools.sqleditor.tests.execute_query_test_utils \\ import", "{0}; CREATE TABLE {0}( pk_col1 SERIAL, pk_col2 SERIAL, normal_col1 VARCHAR, normal_col2 VARCHAR, PRIMARY", "expected_has_oids=True, table_has_oids=True, expected_cols_is_editable=[True, True, True, True, False] )), ('When 
selecting oids without primary", "the primary keys of the table', dict( sql='SELECT normal_col1 FROM {0};', expected_primary_keys=None, expected_has_oids=False,", "('When renaming a normal column to a primary key name', dict( sql='SELECT normal_col1", "table_has_oids=False, expected_cols_is_editable=[True, True, True, False] )), ('When selecting a non-table column', dict( sql='SELECT", "table_has_oids=True, expected_cols_is_editable=[False, False] )) ] def setUp(self): self.test_table_name = \"test_for_updatable_resultset\" + \\ str(random.randint(1000,", "expected_primary_keys=None, expected_has_oids=True, table_has_oids=True, expected_cols_is_editable=[False, True, True] )), ('When selecting none of the primary", "\"{0}\";', expected_primary_keys={ 'pk_col1': 'int4', 'pk_col2': 'int4' }, expected_has_oids=False, table_has_oids=False, expected_cols_is_editable=[True, True, False] )),", "200) def _initialize_urls(self): self.start_query_tool_url = \\ '/sqleditor/query_tool/start/{0}'.format(self.trans_id) self.poll_url = '/sqleditor/poll/{0}'.format(self.trans_id) def _create_test_table(self, table_has_oids=False):", "later') self.db_id = database_info[\"db_id\"] db_con = database_utils.connect_database(self, utils.SERVER_GROUP, self.server_id, self.db_id) if not db_con[\"info\"]", "False] )), ('When selecting primary keys and oids (table with oids)', dict( sql='SELECT", "selecting primary keys and oids (table with oids)', dict( sql='SELECT *, oid FROM", "if not db_con[\"info\"] == \"Database connected.\": raise Exception(\"Could not connect to the database.\")", "expected_primary_keys={ 'pk_col1': 'int4', 'pk_col2': 'int4' }, expected_has_oids=False, table_has_oids=False, expected_cols_is_editable=[True, True, True, True] )),", "* FROM {0};''', expected_primary_keys=None, expected_results_column_data=[[date.today().strftime( \"%Y-%m-%d\")]], expected_has_oids=False, expected_results_column_is_editable=False, table_has_oids=False, expected_cols_is_editable=[False] 
)) ] def", "a normal column twice', dict( sql='SELECT pk_col1, pk_col2, normal_col1, normal_col1 FROM {0};', expected_primary_keys={", "today; SELECT * FROM {0};''', expected_primary_keys=None, expected_results_column_data=[[date.today().strftime( \"%Y-%m-%d\")]], expected_has_oids=False, expected_results_column_is_editable=False, table_has_oids=False, expected_cols_is_editable=[False] ))", "FROM {0};', expected_primary_keys=None, expected_has_oids=False, table_has_oids=False, expected_cols_is_editable=[False] )), ('When selecting none of the primary", "dict(sql=''' DROP TABLE IF EXISTS {0}; CREATE TEMPORARY TABLE {0} ON COMMIT DROP", "= float('.'.join(driver_version.split('.')[:2])) if driver_version < 2.8: self.skipTest('Updatable resultsets require pyscopg 2.8 or later')", "('When selecting primary keys and oids (table with oids)', dict( sql='SELECT *, oid", "def _create_test_table(self, table_has_oids=False): create_sql = \"\"\" DROP TABLE IF EXISTS {0}; CREATE TABLE", "result-set is updatable. 
\"\"\" scenarios = [ ('When selecting all columns of the", "FROM {0};', expected_primary_keys=None, expected_has_oids=True, table_has_oids=True, expected_cols_is_editable=[False, True, True] )), ('When selecting none of", "_check_oids(self, response_data): has_oids = response_data['data']['has_oids'] self.assertEquals(has_oids, self.expected_has_oids) def _check_editable_columns(self, response_data): columns_info = response_data['data']['colinfo']", "self.skipTest('Updatable resultsets require pyscopg 2.8 or later') self.db_id = database_info[\"db_id\"] db_con = database_utils.connect_database(self,", "as some_col FROM \"{0}\";', expected_primary_keys={ 'pk_col1': 'int4', 'pk_col2': 'int4' }, expected_has_oids=False, table_has_oids=False, expected_cols_is_editable=[True,", "= database_info[\"db_id\"] db_con = database_utils.connect_database(self, utils.SERVER_GROUP, self.server_id, self.db_id) if not db_con[\"info\"] == \"Database", "] def runTest(self): response_data = self._execute_select_sql() self._check_primary_keys(response_data) self._check_oids(response_data) # Verifying Temporary table result", "DROP TABLE IF EXISTS {0}; CREATE TEMPORARY TABLE {0} ON COMMIT DROP AS", "table_has_oids=False, expected_cols_is_editable=[True, True, True, True] )), ('When selecting all primary keys of the", "updatable. 
\"\"\" scenarios = [ ('When selecting all columns of the table', dict(", "FROM {0};', expected_primary_keys={'pk_col1': 'int4', 'pk_col2': 'int4' }, expected_has_oids=False, table_has_oids=False, expected_cols_is_editable=[True, True, False] )),", "\\ database_utils from pgadmin.utils.route import BaseTestGenerator from regression import parent_node_dict from regression.python_test_utils import", "self._initialize_urls() def runTest(self): self._create_test_table(table_has_oids=self.table_has_oids) response_data = self._execute_select_sql() self._check_primary_keys(response_data) self._check_oids(response_data) self._check_editable_columns(response_data) def tearDown(self): #", "keys of the table', dict( sql='SELECT normal_col1 FROM {0};', expected_primary_keys=None, expected_has_oids=False, table_has_oids=False, expected_cols_is_editable=[False]", "self.assertEquals(col['is_editable'], expected_is_editable) def _initialize_database_connection(self): database_info = parent_node_dict[\"database\"][-1] self.db_name = database_info[\"db_name\"] self.server_id = database_info[\"server_id\"]", "of the table', dict( sql='SELECT normal_col1 FROM {0};', expected_primary_keys=None, expected_has_oids=False, table_has_oids=False, expected_cols_is_editable=[False] )),", "\"test_for_updatable_resultset\" + \\ str(random.randint(1000, 9999)) self._initialize_database_connection() self._initialize_query_tool() self._initialize_urls() def runTest(self): self._create_test_table(table_has_oids=self.table_has_oids) response_data =", "starting ' 'PostgreSQL 12') driver_version = utils.get_driver_version() driver_version = float('.'.join(driver_version.split('.')[:2])) if driver_version <", "str(random.randint(1, 9999999)) url = '/datagrid/initialize/query_tool/{0}/{1}/{2}/{3}'.format( self.trans_id, utils.SERVER_GROUP, self.server_id, self.db_id) response = self.tester.post(url) self.assertEquals(response.status_code,", "'/datagrid/initialize/query_tool/{0}/{1}/{2}/{3}'.format( 
self.trans_id, utils.SERVER_GROUP, self.server_id, self.db_id) response = self.tester.post(url) self.assertEquals(response.status_code, 200) def _initialize_urls(self): self.start_query_tool_url", "of the primary keys of the table', dict( sql='SELECT normal_col1 FROM {0};', expected_primary_keys=None,", "from pgadmin.utils.route import BaseTestGenerator from regression import parent_node_dict from regression.python_test_utils import test_utils as", "' WITH OIDS;' else: create_sql += ';' utils.create_table_with_query(self.server, self.db_name, create_sql) class TestTemporaryTable(TestQueryUpdatableResultset): \"\"\"", "- PostgreSQL Tools # # Copyright (C) 2013 - 2020, The pgAdmin Development", "tables \"\"\" scenarios = [ ('When selecting all columns of the Temporary table,", "expected_cols_is_editable=[False, True, True] )), ('When selecting a normal column twice', dict( sql='SELECT pk_col1,", "to a primary key name', dict( sql='SELECT normal_col1 as pk_col1, pk_col1, pk_col2 FROM", "poll_url=self.poll_url, start_query_tool_url=self.start_query_tool_url) self.assertEquals(is_success, True) return response_data def _check_primary_keys(self, response_data): primary_keys = response_data['data']['primary_keys'] self.assertEquals(primary_keys,", "of the primary keys of the table', dict( sql='SELECT pk_col2 FROM {0};', expected_primary_keys=None,", "self._check_editable_columns(response_data) def tearDown(self): # Disconnect the database database_utils.disconnect_database(self, self.server_id, self.db_id) def _execute_select_sql(self): sql", "database database_utils.disconnect_database(self, self.server_id, self.db_id) def _execute_select_sql(self): sql = self.sql.format(self.test_table_name) is_success, response_data = \\", "expected_primary_keys={ 'pk_col1': 'int4', 'pk_col2': 'int4' }, expected_has_oids=False, table_has_oids=False, expected_cols_is_editable=[True, True, True, False] )),", "json import random from pgadmin.browser.server_groups.servers.databases.tests 
import utils as \\ database_utils from pgadmin.utils.route import", ")), ('When selecting a normal column twice', dict( sql='SELECT pk_col1, pk_col2, normal_col1, normal_col1", "self.assertEquals(response.status_code, 200) def _initialize_urls(self): self.start_query_tool_url = \\ '/sqleditor/query_tool/start/{0}'.format(self.trans_id) self.poll_url = '/sqleditor/poll/{0}'.format(self.trans_id) def _create_test_table(self,", "'pk_col1': 'int4', 'pk_col2': 'int4' }, expected_has_oids=True, table_has_oids=True, expected_cols_is_editable=[True, True, True, True, False] )),", "query result-set for temporary tables \"\"\" scenarios = [ ('When selecting all columns", "= \\ execute_query(tester=self.tester, query=sql, poll_url=self.poll_url, start_query_tool_url=self.start_query_tool_url) self.assertEquals(is_success, True) return response_data def _check_primary_keys(self, response_data):", "dict( sql='SELECT * FROM {0};', expected_primary_keys={ 'pk_col1': 'int4', 'pk_col2': 'int4' }, expected_has_oids=False, table_has_oids=False,", "dict( sql='SELECT normal_col1 FROM {0};', expected_primary_keys=None, expected_has_oids=False, table_has_oids=False, expected_cols_is_editable=[False] )), ('When renaming a", "self._execute_select_sql() self._check_primary_keys(response_data) self._check_oids(response_data) self._check_editable_columns(response_data) def tearDown(self): # Disconnect the database database_utils.disconnect_database(self, self.server_id, self.db_id)", "result-set for temporary tables \"\"\" scenarios = [ ('When selecting all columns of", "KEY(pk_col1, pk_col2) ) \"\"\".format(self.test_table_name) if table_has_oids: create_sql += ' WITH OIDS;' else: create_sql", "self.db_name = database_info[\"db_name\"] self.server_id = database_info[\"server_id\"] self.server_version = parent_node_dict[\"schema\"][-1][\"server_version\"] if self.server_version >= 120000", "VARCHAR, PRIMARY KEY(pk_col1, pk_col2) ) \"\"\".format(self.test_table_name) if table_has_oids: 
create_sql += ' WITH OIDS;'", ")), ('When selecting none of the primary keys or oids (table with oids)',", "VARCHAR, normal_col2 VARCHAR, PRIMARY KEY(pk_col1, pk_col2) ) \"\"\".format(self.test_table_name) if table_has_oids: create_sql += '", "some_col, pk_col2 FROM \"{0}\";', expected_primary_keys=None, expected_has_oids=False, table_has_oids=False, expected_cols_is_editable=[False, False] )), ('When renaming a", "create_sql = \"\"\" DROP TABLE IF EXISTS {0}; CREATE TABLE {0}( pk_col1 SERIAL,", ") \"\"\".format(self.test_table_name) if table_has_oids: create_sql += ' WITH OIDS;' else: create_sql += ';'", "result data on Commit Drop self._check_results_column_data(response_data) self._check_editable_columns(response_data) def _check_results_column_data(self, response_data): results_column_data = response_data['data']['result']", "keys of the table', dict( sql='SELECT pk_col2 FROM {0};', expected_primary_keys=None, expected_has_oids=False, table_has_oids=False, expected_cols_is_editable=[False]", "pk_col2) ) \"\"\".format(self.test_table_name) if table_has_oids: create_sql += ' WITH OIDS;' else: create_sql +=", ")), ('When renaming a normal column to a primary key name', dict( sql='SELECT", "EXISTS {0}; CREATE TABLE {0}( pk_col1 SERIAL, pk_col2 SERIAL, normal_col1 VARCHAR, normal_col2 VARCHAR,", "def setUp(self): self.test_table_name = \"test_for_updatable_resultset\" + \\ str(random.randint(1000, 9999)) self._initialize_database_connection() self._initialize_query_tool() self._initialize_urls() def", "}, expected_has_oids=False, table_has_oids=False, expected_cols_is_editable=[False, True, True] )), ('When selecting a normal column twice',", "import json import random from pgadmin.browser.server_groups.servers.databases.tests import utils as \\ database_utils from pgadmin.utils.route", "|| normal_col2 FROM {0};', expected_primary_keys={'pk_col1': 'int4', 'pk_col2': 'int4' }, expected_has_oids=False, table_has_oids=False, expected_cols_is_editable=[True, True,", 
"software is released under the PostgreSQL Licence # ########################################################################## import json import random", "expected_cols_is_editable=[False] )), ('When selecting none of the primary keys of the table', dict(", ")) ] def setUp(self): self.test_table_name = \"test_for_updatable_resultset\" + \\ str(random.randint(1000, 9999)) self._initialize_database_connection() self._initialize_query_tool()", "IF EXISTS {0}; CREATE TABLE {0}( pk_col1 SERIAL, pk_col2 SERIAL, normal_col1 VARCHAR, normal_col2", "'int4', 'pk_col2': 'int4' }, expected_has_oids=False, table_has_oids=False, expected_cols_is_editable=[True, True, True, True] )), ('When selecting", "= str(random.randint(1, 9999999)) url = '/datagrid/initialize/query_tool/{0}/{1}/{2}/{3}'.format( self.trans_id, utils.SERVER_GROUP, self.server_id, self.db_id) response = self.tester.post(url)", "SELECT * FROM {0};''', expected_primary_keys=None, expected_results_column_data=[[date.today().strftime( \"%Y-%m-%d\")]], expected_has_oids=False, expected_results_column_is_editable=False, table_has_oids=False, expected_cols_is_editable=[False] )) ]", "self.assertEquals(primary_keys, self.expected_primary_keys) def _check_oids(self, response_data): has_oids = response_data['data']['has_oids'] self.assertEquals(has_oids, self.expected_has_oids) def _check_editable_columns(self, response_data):", "expected_has_oids=False, table_has_oids=False, expected_cols_is_editable=[False] )), ('When selecting none of the primary keys of the", "dict( sql='SELECT oid, normal_col1, normal_col2 FROM {0};', expected_primary_keys=None, expected_has_oids=True, table_has_oids=True, expected_cols_is_editable=[False, True, True]", "'pk_col1': 'int4', 'pk_col2': 'int4' }, expected_has_oids=False, table_has_oids=False, expected_cols_is_editable=[True, True, False] )), ('When renaming", "expected_has_oids=False, table_has_oids=False, expected_cols_is_editable=[False, True, True] )), ('When selecting a normal 
column twice', dict(", "'pk_col2': 'int4' }, expected_has_oids=False, table_has_oids=False, expected_cols_is_editable=[True, True, True, False] )), ('When selecting a", "utils.create_table_with_query(self.server, self.db_name, create_sql) class TestTemporaryTable(TestQueryUpdatableResultset): \"\"\" This class will test the query result-set", "oids (table with oids)', dict( sql='SELECT normal_col1, normal_col2 FROM {0};', expected_primary_keys=None, expected_has_oids=False, table_has_oids=True,", "table, on commit drop', dict(sql=''' DROP TABLE IF EXISTS {0}; CREATE TEMPORARY TABLE", "pgAdmin 4 - PostgreSQL Tools # # Copyright (C) 2013 - 2020, The", "normal column twice', dict( sql='SELECT pk_col1, pk_col2, normal_col1, normal_col1 FROM {0};', expected_primary_keys={ 'pk_col1':", "setUp(self): self.test_table_name = \"test_for_updatable_resultset\" + \\ str(random.randint(1000, 9999)) self._initialize_database_connection() self._initialize_query_tool() self._initialize_urls() def runTest(self):", "expected_has_oids=False, table_has_oids=False, expected_cols_is_editable=[True, True, True, False] )), ('When selecting a non-table column', dict(", "self._check_primary_keys(response_data) self._check_oids(response_data) self._check_editable_columns(response_data) def tearDown(self): # Disconnect the database database_utils.disconnect_database(self, self.server_id, self.db_id) def", "9999999)) url = '/datagrid/initialize/query_tool/{0}/{1}/{2}/{3}'.format( self.trans_id, utils.SERVER_GROUP, self.server_id, self.db_id) response = self.tester.post(url) self.assertEquals(response.status_code, 200)", "= response_data['data']['primary_keys'] self.assertEquals(primary_keys, self.expected_primary_keys) def _check_oids(self, response_data): has_oids = response_data['data']['has_oids'] self.assertEquals(has_oids, self.expected_has_oids) def", "}, expected_has_oids=False, table_has_oids=False, expected_cols_is_editable=[True, True, True, False] )), ('When selecting a non-table 
column',", "= '/sqleditor/poll/{0}'.format(self.trans_id) def _create_test_table(self, table_has_oids=False): create_sql = \"\"\" DROP TABLE IF EXISTS {0};", "keys (table with oids)', dict( sql='SELECT oid, normal_col1, normal_col2 FROM {0};', expected_primary_keys=None, expected_has_oids=True,", "table_has_oids: create_sql += ' WITH OIDS;' else: create_sql += ';' utils.create_table_with_query(self.server, self.db_name, create_sql)", "+ \\ str(random.randint(1000, 9999)) self._initialize_database_connection() self._initialize_query_tool() self._initialize_urls() def runTest(self): self._create_test_table(table_has_oids=self.table_has_oids) response_data = self._execute_select_sql()", "primary key name', dict( sql='SELECT normal_col1 as pk_col1, pk_col1, pk_col2 FROM {0};', expected_primary_keys={", "'int4' }, expected_has_oids=False, table_has_oids=False, expected_cols_is_editable=[True, True, True, False] )), ('When selecting a non-table", "sql='SELECT pk_col2 FROM {0};', expected_primary_keys=None, expected_has_oids=False, table_has_oids=False, expected_cols_is_editable=[False] )), ('When selecting none of", "self.server_id, self.db_id) if not db_con[\"info\"] == \"Database connected.\": raise Exception(\"Could not connect to", "key', dict( sql='SELECT pk_col1 as some_col, pk_col2 FROM \"{0}\";', expected_primary_keys=None, expected_has_oids=False, table_has_oids=False, expected_cols_is_editable=[False,", "a primary key name', dict( sql='SELECT normal_col1 as pk_col1, pk_col1, pk_col2 FROM {0};',", "all primary keys of the table', dict( sql='SELECT pk_col1, pk_col2 FROM {0};', expected_primary_keys={", "pgAdmin Development Team # This software is released under the PostgreSQL Licence #", "'int4' }, expected_has_oids=False, table_has_oids=False, expected_cols_is_editable=[True, True, False] )), ('When selecting primary keys and", "\"\"\" DROP TABLE IF EXISTS {0}; CREATE TABLE {0}( pk_col1 SERIAL, pk_col2 SERIAL,", "'pk_col2': 'int4' }, expected_has_oids=False, 
table_has_oids=False, expected_cols_is_editable=[True, True, False] )), ('When renaming a normal", "sql='SELECT pk_col1 as some_col, pk_col2 FROM \"{0}\";', expected_primary_keys=None, expected_has_oids=False, table_has_oids=False, expected_cols_is_editable=[False, False] )),", "primary keys or oids (table with oids)', dict( sql='SELECT normal_col1, normal_col2 FROM {0};',", "table_has_oids=False, expected_cols_is_editable=[False] )), ('When renaming a primary key', dict( sql='SELECT pk_col1 as some_col,", "= database_info[\"server_id\"] self.server_version = parent_node_dict[\"schema\"][-1][\"server_version\"] if self.server_version >= 120000 and self.table_has_oids: self.skipTest('Tables with", "else: create_sql += ';' utils.create_table_with_query(self.server, self.db_name, create_sql) class TestTemporaryTable(TestQueryUpdatableResultset): \"\"\" This class will", "the table', dict( sql='SELECT pk_col1, pk_col2 FROM {0};', expected_primary_keys={ 'pk_col1': 'int4', 'pk_col2': 'int4'", "execute_query(tester=self.tester, query=sql, poll_url=self.poll_url, start_query_tool_url=self.start_query_tool_url) self.assertEquals(is_success, True) return response_data def _check_primary_keys(self, response_data): primary_keys =", "'int4', 'pk_col2': 'int4' }, expected_has_oids=False, table_has_oids=False, expected_cols_is_editable=[True, True, False] )), ('When selecting primary", "expected_cols_is_editable=[True, True, True, True] )), ('When selecting all primary keys of the table',", "self.server_version = parent_node_dict[\"schema\"][-1][\"server_version\"] if self.server_version >= 120000 and self.table_has_oids: self.skipTest('Tables with OIDs are", "normal_col1 || normal_col2 FROM {0};', expected_primary_keys={'pk_col1': 'int4', 'pk_col2': 'int4' }, expected_has_oids=False, table_has_oids=False, expected_cols_is_editable=[True,", "dict( sql='SELECT pk_col1 as some_col, pk_col2 FROM \"{0}\";', expected_primary_keys=None, expected_has_oids=False, table_has_oids=False, 
expected_cols_is_editable=[False, False]", "TestTemporaryTable(TestQueryUpdatableResultset): \"\"\" This class will test the query result-set for temporary tables \"\"\"", "= \\ '/sqleditor/query_tool/start/{0}'.format(self.trans_id) self.poll_url = '/sqleditor/poll/{0}'.format(self.trans_id) def _create_test_table(self, table_has_oids=False): create_sql = \"\"\" DROP", "{0};', expected_primary_keys={ 'pk_col1': 'int4', 'pk_col2': 'int4' }, expected_has_oids=False, table_has_oids=False, expected_cols_is_editable=[True, True] )), ('When", "SELECT CURRENT_DATE AS today; SELECT * FROM {0};''', expected_primary_keys=None, expected_results_column_data=[[date.today().strftime( \"%Y-%m-%d\")]], expected_has_oids=False, expected_results_column_is_editable=False,", "the detection of whether the query result-set is updatable. \"\"\" scenarios = [", "False] )), ('When renaming a normal column to a primary key name', dict(", "normal_col2 FROM {0};', expected_primary_keys=None, expected_has_oids=True, table_has_oids=True, expected_cols_is_editable=[False, True, True] )), ('When selecting none", "pk_col2, normal_col1, normal_col1 FROM {0};', expected_primary_keys={ 'pk_col1': 'int4', 'pk_col2': 'int4' }, expected_has_oids=False, table_has_oids=False,", "self.server_id = database_info[\"server_id\"] self.server_version = parent_node_dict[\"schema\"][-1][\"server_version\"] if self.server_version >= 120000 and self.table_has_oids: self.skipTest('Tables", "\"\"\" scenarios = [ ('When selecting all columns of the Temporary table, on", "whether the query result-set is updatable. 
\"\"\" scenarios = [ ('When selecting all", "from pgadmin.browser.server_groups.servers.databases.tests import utils as \\ database_utils from pgadmin.utils.route import BaseTestGenerator from regression", "expected_primary_keys={ 'pk_col1': 'int4', 'pk_col2': 'int4' }, expected_has_oids=False, table_has_oids=False, expected_cols_is_editable=[False, True, True] )), ('When", "primary keys of the table', dict( sql='SELECT pk_col2 FROM {0};', expected_primary_keys=None, expected_has_oids=False, table_has_oids=False,", "will test the query result-set for temporary tables \"\"\" scenarios = [ ('When", "response_data['data']['has_oids'] self.assertEquals(has_oids, self.expected_has_oids) def _check_editable_columns(self, response_data): columns_info = response_data['data']['colinfo'] for col, expected_is_editable in", "FROM \"{0}\";', expected_primary_keys={ 'pk_col1': 'int4', 'pk_col2': 'int4' }, expected_has_oids=False, table_has_oids=False, expected_cols_is_editable=[True, True, False]", "expected_has_oids=False, table_has_oids=True, expected_cols_is_editable=[False, False] )) ] def setUp(self): self.test_table_name = \"test_for_updatable_resultset\" + \\", "all columns of the table', dict( sql='SELECT * FROM {0};', expected_primary_keys={ 'pk_col1': 'int4',", "Development Team # This software is released under the PostgreSQL Licence # ##########################################################################", "# Disconnect the database database_utils.disconnect_database(self, self.server_id, self.db_id) def _execute_select_sql(self): sql = self.sql.format(self.test_table_name) is_success,", "('When selecting none of the primary keys of the table', dict( sql='SELECT normal_col1", "self.table_has_oids: self.skipTest('Tables with OIDs are not supported starting ' 'PostgreSQL 12') driver_version =", "a normal column to a primary key name', dict( sql='SELECT normal_col1 as pk_col1,", "12') driver_version = utils.get_driver_version() driver_version = 
float('.'.join(driver_version.split('.')[:2])) if driver_version < 2.8: self.skipTest('Updatable resultsets", "'pk_col1': 'int4', 'pk_col2': 'int4' }, expected_has_oids=False, table_has_oids=False, expected_cols_is_editable=[True, True] )), ('When selecting some", "('When selecting a non-table column', dict( sql='SELECT pk_col1, pk_col2, normal_col1 || normal_col2 FROM", "= self.tester.post(url) self.assertEquals(response.status_code, 200) def _initialize_urls(self): self.start_query_tool_url = \\ '/sqleditor/query_tool/start/{0}'.format(self.trans_id) self.poll_url = '/sqleditor/poll/{0}'.format(self.trans_id)", "selecting all columns of the Temporary table, on commit drop', dict(sql=''' DROP TABLE", "the primary keys of the table', dict( sql='SELECT pk_col2 FROM {0};', expected_primary_keys=None, expected_has_oids=False,", "TABLE IF EXISTS {0}; CREATE TEMPORARY TABLE {0} ON COMMIT DROP AS SELECT", "the primary keys or oids (table with oids)', dict( sql='SELECT normal_col1, normal_col2 FROM", "True, True] )), ('When selecting none of the primary keys or oids (table", "normal column to a primary key name', dict( sql='SELECT normal_col1 as pk_col1, pk_col1,", "\"\"\" This class will test the query result-set for temporary tables \"\"\" scenarios", "test the detection of whether the query result-set is updatable. 
\"\"\" scenarios =", "table_has_oids=False): create_sql = \"\"\" DROP TABLE IF EXISTS {0}; CREATE TABLE {0}( pk_col1", "without primary keys (table with oids)', dict( sql='SELECT oid, normal_col1, normal_col2 FROM {0};',", "on commit drop', dict(sql=''' DROP TABLE IF EXISTS {0}; CREATE TEMPORARY TABLE {0}", "self._check_results_column_data(response_data) self._check_editable_columns(response_data) def _check_results_column_data(self, response_data): results_column_data = response_data['data']['result'] for result_data, expected_is_editable in \\", "########################################################################## import json import random from pgadmin.browser.server_groups.servers.databases.tests import utils as \\ database_utils from", "] def setUp(self): self.test_table_name = \"test_for_updatable_resultset\" + \\ str(random.randint(1000, 9999)) self._initialize_database_connection() self._initialize_query_tool() self._initialize_urls()", "}, expected_has_oids=False, table_has_oids=False, expected_cols_is_editable=[True, True, False] )), ('When selecting primary keys and oids", "database_info[\"server_id\"] self.server_version = parent_node_dict[\"schema\"][-1][\"server_version\"] if self.server_version >= 120000 and self.table_has_oids: self.skipTest('Tables with OIDs", "expected_is_editable in \\ zip(columns_info, self.expected_cols_is_editable): self.assertEquals(col['is_editable'], expected_is_editable) def _initialize_database_connection(self): database_info = parent_node_dict[\"database\"][-1] self.db_name", "self._check_oids(response_data) self._check_editable_columns(response_data) def tearDown(self): # Disconnect the database database_utils.disconnect_database(self, self.server_id, self.db_id) def _execute_select_sql(self):", "with OIDs are not supported starting ' 'PostgreSQL 12') driver_version = utils.get_driver_version() driver_version", "response_data['data']['primary_keys'] self.assertEquals(primary_keys, self.expected_primary_keys) def 
_check_oids(self, response_data): has_oids = response_data['data']['has_oids'] self.assertEquals(has_oids, self.expected_has_oids) def _check_editable_columns(self,", "= \"\"\" DROP TABLE IF EXISTS {0}; CREATE TABLE {0}( pk_col1 SERIAL, pk_col2", "renaming a normal column to a primary key name', dict( sql='SELECT normal_col1 as", "# This software is released under the PostgreSQL Licence # ########################################################################## import json", "'int4' }, expected_has_oids=False, table_has_oids=False, expected_cols_is_editable=[False, True, True] )), ('When selecting a normal column", "'int4' }, expected_has_oids=True, table_has_oids=True, expected_cols_is_editable=[True, True, True, True, False] )), ('When selecting oids", "False] )), ('When selecting oids without primary keys (table with oids)', dict( sql='SELECT", "SERIAL, pk_col2 SERIAL, normal_col1 VARCHAR, normal_col2 VARCHAR, PRIMARY KEY(pk_col1, pk_col2) ) \"\"\".format(self.test_table_name) if", "expected_primary_keys={ 'pk_col1': 'int4', 'pk_col2': 'int4' }, expected_has_oids=False, table_has_oids=False, expected_cols_is_editable=[True, True, False] )), ('When", "expected_primary_keys=None, expected_has_oids=False, table_has_oids=False, expected_cols_is_editable=[False] )), ('When selecting none of the primary keys of", "'pk_col2': 'int4' }, expected_has_oids=False, table_has_oids=False, expected_cols_is_editable=[True, True, True, True] )), ('When selecting all", "sql='SELECT pk_col1, pk_col2, normal_col1 as some_col FROM \"{0}\";', expected_primary_keys={ 'pk_col1': 'int4', 'pk_col2': 'int4'", "self._create_test_table(table_has_oids=self.table_has_oids) response_data = self._execute_select_sql() self._check_primary_keys(response_data) self._check_oids(response_data) self._check_editable_columns(response_data) def tearDown(self): # Disconnect the database", "self.sql.format(self.test_table_name) is_success, response_data = \\ execute_query(tester=self.tester, query=sql, 
poll_url=self.poll_url, start_query_tool_url=self.start_query_tool_url) self.assertEquals(is_success, True) return response_data", "expected_cols_is_editable=[False, False] )), ('When renaming a normal column', dict( sql='SELECT pk_col1, pk_col2, normal_col1", "resultsets require pyscopg 2.8 or later') self.db_id = database_info[\"db_id\"] db_con = database_utils.connect_database(self, utils.SERVER_GROUP,", "scenarios = [ ('When selecting all columns of the Temporary table, on commit", "table', dict( sql='SELECT pk_col2 FROM {0};', expected_primary_keys=None, expected_has_oids=False, table_has_oids=False, expected_cols_is_editable=[False] )), ('When selecting", "keys and oids (table with oids)', dict( sql='SELECT *, oid FROM {0};', expected_primary_keys={", "sql='SELECT normal_col1 as pk_col1, pk_col1, pk_col2 FROM {0};', expected_primary_keys={ 'pk_col1': 'int4', 'pk_col2': 'int4'", "def _initialize_database_connection(self): database_info = parent_node_dict[\"database\"][-1] self.db_name = database_info[\"db_name\"] self.server_id = database_info[\"server_id\"] self.server_version =", "# # pgAdmin 4 - PostgreSQL Tools # # Copyright (C) 2013 -", "# # Copyright (C) 2013 - 2020, The pgAdmin Development Team # This", "def runTest(self): response_data = self._execute_select_sql() self._check_primary_keys(response_data) self._check_oids(response_data) # Verifying Temporary table result data", "FROM {0};', expected_primary_keys=None, expected_has_oids=False, table_has_oids=True, expected_cols_is_editable=[False, False] )) ] def setUp(self): self.test_table_name =", "'/sqleditor/query_tool/start/{0}'.format(self.trans_id) self.poll_url = '/sqleditor/poll/{0}'.format(self.trans_id) def _create_test_table(self, table_has_oids=False): create_sql = \"\"\" DROP TABLE IF", "normal_col1 as pk_col1, pk_col1, pk_col2 FROM {0};', expected_primary_keys={ 'pk_col1': 'int4', 'pk_col2': 'int4' },", "TEMPORARY TABLE {0} ON COMMIT DROP AS SELECT CURRENT_DATE AS today; SELECT *", ")) ] 
def runTest(self): response_data = self._execute_select_sql() self._check_primary_keys(response_data) self._check_oids(response_data) # Verifying Temporary table", "_initialize_database_connection(self): database_info = parent_node_dict[\"database\"][-1] self.db_name = database_info[\"db_name\"] self.server_id = database_info[\"server_id\"] self.server_version = parent_node_dict[\"schema\"][-1][\"server_version\"]", "*, oid FROM {0};', expected_primary_keys={ 'pk_col1': 'int4', 'pk_col2': 'int4' }, expected_has_oids=True, table_has_oids=True, expected_cols_is_editable=[True,", "WITH OIDS;' else: create_sql += ';' utils.create_table_with_query(self.server, self.db_name, create_sql) class TestTemporaryTable(TestQueryUpdatableResultset): \"\"\" This", "pk_col1, pk_col2, normal_col1, normal_col1 FROM {0};', expected_primary_keys={ 'pk_col1': 'int4', 'pk_col2': 'int4' }, expected_has_oids=False,", "none of the primary keys or oids (table with oids)', dict( sql='SELECT normal_col1,", "database_utils.connect_database(self, utils.SERVER_GROUP, self.server_id, self.db_id) if not db_con[\"info\"] == \"Database connected.\": raise Exception(\"Could not", "{0};', expected_primary_keys={ 'pk_col1': 'int4', 'pk_col2': 'int4' }, expected_has_oids=True, table_has_oids=True, expected_cols_is_editable=[True, True, True, True,", "self._check_primary_keys(response_data) self._check_oids(response_data) # Verifying Temporary table result data on Commit Drop self._check_results_column_data(response_data) self._check_editable_columns(response_data)", "sql='SELECT pk_col1, pk_col2, normal_col1, normal_col1 FROM {0};', expected_primary_keys={ 'pk_col1': 'int4', 'pk_col2': 'int4' },", "with oids)', dict( sql='SELECT normal_col1, normal_col2 FROM {0};', expected_primary_keys=None, expected_has_oids=False, table_has_oids=True, expected_cols_is_editable=[False, False]", "selecting oids without primary keys (table with oids)', dict( sql='SELECT oid, normal_col1, normal_col2", "Licence # 
########################################################################## import json import random from pgadmin.browser.server_groups.servers.databases.tests import utils as \\", "def _initialize_urls(self): self.start_query_tool_url = \\ '/sqleditor/query_tool/start/{0}'.format(self.trans_id) self.poll_url = '/sqleditor/poll/{0}'.format(self.trans_id) def _create_test_table(self, table_has_oids=False): create_sql", "('When selecting all primary keys of the table', dict( sql='SELECT pk_col1, pk_col2 FROM", "True] )), ('When selecting some of the primary keys of the table', dict(", "self.db_id) if not db_con[\"info\"] == \"Database connected.\": raise Exception(\"Could not connect to the", "oids)', dict( sql='SELECT normal_col1, normal_col2 FROM {0};', expected_primary_keys=None, expected_has_oids=False, table_has_oids=True, expected_cols_is_editable=[False, False] ))", "expected_has_oids=False, table_has_oids=False, expected_cols_is_editable=[True, True, False] )), ('When renaming a normal column to a", "}, expected_has_oids=False, table_has_oids=False, expected_cols_is_editable=[True, True, False] )), ('When renaming a normal column to", "{0};', expected_primary_keys={ 'pk_col1': 'int4', 'pk_col2': 'int4' }, expected_has_oids=False, table_has_oids=False, expected_cols_is_editable=[True, True, True, True]", "DROP AS SELECT CURRENT_DATE AS today; SELECT * FROM {0};''', expected_primary_keys=None, expected_results_column_data=[[date.today().strftime( \"%Y-%m-%d\")]],", "TABLE {0}( pk_col1 SERIAL, pk_col2 SERIAL, normal_col1 VARCHAR, normal_col2 VARCHAR, PRIMARY KEY(pk_col1, pk_col2)", "[ ('When selecting all columns of the table', dict( sql='SELECT * FROM {0};',", "' 'PostgreSQL 12') driver_version = utils.get_driver_version() driver_version = float('.'.join(driver_version.split('.')[:2])) if driver_version < 2.8:", "def _check_editable_columns(self, response_data): columns_info = response_data['data']['colinfo'] for col, expected_is_editable in \\ 
zip(columns_info, self.expected_cols_is_editable):", "('When selecting a normal column twice', dict( sql='SELECT pk_col1, pk_col2, normal_col1, normal_col1 FROM", "db_con[\"info\"] == \"Database connected.\": raise Exception(\"Could not connect to the database.\") def _initialize_query_tool(self):", "column', dict( sql='SELECT pk_col1, pk_col2, normal_col1 || normal_col2 FROM {0};', expected_primary_keys={'pk_col1': 'int4', 'pk_col2':", "float('.'.join(driver_version.split('.')[:2])) if driver_version < 2.8: self.skipTest('Updatable resultsets require pyscopg 2.8 or later') self.db_id", "database.\") def _initialize_query_tool(self): self.trans_id = str(random.randint(1, 9999999)) url = '/datagrid/initialize/query_tool/{0}/{1}/{2}/{3}'.format( self.trans_id, utils.SERVER_GROUP, self.server_id,", "\\ import execute_query from datetime import date class TestQueryUpdatableResultset(BaseTestGenerator): \"\"\" This class will", "True] )), ('When selecting none of the primary keys or oids (table with", "normal_col1, normal_col2 FROM {0};', expected_primary_keys=None, expected_has_oids=False, table_has_oids=True, expected_cols_is_editable=[False, False] )) ] def setUp(self):", "table_has_oids=True, expected_cols_is_editable=[True, True, True, True, False] )), ('When selecting oids without primary keys", "expected_results_column_data=[[date.today().strftime( \"%Y-%m-%d\")]], expected_has_oids=False, expected_results_column_is_editable=False, table_has_oids=False, expected_cols_is_editable=[False] )) ] def runTest(self): response_data = self._execute_select_sql()", "as utils from pgadmin.tools.sqleditor.tests.execute_query_test_utils \\ import execute_query from datetime import date class TestQueryUpdatableResultset(BaseTestGenerator):", "}, expected_has_oids=False, table_has_oids=False, expected_cols_is_editable=[True, True, True, True] )), ('When selecting all primary keys", "Tools # # Copyright (C) 2013 - 2020, The pgAdmin Development Team #", "database_info = 
parent_node_dict[\"database\"][-1] self.db_name = database_info[\"db_name\"] self.server_id = database_info[\"server_id\"] self.server_version = parent_node_dict[\"schema\"][-1][\"server_version\"] if", "True] )), ('When selecting all primary keys of the table', dict( sql='SELECT pk_col1,", "def _check_oids(self, response_data): has_oids = response_data['data']['has_oids'] self.assertEquals(has_oids, self.expected_has_oids) def _check_editable_columns(self, response_data): columns_info =", "raise Exception(\"Could not connect to the database.\") def _initialize_query_tool(self): self.trans_id = str(random.randint(1, 9999999))", "_create_test_table(self, table_has_oids=False): create_sql = \"\"\" DROP TABLE IF EXISTS {0}; CREATE TABLE {0}(", "sql='SELECT oid, normal_col1, normal_col2 FROM {0};', expected_primary_keys=None, expected_has_oids=True, table_has_oids=True, expected_cols_is_editable=[False, True, True] )),", "renaming a primary key', dict( sql='SELECT pk_col1 as some_col, pk_col2 FROM \"{0}\";', expected_primary_keys=None,", "response_data): results_column_data = response_data['data']['result'] for result_data, expected_is_editable in \\ zip(results_column_data, self.expected_results_column_data): self.assertEquals(result_data, expected_is_editable)", "import random from pgadmin.browser.server_groups.servers.databases.tests import utils as \\ database_utils from pgadmin.utils.route import BaseTestGenerator", "oids (table with oids)', dict( sql='SELECT *, oid FROM {0};', expected_primary_keys={ 'pk_col1': 'int4',", "parent_node_dict[\"schema\"][-1][\"server_version\"] if self.server_version >= 120000 and self.table_has_oids: self.skipTest('Tables with OIDs are not supported", "pgadmin.browser.server_groups.servers.databases.tests import utils as \\ database_utils from pgadmin.utils.route import BaseTestGenerator from regression import", "of whether the query result-set is updatable. 
\"\"\" scenarios = [ ('When selecting", "'int4' }, expected_has_oids=False, table_has_oids=False, expected_cols_is_editable=[True, True, True, True] )), ('When selecting all primary", "True, True, True] )), ('When selecting all primary keys of the table', dict(", "column to a primary key name', dict( sql='SELECT normal_col1 as pk_col1, pk_col1, pk_col2", "the database database_utils.disconnect_database(self, self.server_id, self.db_id) def _execute_select_sql(self): sql = self.sql.format(self.test_table_name) is_success, response_data =", "True, False] )), ('When selecting a non-table column', dict( sql='SELECT pk_col1, pk_col2, normal_col1", "tearDown(self): # Disconnect the database database_utils.disconnect_database(self, self.server_id, self.db_id) def _execute_select_sql(self): sql = self.sql.format(self.test_table_name)", "from pgadmin.tools.sqleditor.tests.execute_query_test_utils \\ import execute_query from datetime import date class TestQueryUpdatableResultset(BaseTestGenerator): \"\"\" This", "self.assertEquals(has_oids, self.expected_has_oids) def _check_editable_columns(self, response_data): columns_info = response_data['data']['colinfo'] for col, expected_is_editable in \\", "expected_cols_is_editable=[True, True, False] )), ('When selecting primary keys and oids (table with oids)',", "the Temporary table, on commit drop', dict(sql=''' DROP TABLE IF EXISTS {0}; CREATE", "'pk_col1': 'int4', 'pk_col2': 'int4' }, expected_has_oids=False, table_has_oids=False, expected_cols_is_editable=[True, True, True, False] )), ('When", "pk_col2, normal_col1 as some_col FROM \"{0}\";', expected_primary_keys={ 'pk_col1': 'int4', 'pk_col2': 'int4' }, expected_has_oids=False,", "= [ ('When selecting all columns of the table', dict( sql='SELECT * FROM", "create_sql += ' WITH OIDS;' else: create_sql += ';' utils.create_table_with_query(self.server, self.db_name, create_sql) class", "\"\"\" This class will test the detection of whether the query result-set is", "dict( 
sql='SELECT pk_col1, pk_col2, normal_col1 as some_col FROM \"{0}\";', expected_primary_keys={ 'pk_col1': 'int4', 'pk_col2':", "the table', dict( sql='SELECT normal_col1 FROM {0};', expected_primary_keys=None, expected_has_oids=False, table_has_oids=False, expected_cols_is_editable=[False] )), ('When", "True] )), ('When selecting a normal column twice', dict( sql='SELECT pk_col1, pk_col2, normal_col1,", "are not supported starting ' 'PostgreSQL 12') driver_version = utils.get_driver_version() driver_version = float('.'.join(driver_version.split('.')[:2]))", "parent_node_dict[\"database\"][-1] self.db_name = database_info[\"db_name\"] self.server_id = database_info[\"server_id\"] self.server_version = parent_node_dict[\"schema\"][-1][\"server_version\"] if self.server_version >=", "expected_has_oids=False, table_has_oids=False, expected_cols_is_editable=[True, True, False] )), ('When selecting primary keys and oids (table", "is updatable. \"\"\" scenarios = [ ('When selecting all columns of the table',", "selecting all columns of the table', dict( sql='SELECT * FROM {0};', expected_primary_keys={ 'pk_col1':", "and oids (table with oids)', dict( sql='SELECT *, oid FROM {0};', expected_primary_keys={ 'pk_col1':", "'pk_col1': 'int4', 'pk_col2': 'int4' }, expected_has_oids=False, table_has_oids=False, expected_cols_is_editable=[True, True, True, True] )), ('When", "response_data): primary_keys = response_data['data']['primary_keys'] self.assertEquals(primary_keys, self.expected_primary_keys) def _check_oids(self, response_data): has_oids = response_data['data']['has_oids'] self.assertEquals(has_oids,", "normal_col2 VARCHAR, PRIMARY KEY(pk_col1, pk_col2) ) \"\"\".format(self.test_table_name) if table_has_oids: create_sql += ' WITH", "a primary key', dict( sql='SELECT pk_col1 as some_col, pk_col2 FROM \"{0}\";', expected_primary_keys=None, expected_has_oids=False,", "pgadmin.utils.route import BaseTestGenerator from regression import parent_node_dict from 
regression.python_test_utils import test_utils as utils", "response_data = \\ execute_query(tester=self.tester, query=sql, poll_url=self.poll_url, start_query_tool_url=self.start_query_tool_url) self.assertEquals(is_success, True) return response_data def _check_primary_keys(self,", ")), ('When renaming a primary key', dict( sql='SELECT pk_col1 as some_col, pk_col2 FROM", "BaseTestGenerator from regression import parent_node_dict from regression.python_test_utils import test_utils as utils from pgadmin.tools.sqleditor.tests.execute_query_test_utils", "2020, The pgAdmin Development Team # This software is released under the PostgreSQL", "columns of the table', dict( sql='SELECT * FROM {0};', expected_primary_keys={ 'pk_col1': 'int4', 'pk_col2':", "table result data on Commit Drop self._check_results_column_data(response_data) self._check_editable_columns(response_data) def _check_results_column_data(self, response_data): results_column_data =", "a non-table column', dict( sql='SELECT pk_col1, pk_col2, normal_col1 || normal_col2 FROM {0};', expected_primary_keys={'pk_col1':", "PRIMARY KEY(pk_col1, pk_col2) ) \"\"\".format(self.test_table_name) if table_has_oids: create_sql += ' WITH OIDS;' else:", "supported starting ' 'PostgreSQL 12') driver_version = utils.get_driver_version() driver_version = float('.'.join(driver_version.split('.')[:2])) if driver_version", "'PostgreSQL 12') driver_version = utils.get_driver_version() driver_version = float('.'.join(driver_version.split('.')[:2])) if driver_version < 2.8: self.skipTest('Updatable", "_initialize_urls(self): self.start_query_tool_url = \\ '/sqleditor/query_tool/start/{0}'.format(self.trans_id) self.poll_url = '/sqleditor/poll/{0}'.format(self.trans_id) def _create_test_table(self, table_has_oids=False): create_sql =", ")), ('When selecting oids without primary keys (table with oids)', dict( sql='SELECT oid,", "AS SELECT CURRENT_DATE AS today; SELECT * FROM {0};''', expected_primary_keys=None, 
expected_results_column_data=[[date.today().strftime( \"%Y-%m-%d\")]], expected_has_oids=False,", "= '/datagrid/initialize/query_tool/{0}/{1}/{2}/{3}'.format( self.trans_id, utils.SERVER_GROUP, self.server_id, self.db_id) response = self.tester.post(url) self.assertEquals(response.status_code, 200) def _initialize_urls(self):", ")), ('When selecting all primary keys of the table', dict( sql='SELECT pk_col1, pk_col2", "response_data['data']['colinfo'] for col, expected_is_editable in \\ zip(columns_info, self.expected_cols_is_editable): self.assertEquals(col['is_editable'], expected_is_editable) def _initialize_database_connection(self): database_info", "utils.SERVER_GROUP, self.server_id, self.db_id) if not db_con[\"info\"] == \"Database connected.\": raise Exception(\"Could not connect", "oid, normal_col1, normal_col2 FROM {0};', expected_primary_keys=None, expected_has_oids=True, table_has_oids=True, expected_cols_is_editable=[False, True, True] )), ('When", "expected_cols_is_editable=[False] )) ] def runTest(self): response_data = self._execute_select_sql() self._check_primary_keys(response_data) self._check_oids(response_data) # Verifying Temporary", "import date class TestQueryUpdatableResultset(BaseTestGenerator): \"\"\" This class will test the detection of whether", "will test the detection of whether the query result-set is updatable. 
\"\"\" scenarios", "not supported starting ' 'PostgreSQL 12') driver_version = utils.get_driver_version() driver_version = float('.'.join(driver_version.split('.')[:2])) if", "True, False] )), ('When renaming a normal column to a primary key name',", "'int4', 'pk_col2': 'int4' }, expected_has_oids=False, table_has_oids=False, expected_cols_is_editable=[True, True] )), ('When selecting some of", "{0};', expected_primary_keys=None, expected_has_oids=False, table_has_oids=False, expected_cols_is_editable=[False] )), ('When selecting none of the primary keys", "execute_query from datetime import date class TestQueryUpdatableResultset(BaseTestGenerator): \"\"\" This class will test the", "Commit Drop self._check_results_column_data(response_data) self._check_editable_columns(response_data) def _check_results_column_data(self, response_data): results_column_data = response_data['data']['result'] for result_data, expected_is_editable", "{0};', expected_primary_keys={ 'pk_col1': 'int4', 'pk_col2': 'int4' }, expected_has_oids=False, table_has_oids=False, expected_cols_is_editable=[False, True, True] )),", "(table with oids)', dict( sql='SELECT oid, normal_col1, normal_col2 FROM {0};', expected_primary_keys=None, expected_has_oids=True, table_has_oids=True,", "self.server_id, self.db_id) response = self.tester.post(url) self.assertEquals(response.status_code, 200) def _initialize_urls(self): self.start_query_tool_url = \\ '/sqleditor/query_tool/start/{0}'.format(self.trans_id)", "= response_data['data']['colinfo'] for col, expected_is_editable in \\ zip(columns_info, self.expected_cols_is_editable): self.assertEquals(col['is_editable'], expected_is_editable) def _initialize_database_connection(self):", "';' utils.create_table_with_query(self.server, self.db_name, create_sql) class TestTemporaryTable(TestQueryUpdatableResultset): \"\"\" This class will test the query", "self.assertEquals(is_success, True) return response_data def _check_primary_keys(self, response_data): 
primary_keys = response_data['data']['primary_keys'] self.assertEquals(primary_keys, self.expected_primary_keys) def", "{0};', expected_primary_keys={ 'pk_col1': 'int4', 'pk_col2': 'int4' }, expected_has_oids=False, table_has_oids=False, expected_cols_is_editable=[True, True, True, False]", "import test_utils as utils from pgadmin.tools.sqleditor.tests.execute_query_test_utils \\ import execute_query from datetime import date", "self.start_query_tool_url = \\ '/sqleditor/query_tool/start/{0}'.format(self.trans_id) self.poll_url = '/sqleditor/poll/{0}'.format(self.trans_id) def _create_test_table(self, table_has_oids=False): create_sql = \"\"\"", "(table with oids)', dict( sql='SELECT *, oid FROM {0};', expected_primary_keys={ 'pk_col1': 'int4', 'pk_col2':", "self.expected_primary_keys) def _check_oids(self, response_data): has_oids = response_data['data']['has_oids'] self.assertEquals(has_oids, self.expected_has_oids) def _check_editable_columns(self, response_data): columns_info", "}, expected_has_oids=False, table_has_oids=False, expected_cols_is_editable=[True, True] )), ('When selecting some of the primary keys", "########################################################################## # # pgAdmin 4 - PostgreSQL Tools # # Copyright (C) 2013", "as \\ database_utils from pgadmin.utils.route import BaseTestGenerator from regression import parent_node_dict from regression.python_test_utils", "scenarios = [ ('When selecting all columns of the table', dict( sql='SELECT *", "normal_col1 as some_col FROM \"{0}\";', expected_primary_keys={ 'pk_col1': 'int4', 'pk_col2': 'int4' }, expected_has_oids=False, table_has_oids=False,", "selecting a non-table column', dict( sql='SELECT pk_col1, pk_col2, normal_col1 || normal_col2 FROM {0};',", "column twice', dict( sql='SELECT pk_col1, pk_col2, normal_col1, normal_col1 FROM {0};', expected_primary_keys={ 'pk_col1': 'int4',", "the query result-set is updatable. 
\"\"\" scenarios = [ ('When selecting all columns", "selecting none of the primary keys or oids (table with oids)', dict( sql='SELECT", "and self.table_has_oids: self.skipTest('Tables with OIDs are not supported starting ' 'PostgreSQL 12') driver_version", "of the table', dict( sql='SELECT * FROM {0};', expected_primary_keys={ 'pk_col1': 'int4', 'pk_col2': 'int4'", "runTest(self): self._create_test_table(table_has_oids=self.table_has_oids) response_data = self._execute_select_sql() self._check_primary_keys(response_data) self._check_oids(response_data) self._check_editable_columns(response_data) def tearDown(self): # Disconnect the", "response_data = self._execute_select_sql() self._check_primary_keys(response_data) self._check_oids(response_data) self._check_editable_columns(response_data) def tearDown(self): # Disconnect the database database_utils.disconnect_database(self,", "self.test_table_name = \"test_for_updatable_resultset\" + \\ str(random.randint(1000, 9999)) self._initialize_database_connection() self._initialize_query_tool() self._initialize_urls() def runTest(self): self._create_test_table(table_has_oids=self.table_has_oids)", ">= 120000 and self.table_has_oids: self.skipTest('Tables with OIDs are not supported starting ' 'PostgreSQL", "database_info[\"db_id\"] db_con = database_utils.connect_database(self, utils.SERVER_GROUP, self.server_id, self.db_id) if not db_con[\"info\"] == \"Database connected.\":", "sql='SELECT normal_col1 FROM {0};', expected_primary_keys=None, expected_has_oids=False, table_has_oids=False, expected_cols_is_editable=[False] )), ('When renaming a primary", "pgadmin.tools.sqleditor.tests.execute_query_test_utils \\ import execute_query from datetime import date class TestQueryUpdatableResultset(BaseTestGenerator): \"\"\" This class", "This class will test the query result-set for temporary tables \"\"\" scenarios =", "table', dict( sql='SELECT normal_col1 FROM {0};', expected_primary_keys=None, expected_has_oids=False, 
table_has_oids=False, expected_cols_is_editable=[False] )), ('When renaming", "FROM {0};', expected_primary_keys={ 'pk_col1': 'int4', 'pk_col2': 'int4' }, expected_has_oids=True, table_has_oids=True, expected_cols_is_editable=[True, True, True,", "query result-set is updatable. \"\"\" scenarios = [ ('When selecting all columns of", "self.db_id = database_info[\"db_id\"] db_con = database_utils.connect_database(self, utils.SERVER_GROUP, self.server_id, self.db_id) if not db_con[\"info\"] ==", "\"Database connected.\": raise Exception(\"Could not connect to the database.\") def _initialize_query_tool(self): self.trans_id =", "require pyscopg 2.8 or later') self.db_id = database_info[\"db_id\"] db_con = database_utils.connect_database(self, utils.SERVER_GROUP, self.server_id,", "import utils as \\ database_utils from pgadmin.utils.route import BaseTestGenerator from regression import parent_node_dict", "{0};', expected_primary_keys={'pk_col1': 'int4', 'pk_col2': 'int4' }, expected_has_oids=False, table_has_oids=False, expected_cols_is_editable=[True, True, False] )), ('When", "True, True, False] )), ('When selecting oids without primary keys (table with oids)',", "primary keys and oids (table with oids)', dict( sql='SELECT *, oid FROM {0};',", "table_has_oids=False, expected_cols_is_editable=[True, True, False] )), ('When renaming a normal column to a primary", "= parent_node_dict[\"schema\"][-1][\"server_version\"] if self.server_version >= 120000 and self.table_has_oids: self.skipTest('Tables with OIDs are not", "query=sql, poll_url=self.poll_url, start_query_tool_url=self.start_query_tool_url) self.assertEquals(is_success, True) return response_data def _check_primary_keys(self, response_data): primary_keys = response_data['data']['primary_keys']", "# pgAdmin 4 - PostgreSQL Tools # # Copyright (C) 2013 - 2020,", "False] )), ('When selecting a non-table column', dict( sql='SELECT pk_col1, pk_col2, normal_col1 ||", "dict( sql='SELECT pk_col2 FROM {0};', 
expected_primary_keys=None, expected_has_oids=False, table_has_oids=False, expected_cols_is_editable=[False] )), ('When selecting none", "Team # This software is released under the PostgreSQL Licence # ########################################################################## import", "9999)) self._initialize_database_connection() self._initialize_query_tool() self._initialize_urls() def runTest(self): self._create_test_table(table_has_oids=self.table_has_oids) response_data = self._execute_select_sql() self._check_primary_keys(response_data) self._check_oids(response_data) self._check_editable_columns(response_data)", "database_utils.disconnect_database(self, self.server_id, self.db_id) def _execute_select_sql(self): sql = self.sql.format(self.test_table_name) is_success, response_data = \\ execute_query(tester=self.tester,", "PostgreSQL Tools # # Copyright (C) 2013 - 2020, The pgAdmin Development Team", "pk_col2 FROM {0};', expected_primary_keys={ 'pk_col1': 'int4', 'pk_col2': 'int4' }, expected_has_oids=False, table_has_oids=False, expected_cols_is_editable=[True, True]", "driver_version = float('.'.join(driver_version.split('.')[:2])) if driver_version < 2.8: self.skipTest('Updatable resultsets require pyscopg 2.8 or", "_execute_select_sql(self): sql = self.sql.format(self.test_table_name) is_success, response_data = \\ execute_query(tester=self.tester, query=sql, poll_url=self.poll_url, start_query_tool_url=self.start_query_tool_url) self.assertEquals(is_success,", "date class TestQueryUpdatableResultset(BaseTestGenerator): \"\"\" This class will test the detection of whether the", "False] )), ('When renaming a normal column', dict( sql='SELECT pk_col1, pk_col2, normal_col1 as", "self.db_name, create_sql) class TestTemporaryTable(TestQueryUpdatableResultset): \"\"\" This class will test the query result-set for", "keys or oids (table with oids)', dict( sql='SELECT normal_col1, normal_col2 FROM {0};', expected_primary_keys=None,", "driver_version = 
utils.get_driver_version() driver_version = float('.'.join(driver_version.split('.')[:2])) if driver_version < 2.8: self.skipTest('Updatable resultsets require", "is_success, response_data = \\ execute_query(tester=self.tester, query=sql, poll_url=self.poll_url, start_query_tool_url=self.start_query_tool_url) self.assertEquals(is_success, True) return response_data def", "= utils.get_driver_version() driver_version = float('.'.join(driver_version.split('.')[:2])) if driver_version < 2.8: self.skipTest('Updatable resultsets require pyscopg", "expected_cols_is_editable=[True, True, True, False] )), ('When selecting a non-table column', dict( sql='SELECT pk_col1,", "dict( sql='SELECT *, oid FROM {0};', expected_primary_keys={ 'pk_col1': 'int4', 'pk_col2': 'int4' }, expected_has_oids=True,", "_check_results_column_data(self, response_data): results_column_data = response_data['data']['result'] for result_data, expected_is_editable in \\ zip(results_column_data, self.expected_results_column_data): self.assertEquals(result_data,", "\"\"\".format(self.test_table_name) if table_has_oids: create_sql += ' WITH OIDS;' else: create_sql += ';' utils.create_table_with_query(self.server,", "IF EXISTS {0}; CREATE TEMPORARY TABLE {0} ON COMMIT DROP AS SELECT CURRENT_DATE", "test_utils as utils from pgadmin.tools.sqleditor.tests.execute_query_test_utils \\ import execute_query from datetime import date class", "TABLE {0} ON COMMIT DROP AS SELECT CURRENT_DATE AS today; SELECT * FROM", "True, True] )), ('When selecting all primary keys of the table', dict( sql='SELECT", "'int4', 'pk_col2': 'int4' }, expected_has_oids=True, table_has_oids=True, expected_cols_is_editable=[True, True, True, True, False] )), ('When", "CREATE TEMPORARY TABLE {0} ON COMMIT DROP AS SELECT CURRENT_DATE AS today; SELECT", "of the primary keys or oids (table with oids)', dict( sql='SELECT normal_col1, normal_col2", "= database_utils.connect_database(self, utils.SERVER_GROUP, self.server_id, self.db_id) if not 
db_con[\"info\"] == \"Database connected.\": raise Exception(\"Could", "'int4', 'pk_col2': 'int4' }, expected_has_oids=False, table_has_oids=False, expected_cols_is_editable=[True, True, False] )), ('When renaming a", "in \\ zip(columns_info, self.expected_cols_is_editable): self.assertEquals(col['is_editable'], expected_is_editable) def _initialize_database_connection(self): database_info = parent_node_dict[\"database\"][-1] self.db_name =", "COMMIT DROP AS SELECT CURRENT_DATE AS today; SELECT * FROM {0};''', expected_primary_keys=None, expected_results_column_data=[[date.today().strftime(", "\"\"\" scenarios = [ ('When selecting all columns of the table', dict( sql='SELECT", "not db_con[\"info\"] == \"Database connected.\": raise Exception(\"Could not connect to the database.\") def", "utils from pgadmin.tools.sqleditor.tests.execute_query_test_utils \\ import execute_query from datetime import date class TestQueryUpdatableResultset(BaseTestGenerator): \"\"\"", "}, expected_has_oids=True, table_has_oids=True, expected_cols_is_editable=[True, True, True, True, False] )), ('When selecting oids without", "('When selecting none of the primary keys or oids (table with oids)', dict(", "4 - PostgreSQL Tools # # Copyright (C) 2013 - 2020, The pgAdmin", "sql = self.sql.format(self.test_table_name) is_success, response_data = \\ execute_query(tester=self.tester, query=sql, poll_url=self.poll_url, start_query_tool_url=self.start_query_tool_url) self.assertEquals(is_success, True)", "('When selecting all columns of the Temporary table, on commit drop', dict(sql=''' DROP", "normal_col1, normal_col2 FROM {0};', expected_primary_keys=None, expected_has_oids=True, table_has_oids=True, expected_cols_is_editable=[False, True, True] )), ('When selecting", "'int4' }, expected_has_oids=False, table_has_oids=False, expected_cols_is_editable=[True, True, False] )), ('When renaming a normal column", "for col, expected_is_editable in \\ zip(columns_info, self.expected_cols_is_editable): 
self.assertEquals(col['is_editable'], expected_is_editable) def _initialize_database_connection(self): database_info =", "True, False] )), ('When selecting primary keys and oids (table with oids)', dict(", "FROM \"{0}\";', expected_primary_keys=None, expected_has_oids=False, table_has_oids=False, expected_cols_is_editable=[False, False] )), ('When renaming a normal column',", "runTest(self): response_data = self._execute_select_sql() self._check_primary_keys(response_data) self._check_oids(response_data) # Verifying Temporary table result data on", "of the table', dict( sql='SELECT pk_col1, pk_col2 FROM {0};', expected_primary_keys={ 'pk_col1': 'int4', 'pk_col2':", "parent_node_dict from regression.python_test_utils import test_utils as utils from pgadmin.tools.sqleditor.tests.execute_query_test_utils \\ import execute_query from", "'int4', 'pk_col2': 'int4' }, expected_has_oids=False, table_has_oids=False, expected_cols_is_editable=[True, True, True, False] )), ('When selecting", "= self.sql.format(self.test_table_name) is_success, response_data = \\ execute_query(tester=self.tester, query=sql, poll_url=self.poll_url, start_query_tool_url=self.start_query_tool_url) self.assertEquals(is_success, True) return", "data on Commit Drop self._check_results_column_data(response_data) self._check_editable_columns(response_data) def _check_results_column_data(self, response_data): results_column_data = response_data['data']['result'] for", "Verifying Temporary table result data on Commit Drop self._check_results_column_data(response_data) self._check_editable_columns(response_data) def _check_results_column_data(self, response_data):", "= [ ('When selecting all columns of the Temporary table, on commit drop',", "expected_has_oids=False, table_has_oids=False, expected_cols_is_editable=[False, False] )), ('When renaming a normal column', dict( sql='SELECT pk_col1,", "response_data): columns_info = response_data['data']['colinfo'] for col, expected_is_editable in \\ 
zip(columns_info, self.expected_cols_is_editable): self.assertEquals(col['is_editable'], expected_is_editable)", "selecting some of the primary keys of the table', dict( sql='SELECT pk_col2 FROM", "[ ('When selecting all columns of the Temporary table, on commit drop', dict(sql='''", "expected_primary_keys={ 'pk_col1': 'int4', 'pk_col2': 'int4' }, expected_has_oids=True, table_has_oids=True, expected_cols_is_editable=[True, True, True, True, False]", "expected_primary_keys={'pk_col1': 'int4', 'pk_col2': 'int4' }, expected_has_oids=False, table_has_oids=False, expected_cols_is_editable=[True, True, False] )), ('When selecting", "True, True, False] )), ('When selecting a non-table column', dict( sql='SELECT pk_col1, pk_col2,", "expected_cols_is_editable=[False, True, True] )), ('When selecting none of the primary keys or oids", "{0} ON COMMIT DROP AS SELECT CURRENT_DATE AS today; SELECT * FROM {0};''',", "class will test the detection of whether the query result-set is updatable. \"\"\"", "True) return response_data def _check_primary_keys(self, response_data): primary_keys = response_data['data']['primary_keys'] self.assertEquals(primary_keys, self.expected_primary_keys) def _check_oids(self,", "selecting a normal column twice', dict( sql='SELECT pk_col1, pk_col2, normal_col1, normal_col1 FROM {0};',", "This software is released under the PostgreSQL Licence # ########################################################################## import json import", "\\ execute_query(tester=self.tester, query=sql, poll_url=self.poll_url, start_query_tool_url=self.start_query_tool_url) self.assertEquals(is_success, True) return response_data def _check_primary_keys(self, response_data): primary_keys", "driver_version < 2.8: self.skipTest('Updatable resultsets require pyscopg 2.8 or later') self.db_id = database_info[\"db_id\"]", "drop', dict(sql=''' DROP TABLE IF EXISTS {0}; CREATE TEMPORARY TABLE {0} ON COMMIT", "columns_info = response_data['data']['colinfo'] for col, 
expected_is_editable in \\ zip(columns_info, self.expected_cols_is_editable): self.assertEquals(col['is_editable'], expected_is_editable) def", "_initialize_query_tool(self): self.trans_id = str(random.randint(1, 9999999)) url = '/datagrid/initialize/query_tool/{0}/{1}/{2}/{3}'.format( self.trans_id, utils.SERVER_GROUP, self.server_id, self.db_id) response", "== \"Database connected.\": raise Exception(\"Could not connect to the database.\") def _initialize_query_tool(self): self.trans_id", "pk_col2 FROM \"{0}\";', expected_primary_keys=None, expected_has_oids=False, table_has_oids=False, expected_cols_is_editable=[False, False] )), ('When renaming a normal", "sql='SELECT pk_col1, pk_col2 FROM {0};', expected_primary_keys={ 'pk_col1': 'int4', 'pk_col2': 'int4' }, expected_has_oids=False, table_has_oids=False,", "col, expected_is_editable in \\ zip(columns_info, self.expected_cols_is_editable): self.assertEquals(col['is_editable'], expected_is_editable) def _initialize_database_connection(self): database_info = parent_node_dict[\"database\"][-1]", "temporary tables \"\"\" scenarios = [ ('When selecting all columns of the Temporary", "connect to the database.\") def _initialize_query_tool(self): self.trans_id = str(random.randint(1, 9999999)) url = '/datagrid/initialize/query_tool/{0}/{1}/{2}/{3}'.format(", "normal_col1, normal_col1 FROM {0};', expected_primary_keys={ 'pk_col1': 'int4', 'pk_col2': 'int4' }, expected_has_oids=False, table_has_oids=False, expected_cols_is_editable=[True,", "self._check_editable_columns(response_data) def _check_results_column_data(self, response_data): results_column_data = response_data['data']['result'] for result_data, expected_is_editable in \\ zip(results_column_data,", "{0};', expected_primary_keys=None, expected_has_oids=False, table_has_oids=True, expected_cols_is_editable=[False, False] )) ] def setUp(self): self.test_table_name = \"test_for_updatable_resultset\"", "FROM {0};''', expected_primary_keys=None, 
expected_results_column_data=[[date.today().strftime( \"%Y-%m-%d\")]], expected_has_oids=False, expected_results_column_is_editable=False, table_has_oids=False, expected_cols_is_editable=[False] )) ] def runTest(self):", "pk_col1, pk_col2 FROM {0};', expected_primary_keys={ 'pk_col1': 'int4', 'pk_col2': 'int4' }, expected_has_oids=False, table_has_oids=False, expected_cols_is_editable=[True,", "= response_data['data']['has_oids'] self.assertEquals(has_oids, self.expected_has_oids) def _check_editable_columns(self, response_data): columns_info = response_data['data']['colinfo'] for col, expected_is_editable", "from datetime import date class TestQueryUpdatableResultset(BaseTestGenerator): \"\"\" This class will test the detection", "class TestQueryUpdatableResultset(BaseTestGenerator): \"\"\" This class will test the detection of whether the query", "import execute_query from datetime import date class TestQueryUpdatableResultset(BaseTestGenerator): \"\"\" This class will test", "normal_col1 FROM {0};', expected_primary_keys={ 'pk_col1': 'int4', 'pk_col2': 'int4' }, expected_has_oids=False, table_has_oids=False, expected_cols_is_editable=[True, True,", "* FROM {0};', expected_primary_keys={ 'pk_col1': 'int4', 'pk_col2': 'int4' }, expected_has_oids=False, table_has_oids=False, expected_cols_is_editable=[True, True,", "response = self.tester.post(url) self.assertEquals(response.status_code, 200) def _initialize_urls(self): self.start_query_tool_url = \\ '/sqleditor/query_tool/start/{0}'.format(self.trans_id) self.poll_url =", "self.skipTest('Tables with OIDs are not supported starting ' 'PostgreSQL 12') driver_version = utils.get_driver_version()", "renaming a normal column', dict( sql='SELECT pk_col1, pk_col2, normal_col1 as some_col FROM \"{0}\";',", "self.server_id, self.db_id) def _execute_select_sql(self): sql = self.sql.format(self.test_table_name) is_success, response_data = \\ execute_query(tester=self.tester, query=sql,", "dict( sql='SELECT pk_col1, pk_col2, 
normal_col1, normal_col1 FROM {0};', expected_primary_keys={ 'pk_col1': 'int4', 'pk_col2': 'int4'", "('When selecting all columns of the table', dict( sql='SELECT * FROM {0};', expected_primary_keys={", "pk_col1, pk_col2, normal_col1 as some_col FROM \"{0}\";', expected_primary_keys={ 'pk_col1': 'int4', 'pk_col2': 'int4' },", "or later') self.db_id = database_info[\"db_id\"] db_con = database_utils.connect_database(self, utils.SERVER_GROUP, self.server_id, self.db_id) if not", "test the query result-set for temporary tables \"\"\" scenarios = [ ('When selecting", "primary_keys = response_data['data']['primary_keys'] self.assertEquals(primary_keys, self.expected_primary_keys) def _check_oids(self, response_data): has_oids = response_data['data']['has_oids'] self.assertEquals(has_oids, self.expected_has_oids)", "normal_col2 FROM {0};', expected_primary_keys=None, expected_has_oids=False, table_has_oids=True, expected_cols_is_editable=[False, False] )) ] def setUp(self): self.test_table_name", "return response_data def _check_primary_keys(self, response_data): primary_keys = response_data['data']['primary_keys'] self.assertEquals(primary_keys, self.expected_primary_keys) def _check_oids(self, response_data):", "response_data def _check_primary_keys(self, response_data): primary_keys = response_data['data']['primary_keys'] self.assertEquals(primary_keys, self.expected_primary_keys) def _check_oids(self, response_data): has_oids", "selecting all primary keys of the table', dict( sql='SELECT pk_col1, pk_col2 FROM {0};',", "('When selecting oids without primary keys (table with oids)', dict( sql='SELECT oid, normal_col1,", "pk_col1, pk_col2, normal_col1 || normal_col2 FROM {0};', expected_primary_keys={'pk_col1': 'int4', 'pk_col2': 'int4' }, expected_has_oids=False,", "('When selecting some of the primary keys of the table', dict( sql='SELECT pk_col2", "self.trans_id, utils.SERVER_GROUP, self.server_id, self.db_id) response = self.tester.post(url) 
self.assertEquals(response.status_code, 200) def _initialize_urls(self): self.start_query_tool_url =", "expected_has_oids=False, expected_results_column_is_editable=False, table_has_oids=False, expected_cols_is_editable=[False] )) ] def runTest(self): response_data = self._execute_select_sql() self._check_primary_keys(response_data) self._check_oids(response_data)", "'pk_col2': 'int4' }, expected_has_oids=False, table_has_oids=False, expected_cols_is_editable=[True, True, False] )), ('When selecting primary keys", "a normal column', dict( sql='SELECT pk_col1, pk_col2, normal_col1 as some_col FROM \"{0}\";', expected_primary_keys={", "or oids (table with oids)', dict( sql='SELECT normal_col1, normal_col2 FROM {0};', expected_primary_keys=None, expected_has_oids=False,", "FROM {0};', expected_primary_keys={ 'pk_col1': 'int4', 'pk_col2': 'int4' }, expected_has_oids=False, table_has_oids=False, expected_cols_is_editable=[True, True] )),", "table_has_oids=False, expected_cols_is_editable=[False, False] )), ('When renaming a normal column', dict( sql='SELECT pk_col1, pk_col2,", "2.8: self.skipTest('Updatable resultsets require pyscopg 2.8 or later') self.db_id = database_info[\"db_id\"] db_con =", "url = '/datagrid/initialize/query_tool/{0}/{1}/{2}/{3}'.format( self.trans_id, utils.SERVER_GROUP, self.server_id, self.db_id) response = self.tester.post(url) self.assertEquals(response.status_code, 200) def", "Temporary table result data on Commit Drop self._check_results_column_data(response_data) self._check_editable_columns(response_data) def _check_results_column_data(self, response_data): results_column_data", "to the database.\") def _initialize_query_tool(self): self.trans_id = str(random.randint(1, 9999999)) url = '/datagrid/initialize/query_tool/{0}/{1}/{2}/{3}'.format( self.trans_id,", "sql='SELECT normal_col1, normal_col2 FROM {0};', expected_primary_keys=None, expected_has_oids=False, table_has_oids=True, expected_cols_is_editable=[False, False] )) ] def", "- 
2020, The pgAdmin Development Team # This software is released under the", "columns of the Temporary table, on commit drop', dict(sql=''' DROP TABLE IF EXISTS", "selecting none of the primary keys of the table', dict( sql='SELECT normal_col1 FROM", "expected_has_oids=True, table_has_oids=True, expected_cols_is_editable=[False, True, True] )), ('When selecting none of the primary keys", "+= ';' utils.create_table_with_query(self.server, self.db_name, create_sql) class TestTemporaryTable(TestQueryUpdatableResultset): \"\"\" This class will test the", "self.server_version >= 120000 and self.table_has_oids: self.skipTest('Tables with OIDs are not supported starting '", "table_has_oids=False, expected_cols_is_editable=[False] )), ('When selecting none of the primary keys of the table',", "str(random.randint(1000, 9999)) self._initialize_database_connection() self._initialize_query_tool() self._initialize_urls() def runTest(self): self._create_test_table(table_has_oids=self.table_has_oids) response_data = self._execute_select_sql() self._check_primary_keys(response_data) self._check_oids(response_data)", "Drop self._check_results_column_data(response_data) self._check_editable_columns(response_data) def _check_results_column_data(self, response_data): results_column_data = response_data['data']['result'] for result_data, expected_is_editable in", "Copyright (C) 2013 - 2020, The pgAdmin Development Team # This software is", "2.8 or later') self.db_id = database_info[\"db_id\"] db_con = database_utils.connect_database(self, utils.SERVER_GROUP, self.server_id, self.db_id) if", "# Copyright (C) 2013 - 2020, The pgAdmin Development Team # This software", "self._initialize_query_tool() self._initialize_urls() def runTest(self): self._create_test_table(table_has_oids=self.table_has_oids) response_data = self._execute_select_sql() self._check_primary_keys(response_data) self._check_oids(response_data) self._check_editable_columns(response_data) def tearDown(self):", "This class 
will test the detection of whether the query result-set is updatable.", "Temporary table, on commit drop', dict(sql=''' DROP TABLE IF EXISTS {0}; CREATE TEMPORARY", "expected_cols_is_editable=[False] )), ('When renaming a primary key', dict( sql='SELECT pk_col1 as some_col, pk_col2", "has_oids = response_data['data']['has_oids'] self.assertEquals(has_oids, self.expected_has_oids) def _check_editable_columns(self, response_data): columns_info = response_data['data']['colinfo'] for col,", "pk_col1 SERIAL, pk_col2 SERIAL, normal_col1 VARCHAR, normal_col2 VARCHAR, PRIMARY KEY(pk_col1, pk_col2) ) \"\"\".format(self.test_table_name)", "sql='SELECT * FROM {0};', expected_primary_keys={ 'pk_col1': 'int4', 'pk_col2': 'int4' }, expected_has_oids=False, table_has_oids=False, expected_cols_is_editable=[True,", "def runTest(self): self._create_test_table(table_has_oids=self.table_has_oids) response_data = self._execute_select_sql() self._check_primary_keys(response_data) self._check_oids(response_data) self._check_editable_columns(response_data) def tearDown(self): # Disconnect", "Disconnect the database database_utils.disconnect_database(self, self.server_id, self.db_id) def _execute_select_sql(self): sql = self.sql.format(self.test_table_name) is_success, response_data", "on Commit Drop self._check_results_column_data(response_data) self._check_editable_columns(response_data) def _check_results_column_data(self, response_data): results_column_data = response_data['data']['result'] for result_data,", ")), ('When renaming a normal column', dict( sql='SELECT pk_col1, pk_col2, normal_col1 as some_col", "some_col FROM \"{0}\";', expected_primary_keys={ 'pk_col1': 'int4', 'pk_col2': 'int4' }, expected_has_oids=False, table_has_oids=False, expected_cols_is_editable=[True, True,", "the database.\") def _initialize_query_tool(self): self.trans_id = str(random.randint(1, 9999999)) url = '/datagrid/initialize/query_tool/{0}/{1}/{2}/{3}'.format( self.trans_id, utils.SERVER_GROUP,", 
"datetime import date class TestQueryUpdatableResultset(BaseTestGenerator): \"\"\" This class will test the detection of", "with oids)', dict( sql='SELECT oid, normal_col1, normal_col2 FROM {0};', expected_primary_keys=None, expected_has_oids=True, table_has_oids=True, expected_cols_is_editable=[False,", "expected_has_oids=False, table_has_oids=False, expected_cols_is_editable=[False] )), ('When renaming a primary key', dict( sql='SELECT pk_col1 as", "with oids)', dict( sql='SELECT *, oid FROM {0};', expected_primary_keys={ 'pk_col1': 'int4', 'pk_col2': 'int4'", "commit drop', dict(sql=''' DROP TABLE IF EXISTS {0}; CREATE TEMPORARY TABLE {0} ON", "# Verifying Temporary table result data on Commit Drop self._check_results_column_data(response_data) self._check_editable_columns(response_data) def _check_results_column_data(self,", "expected_primary_keys={ 'pk_col1': 'int4', 'pk_col2': 'int4' }, expected_has_oids=False, table_has_oids=False, expected_cols_is_editable=[True, True] )), ('When selecting", "expected_cols_is_editable=[True, True] )), ('When selecting some of the primary keys of the table',", "not connect to the database.\") def _initialize_query_tool(self): self.trans_id = str(random.randint(1, 9999999)) url =", "class TestTemporaryTable(TestQueryUpdatableResultset): \"\"\" This class will test the query result-set for temporary tables", "self.poll_url = '/sqleditor/poll/{0}'.format(self.trans_id) def _create_test_table(self, table_has_oids=False): create_sql = \"\"\" DROP TABLE IF EXISTS", "('When renaming a normal column', dict( sql='SELECT pk_col1, pk_col2, normal_col1 as some_col FROM", "name', dict( sql='SELECT normal_col1 as pk_col1, pk_col1, pk_col2 FROM {0};', expected_primary_keys={ 'pk_col1': 'int4',", "expected_cols_is_editable=[True, True, True, True, False] )), ('When selecting oids without primary keys (table", "= database_info[\"db_name\"] self.server_id = database_info[\"server_id\"] self.server_version = 
parent_node_dict[\"schema\"][-1][\"server_version\"] if self.server_version >= 120000 and", "FROM {0};', expected_primary_keys=None, expected_has_oids=False, table_has_oids=False, expected_cols_is_editable=[False] )), ('When renaming a primary key', dict(", "= \"test_for_updatable_resultset\" + \\ str(random.randint(1000, 9999)) self._initialize_database_connection() self._initialize_query_tool() self._initialize_urls() def runTest(self): self._create_test_table(table_has_oids=self.table_has_oids) response_data", "table_has_oids=False, expected_cols_is_editable=[False, True, True] )), ('When selecting a normal column twice', dict( sql='SELECT", ")), ('When selecting a non-table column', dict( sql='SELECT pk_col1, pk_col2, normal_col1 || normal_col2", "pk_col1 as some_col, pk_col2 FROM \"{0}\";', expected_primary_keys=None, expected_has_oids=False, table_has_oids=False, expected_cols_is_editable=[False, False] )), ('When", "table_has_oids=False, expected_cols_is_editable=[True, True] )), ('When selecting some of the primary keys of the", "if table_has_oids: create_sql += ' WITH OIDS;' else: create_sql += ';' utils.create_table_with_query(self.server, self.db_name,", "if self.server_version >= 120000 and self.table_has_oids: self.skipTest('Tables with OIDs are not supported starting", "table_has_oids=False, expected_cols_is_editable=[True, True, False] )), ('When selecting primary keys and oids (table with", "Exception(\"Could not connect to the database.\") def _initialize_query_tool(self): self.trans_id = str(random.randint(1, 9999999)) url", ")), ('When selecting primary keys and oids (table with oids)', dict( sql='SELECT *,", "pk_col2, normal_col1 || normal_col2 FROM {0};', expected_primary_keys={'pk_col1': 'int4', 'pk_col2': 'int4' }, expected_has_oids=False, table_has_oids=False,", "pk_col1, pk_col1, pk_col2 FROM {0};', expected_primary_keys={ 'pk_col1': 'int4', 'pk_col2': 'int4' }, expected_has_oids=False, table_has_oids=False,", "'pk_col2': 'int4' }, 
expected_has_oids=False, table_has_oids=False, expected_cols_is_editable=[True, True] )), ('When selecting some of the", "{0}( pk_col1 SERIAL, pk_col2 SERIAL, normal_col1 VARCHAR, normal_col2 VARCHAR, PRIMARY KEY(pk_col1, pk_col2) )", "regression.python_test_utils import test_utils as utils from pgadmin.tools.sqleditor.tests.execute_query_test_utils \\ import execute_query from datetime import", "released under the PostgreSQL Licence # ########################################################################## import json import random from pgadmin.browser.server_groups.servers.databases.tests", "def _initialize_query_tool(self): self.trans_id = str(random.randint(1, 9999999)) url = '/datagrid/initialize/query_tool/{0}/{1}/{2}/{3}'.format( self.trans_id, utils.SERVER_GROUP, self.server_id, self.db_id)", "oids without primary keys (table with oids)', dict( sql='SELECT oid, normal_col1, normal_col2 FROM", "{0}; CREATE TEMPORARY TABLE {0} ON COMMIT DROP AS SELECT CURRENT_DATE AS today;", "pyscopg 2.8 or later') self.db_id = database_info[\"db_id\"] db_con = database_utils.connect_database(self, utils.SERVER_GROUP, self.server_id, self.db_id)", "PostgreSQL Licence # ########################################################################## import json import random from pgadmin.browser.server_groups.servers.databases.tests import utils as", "as pk_col1, pk_col1, pk_col2 FROM {0};', expected_primary_keys={ 'pk_col1': 'int4', 'pk_col2': 'int4' }, expected_has_oids=False,", "table', dict( sql='SELECT pk_col1, pk_col2 FROM {0};', expected_primary_keys={ 'pk_col1': 'int4', 'pk_col2': 'int4' },", "expected_cols_is_editable=[False, False] )) ] def setUp(self): self.test_table_name = \"test_for_updatable_resultset\" + \\ str(random.randint(1000, 9999))", "of the Temporary table, on commit drop', dict(sql=''' DROP TABLE IF EXISTS {0};", "self.db_id) response = self.tester.post(url) self.assertEquals(response.status_code, 200) def _initialize_urls(self): self.start_query_tool_url 
= \\ '/sqleditor/query_tool/start/{0}'.format(self.trans_id) self.poll_url", "normal column', dict( sql='SELECT pk_col1, pk_col2, normal_col1 as some_col FROM \"{0}\";', expected_primary_keys={ 'pk_col1':", "zip(columns_info, self.expected_cols_is_editable): self.assertEquals(col['is_editable'], expected_is_editable) def _initialize_database_connection(self): database_info = parent_node_dict[\"database\"][-1] self.db_name = database_info[\"db_name\"] self.server_id", "\\ str(random.randint(1000, 9999)) self._initialize_database_connection() self._initialize_query_tool() self._initialize_urls() def runTest(self): self._create_test_table(table_has_oids=self.table_has_oids) response_data = self._execute_select_sql() self._check_primary_keys(response_data)", "column', dict( sql='SELECT pk_col1, pk_col2, normal_col1 as some_col FROM \"{0}\";', expected_primary_keys={ 'pk_col1': 'int4',", "all columns of the Temporary table, on commit drop', dict(sql=''' DROP TABLE IF", "non-table column', dict( sql='SELECT pk_col1, pk_col2, normal_col1 || normal_col2 FROM {0};', expected_primary_keys={'pk_col1': 'int4',", "self._execute_select_sql() self._check_primary_keys(response_data) self._check_oids(response_data) # Verifying Temporary table result data on Commit Drop self._check_results_column_data(response_data)", "random from pgadmin.browser.server_groups.servers.databases.tests import utils as \\ database_utils from pgadmin.utils.route import BaseTestGenerator from", "TABLE IF EXISTS {0}; CREATE TABLE {0}( pk_col1 SERIAL, pk_col2 SERIAL, normal_col1 VARCHAR,", "under the PostgreSQL Licence # ########################################################################## import json import random from pgadmin.browser.server_groups.servers.databases.tests import", "= parent_node_dict[\"database\"][-1] self.db_name = database_info[\"db_name\"] self.server_id = database_info[\"server_id\"] self.server_version = parent_node_dict[\"schema\"][-1][\"server_version\"] if 
self.server_version", "twice', dict( sql='SELECT pk_col1, pk_col2, normal_col1, normal_col1 FROM {0};', expected_primary_keys={ 'pk_col1': 'int4', 'pk_col2':", "dict( sql='SELECT pk_col1, pk_col2 FROM {0};', expected_primary_keys={ 'pk_col1': 'int4', 'pk_col2': 'int4' }, expected_has_oids=False,", "OIDS;' else: create_sql += ';' utils.create_table_with_query(self.server, self.db_name, create_sql) class TestTemporaryTable(TestQueryUpdatableResultset): \"\"\" This class", "self.expected_cols_is_editable): self.assertEquals(col['is_editable'], expected_is_editable) def _initialize_database_connection(self): database_info = parent_node_dict[\"database\"][-1] self.db_name = database_info[\"db_name\"] self.server_id =", "pk_col2 FROM {0};', expected_primary_keys={ 'pk_col1': 'int4', 'pk_col2': 'int4' }, expected_has_oids=False, table_has_oids=False, expected_cols_is_editable=[False, True,", "'int4', 'pk_col2': 'int4' }, expected_has_oids=False, table_has_oids=False, expected_cols_is_editable=[False, True, True] )), ('When selecting a", "expected_is_editable) def _initialize_database_connection(self): database_info = parent_node_dict[\"database\"][-1] self.db_name = database_info[\"db_name\"] self.server_id = database_info[\"server_id\"] self.server_version", "expected_results_column_is_editable=False, table_has_oids=False, expected_cols_is_editable=[False] )) ] def runTest(self): response_data = self._execute_select_sql() self._check_primary_keys(response_data) self._check_oids(response_data) #", "oid FROM {0};', expected_primary_keys={ 'pk_col1': 'int4', 'pk_col2': 'int4' }, expected_has_oids=True, table_has_oids=True, expected_cols_is_editable=[True, True,", "2013 - 2020, The pgAdmin Development Team # This software is released under", "oids)', dict( sql='SELECT oid, normal_col1, normal_col2 FROM {0};', expected_primary_keys=None, expected_has_oids=True, table_has_oids=True, expected_cols_is_editable=[False, True,", "= self._execute_select_sql() 
self._check_primary_keys(response_data) self._check_oids(response_data) self._check_editable_columns(response_data) def tearDown(self): # Disconnect the database database_utils.disconnect_database(self, self.server_id,", "def _check_primary_keys(self, response_data): primary_keys = response_data['data']['primary_keys'] self.assertEquals(primary_keys, self.expected_primary_keys) def _check_oids(self, response_data): has_oids =", "FROM {0};', expected_primary_keys={ 'pk_col1': 'int4', 'pk_col2': 'int4' }, expected_has_oids=False, table_has_oids=False, expected_cols_is_editable=[True, True, True,", "'int4' }, expected_has_oids=False, table_has_oids=False, expected_cols_is_editable=[True, True] )), ('When selecting some of the primary", "dict( sql='SELECT normal_col1, normal_col2 FROM {0};', expected_primary_keys=None, expected_has_oids=False, table_has_oids=True, expected_cols_is_editable=[False, False] )) ]", "< 2.8: self.skipTest('Updatable resultsets require pyscopg 2.8 or later') self.db_id = database_info[\"db_id\"] db_con", "= self._execute_select_sql() self._check_primary_keys(response_data) self._check_oids(response_data) # Verifying Temporary table result data on Commit Drop", "pk_col1, pk_col2 FROM {0};', expected_primary_keys={ 'pk_col1': 'int4', 'pk_col2': 'int4' }, expected_has_oids=False, table_has_oids=False, expected_cols_is_editable=[False,", "# ########################################################################## import json import random from pgadmin.browser.server_groups.servers.databases.tests import utils as \\ database_utils", "the table', dict( sql='SELECT * FROM {0};', expected_primary_keys={ 'pk_col1': 'int4', 'pk_col2': 'int4' },", "utils as \\ database_utils from pgadmin.utils.route import BaseTestGenerator from regression import parent_node_dict from", "some of the primary keys of the table', dict( sql='SELECT pk_col2 FROM {0};',", "expected_primary_keys=None, expected_has_oids=False, table_has_oids=False, 
expected_cols_is_editable=[False] )), ('When renaming a primary key', dict( sql='SELECT pk_col1", "'pk_col1': 'int4', 'pk_col2': 'int4' }, expected_has_oids=False, table_has_oids=False, expected_cols_is_editable=[False, True, True] )), ('When selecting", "FROM {0};', expected_primary_keys={ 'pk_col1': 'int4', 'pk_col2': 'int4' }, expected_has_oids=False, table_has_oids=False, expected_cols_is_editable=[False, True, True]", "expected_primary_keys=None, expected_has_oids=False, table_has_oids=True, expected_cols_is_editable=[False, False] )) ] def setUp(self): self.test_table_name = \"test_for_updatable_resultset\" +", ")), ('When selecting none of the primary keys of the table', dict( sql='SELECT", "the PostgreSQL Licence # ########################################################################## import json import random from pgadmin.browser.server_groups.servers.databases.tests import utils", "expected_has_oids=False, table_has_oids=False, expected_cols_is_editable=[True, True] )), ('When selecting some of the primary keys of", "sql='SELECT pk_col1, pk_col2, normal_col1 || normal_col2 FROM {0};', expected_primary_keys={'pk_col1': 'int4', 'pk_col2': 'int4' },", "self.db_id) def _execute_select_sql(self): sql = self.sql.format(self.test_table_name) is_success, response_data = \\ execute_query(tester=self.tester, query=sql, poll_url=self.poll_url,", "def _check_results_column_data(self, response_data): results_column_data = response_data['data']['result'] for result_data, expected_is_editable in \\ zip(results_column_data, self.expected_results_column_data):", "SERIAL, normal_col1 VARCHAR, normal_col2 VARCHAR, PRIMARY KEY(pk_col1, pk_col2) ) \"\"\".format(self.test_table_name) if table_has_oids: create_sql", "response_data): has_oids = response_data['data']['has_oids'] self.assertEquals(has_oids, self.expected_has_oids) def _check_editable_columns(self, response_data): columns_info = response_data['data']['colinfo'] for", ")), ('When selecting some of the primary 
keys of the table', dict( sql='SELECT", "{0};''', expected_primary_keys=None, expected_results_column_data=[[date.today().strftime( \"%Y-%m-%d\")]], expected_has_oids=False, expected_results_column_is_editable=False, table_has_oids=False, expected_cols_is_editable=[False] )) ] def runTest(self): response_data", "'/sqleditor/poll/{0}'.format(self.trans_id) def _create_test_table(self, table_has_oids=False): create_sql = \"\"\" DROP TABLE IF EXISTS {0}; CREATE", "table_has_oids=True, expected_cols_is_editable=[False, True, True] )), ('When selecting none of the primary keys or", "table', dict( sql='SELECT * FROM {0};', expected_primary_keys={ 'pk_col1': 'int4', 'pk_col2': 'int4' }, expected_has_oids=False,", "def _execute_select_sql(self): sql = self.sql.format(self.test_table_name) is_success, response_data = \\ execute_query(tester=self.tester, query=sql, poll_url=self.poll_url, start_query_tool_url=self.start_query_tool_url)", "expected_cols_is_editable=[True, True, False] )), ('When renaming a normal column to a primary key", "def tearDown(self): # Disconnect the database database_utils.disconnect_database(self, self.server_id, self.db_id) def _execute_select_sql(self): sql =", "{0};', expected_primary_keys=None, expected_has_oids=False, table_has_oids=False, expected_cols_is_editable=[False] )), ('When renaming a primary key', dict( sql='SELECT", "\\ zip(columns_info, self.expected_cols_is_editable): self.assertEquals(col['is_editable'], expected_is_editable) def _initialize_database_connection(self): database_info = parent_node_dict[\"database\"][-1] self.db_name = database_info[\"db_name\"]", "expected_primary_keys=None, expected_results_column_data=[[date.today().strftime( \"%Y-%m-%d\")]], expected_has_oids=False, expected_results_column_is_editable=False, table_has_oids=False, expected_cols_is_editable=[False] )) ] def runTest(self): response_data =", "primary keys (table with oids)', dict( sql='SELECT oid, normal_col1, normal_col2 FROM {0};', 
expected_primary_keys=None,", "create_sql += ';' utils.create_table_with_query(self.server, self.db_name, create_sql) class TestTemporaryTable(TestQueryUpdatableResultset): \"\"\" This class will test", "normal_col2 FROM {0};', expected_primary_keys={'pk_col1': 'int4', 'pk_col2': 'int4' }, expected_has_oids=False, table_has_oids=False, expected_cols_is_editable=[True, True, False]", "_check_editable_columns(self, response_data): columns_info = response_data['data']['colinfo'] for col, expected_is_editable in \\ zip(columns_info, self.expected_cols_is_editable): self.assertEquals(col['is_editable'],", "primary key', dict( sql='SELECT pk_col1 as some_col, pk_col2 FROM \"{0}\";', expected_primary_keys=None, expected_has_oids=False, table_has_oids=False,", "primary keys of the table', dict( sql='SELECT normal_col1 FROM {0};', expected_primary_keys=None, expected_has_oids=False, table_has_oids=False,", "for temporary tables \"\"\" scenarios = [ ('When selecting all columns of the", "as some_col, pk_col2 FROM \"{0}\";', expected_primary_keys=None, expected_has_oids=False, table_has_oids=False, expected_cols_is_editable=[False, False] )), ('When renaming", "database_info[\"db_name\"] self.server_id = database_info[\"server_id\"] self.server_version = parent_node_dict[\"schema\"][-1][\"server_version\"] if self.server_version >= 120000 and self.table_has_oids:", "CREATE TABLE {0}( pk_col1 SERIAL, pk_col2 SERIAL, normal_col1 VARCHAR, normal_col2 VARCHAR, PRIMARY KEY(pk_col1,", "True, True, True, False] )), ('When selecting oids without primary keys (table with", "normal_col1 FROM {0};', expected_primary_keys=None, expected_has_oids=False, table_has_oids=False, expected_cols_is_editable=[False] )), ('When renaming a primary key',", "dict( sql='SELECT pk_col1, pk_col2, normal_col1 || normal_col2 FROM {0};', expected_primary_keys={'pk_col1': 'int4', 'pk_col2': 'int4'", "expected_has_oids=False, table_has_oids=False, expected_cols_is_editable=[True, True, True, True] )), ('When 
selecting all primary keys of", "self._check_oids(response_data) # Verifying Temporary table result data on Commit Drop self._check_results_column_data(response_data) self._check_editable_columns(response_data) def" ]
[ "as np import pickle as pkl def function_generator(init_num): seq = np.array([], dtype='int') n", "else: next_number = (3*n)+1 next_number = np.asarray(next_number, dtype='int') seq = np.append(seq, next_number) n", "= next_number return seq output_seq_data = [] output_seq_length = [] x_train = []", "= n/2 next_number = np.asarray(next_number, dtype='int') seq = np.append(seq, next_number) if next_number==1: break", "np.asarray(output_seq_length) max_length = output_seq_length.max() # print(max_length) # print(x_train[26]) # np.save('generated_data.npy', gen_data) # np.save('x_train.npy',", "n) while True: if ((n%2)==0): next_number = n/2 next_number = np.asarray(next_number, dtype='int') seq", "= init_num seq = np.append(seq, n) while True: if ((n%2)==0): next_number = n/2", "= np.asarray(output_seq_data) x_train = np.asarray(x_train) y_train = np.asarray(y_train) print(y_train[26]) output_seq_length = np.asarray(output_seq_length) max_length", "dtype='int') output_seq_data.append(sequence) output_seq_length.append(seq_len) x_train.append(x_training) y_train.append(y_training) output_seq_data = np.asarray(output_seq_data) x_train = np.asarray(x_train) y_train =", "= [] y_train = [] num = 0 for n in range(0,10000): sequence", "seq_len = len(sequence) x_training = sequence[:(seq_len-1)] x_training = np.array(x_training, dtype='int') y_training = sequence[1:seq_len]", "print(y_train[26]) output_seq_length = np.asarray(output_seq_length) max_length = output_seq_length.max() # print(max_length) # print(x_train[26]) # np.save('generated_data.npy',", "output_seq_data.append(sequence) output_seq_length.append(seq_len) x_train.append(x_training) y_train.append(y_training) output_seq_data = np.asarray(output_seq_data) x_train = np.asarray(x_train) y_train = np.asarray(y_train)", "output_seq_data = np.asarray(output_seq_data) x_train = np.asarray(x_train) y_train = np.asarray(y_train) print(y_train[26]) output_seq_length = np.asarray(output_seq_length)", 
"np.asarray(y_train) print(y_train[26]) output_seq_length = np.asarray(output_seq_length) max_length = output_seq_length.max() # print(max_length) # print(x_train[26]) #", "x_train.append(x_training) y_train.append(y_training) output_seq_data = np.asarray(output_seq_data) x_train = np.asarray(x_train) y_train = np.asarray(y_train) print(y_train[26]) output_seq_length", "if ((n%2)==0): next_number = n/2 next_number = np.asarray(next_number, dtype='int') seq = np.append(seq, next_number)", "seq = np.append(seq, next_number) if next_number==1: break else: next_number = (3*n)+1 next_number =", "dtype='int') n = init_num seq = np.append(seq, n) while True: if ((n%2)==0): next_number", "y_train.append(y_training) output_seq_data = np.asarray(output_seq_data) x_train = np.asarray(x_train) y_train = np.asarray(y_train) print(y_train[26]) output_seq_length =", "def function_generator(init_num): seq = np.array([], dtype='int') n = init_num seq = np.append(seq, n)", "= np.asarray(y_train) print(y_train[26]) output_seq_length = np.asarray(output_seq_length) max_length = output_seq_length.max() # print(max_length) # print(x_train[26])", "= output_seq_length.max() # print(max_length) # print(x_train[26]) # np.save('generated_data.npy', gen_data) # np.save('x_train.npy', x_train) #", "len(sequence) x_training = sequence[:(seq_len-1)] x_training = np.array(x_training, dtype='int') y_training = sequence[1:seq_len] y_training =", "sequence[1:seq_len] y_training = np.array(y_training, dtype='int') output_seq_data.append(sequence) output_seq_length.append(seq_len) x_train.append(x_training) y_train.append(y_training) output_seq_data = np.asarray(output_seq_data) x_train", "if next_number==1: break else: next_number = (3*n)+1 next_number = np.asarray(next_number, dtype='int') seq =", "seq = np.array([], dtype='int') n = init_num seq = np.append(seq, n) while True:", "x_training = sequence[:(seq_len-1)] x_training = np.array(x_training, dtype='int') y_training = sequence[1:seq_len] 
y_training = np.array(y_training,", "True: if ((n%2)==0): next_number = n/2 next_number = np.asarray(next_number, dtype='int') seq = np.append(seq,", "dtype='int') seq = np.append(seq, next_number) if next_number==1: break else: next_number = (3*n)+1 next_number", "output_seq_length = [] x_train = [] y_train = [] num = 0 for", "return seq output_seq_data = [] output_seq_length = [] x_train = [] y_train =", "= len(sequence) x_training = sequence[:(seq_len-1)] x_training = np.array(x_training, dtype='int') y_training = sequence[1:seq_len] y_training", "x_training = np.array(x_training, dtype='int') y_training = sequence[1:seq_len] y_training = np.array(y_training, dtype='int') output_seq_data.append(sequence) output_seq_length.append(seq_len)", "[] y_train = [] num = 0 for n in range(0,10000): sequence =", "range(0,10000): sequence = function_generator(n+1) seq_len = len(sequence) x_training = sequence[:(seq_len-1)] x_training = np.array(x_training,", "output_seq_length.append(seq_len) x_train.append(x_training) y_train.append(y_training) output_seq_data = np.asarray(output_seq_data) x_train = np.asarray(x_train) y_train = np.asarray(y_train) print(y_train[26])", "= np.asarray(next_number, dtype='int') seq = np.append(seq, next_number) n = next_number return seq output_seq_data", "seq output_seq_data = [] output_seq_length = [] x_train = [] y_train = []", "= np.asarray(x_train) y_train = np.asarray(y_train) print(y_train[26]) output_seq_length = np.asarray(output_seq_length) max_length = output_seq_length.max() #", "break else: next_number = (3*n)+1 next_number = np.asarray(next_number, dtype='int') seq = np.append(seq, next_number)", "while True: if ((n%2)==0): next_number = n/2 next_number = np.asarray(next_number, dtype='int') seq =", "dtype='int') y_training = sequence[1:seq_len] y_training = np.array(y_training, dtype='int') output_seq_data.append(sequence) output_seq_length.append(seq_len) x_train.append(x_training) y_train.append(y_training) output_seq_data", 
"np.asarray(output_seq_data) x_train = np.asarray(x_train) y_train = np.asarray(y_train) print(y_train[26]) output_seq_length = np.asarray(output_seq_length) max_length =", "dtype='int') seq = np.append(seq, next_number) n = next_number return seq output_seq_data = []", "= 0 for n in range(0,10000): sequence = function_generator(n+1) seq_len = len(sequence) x_training", "np.asarray(next_number, dtype='int') seq = np.append(seq, next_number) if next_number==1: break else: next_number = (3*n)+1", "next_number = np.asarray(next_number, dtype='int') seq = np.append(seq, next_number) if next_number==1: break else: next_number", "init_num seq = np.append(seq, n) while True: if ((n%2)==0): next_number = n/2 next_number", "seq = np.append(seq, n) while True: if ((n%2)==0): next_number = n/2 next_number =", "np.append(seq, next_number) if next_number==1: break else: next_number = (3*n)+1 next_number = np.asarray(next_number, dtype='int')", "y_train = np.asarray(y_train) print(y_train[26]) output_seq_length = np.asarray(output_seq_length) max_length = output_seq_length.max() # print(max_length) #", "[] num = 0 for n in range(0,10000): sequence = function_generator(n+1) seq_len =", "np.append(seq, next_number) n = next_number return seq output_seq_data = [] output_seq_length = []", "= np.append(seq, next_number) n = next_number return seq output_seq_data = [] output_seq_length =", "next_number return seq output_seq_data = [] output_seq_length = [] x_train = [] y_train", "= (3*n)+1 next_number = np.asarray(next_number, dtype='int') seq = np.append(seq, next_number) n = next_number", "= function_generator(n+1) seq_len = len(sequence) x_training = sequence[:(seq_len-1)] x_training = np.array(x_training, dtype='int') y_training", "for n in range(0,10000): sequence = function_generator(n+1) seq_len = len(sequence) x_training = sequence[:(seq_len-1)]", "[] x_train = [] y_train = [] num = 0 for n in", "x_train = [] y_train = [] num = 0 for n in range(0,10000):", "= np.append(seq, n) 
while True: if ((n%2)==0): next_number = n/2 next_number = np.asarray(next_number,", "= [] output_seq_length = [] x_train = [] y_train = [] num =", "[] output_seq_length = [] x_train = [] y_train = [] num = 0", "function_generator(init_num): seq = np.array([], dtype='int') n = init_num seq = np.append(seq, n) while", "= np.asarray(next_number, dtype='int') seq = np.append(seq, next_number) if next_number==1: break else: next_number =", "= np.array(y_training, dtype='int') output_seq_data.append(sequence) output_seq_length.append(seq_len) x_train.append(x_training) y_train.append(y_training) output_seq_data = np.asarray(output_seq_data) x_train = np.asarray(x_train)", "numpy as np import pickle as pkl def function_generator(init_num): seq = np.array([], dtype='int')", "np.array([], dtype='int') n = init_num seq = np.append(seq, n) while True: if ((n%2)==0):", "= np.array([], dtype='int') n = init_num seq = np.append(seq, n) while True: if", "num = 0 for n in range(0,10000): sequence = function_generator(n+1) seq_len = len(sequence)", "next_number==1: break else: next_number = (3*n)+1 next_number = np.asarray(next_number, dtype='int') seq = np.append(seq,", "np import pickle as pkl def function_generator(init_num): seq = np.array([], dtype='int') n =", "import pickle as pkl def function_generator(init_num): seq = np.array([], dtype='int') n = init_num", "next_number = (3*n)+1 next_number = np.asarray(next_number, dtype='int') seq = np.append(seq, next_number) n =", "pkl def function_generator(init_num): seq = np.array([], dtype='int') n = init_num seq = np.append(seq,", "= np.append(seq, next_number) if next_number==1: break else: next_number = (3*n)+1 next_number = np.asarray(next_number,", "sequence = function_generator(n+1) seq_len = len(sequence) x_training = sequence[:(seq_len-1)] x_training = np.array(x_training, dtype='int')", "in range(0,10000): sequence = function_generator(n+1) seq_len = len(sequence) x_training = sequence[:(seq_len-1)] x_training =", 
"np.array(x_training, dtype='int') y_training = sequence[1:seq_len] y_training = np.array(y_training, dtype='int') output_seq_data.append(sequence) output_seq_length.append(seq_len) x_train.append(x_training) y_train.append(y_training)", "output_seq_length = np.asarray(output_seq_length) max_length = output_seq_length.max() # print(max_length) # print(x_train[26]) # np.save('generated_data.npy', gen_data)", "output_seq_data = [] output_seq_length = [] x_train = [] y_train = [] num", "np.append(seq, n) while True: if ((n%2)==0): next_number = n/2 next_number = np.asarray(next_number, dtype='int')", "y_training = np.array(y_training, dtype='int') output_seq_data.append(sequence) output_seq_length.append(seq_len) x_train.append(x_training) y_train.append(y_training) output_seq_data = np.asarray(output_seq_data) x_train =", "# print(max_length) # print(x_train[26]) # np.save('generated_data.npy', gen_data) # np.save('x_train.npy', x_train) # np.save('y_train.npy', y_train)", "pickle as pkl def function_generator(init_num): seq = np.array([], dtype='int') n = init_num seq", "y_train = [] num = 0 for n in range(0,10000): sequence = function_generator(n+1)", "= [] x_train = [] y_train = [] num = 0 for n", "n = init_num seq = np.append(seq, n) while True: if ((n%2)==0): next_number =", "next_number) n = next_number return seq output_seq_data = [] output_seq_length = [] x_train", "(3*n)+1 next_number = np.asarray(next_number, dtype='int') seq = np.append(seq, next_number) n = next_number return", "= np.asarray(output_seq_length) max_length = output_seq_length.max() # print(max_length) # print(x_train[26]) # np.save('generated_data.npy', gen_data) #", "as pkl def function_generator(init_num): seq = np.array([], dtype='int') n = init_num seq =", "n = next_number return seq output_seq_data = [] output_seq_length = [] x_train =", "max_length = output_seq_length.max() # print(max_length) # print(x_train[26]) # np.save('generated_data.npy', gen_data) # np.save('x_train.npy', 
x_train)", "next_number = n/2 next_number = np.asarray(next_number, dtype='int') seq = np.append(seq, next_number) if next_number==1:", "= sequence[:(seq_len-1)] x_training = np.array(x_training, dtype='int') y_training = sequence[1:seq_len] y_training = np.array(y_training, dtype='int')", "= [] num = 0 for n in range(0,10000): sequence = function_generator(n+1) seq_len", "n in range(0,10000): sequence = function_generator(n+1) seq_len = len(sequence) x_training = sequence[:(seq_len-1)] x_training", "0 for n in range(0,10000): sequence = function_generator(n+1) seq_len = len(sequence) x_training =", "next_number) if next_number==1: break else: next_number = (3*n)+1 next_number = np.asarray(next_number, dtype='int') seq", "np.array(y_training, dtype='int') output_seq_data.append(sequence) output_seq_length.append(seq_len) x_train.append(x_training) y_train.append(y_training) output_seq_data = np.asarray(output_seq_data) x_train = np.asarray(x_train) y_train", "= sequence[1:seq_len] y_training = np.array(y_training, dtype='int') output_seq_data.append(sequence) output_seq_length.append(seq_len) x_train.append(x_training) y_train.append(y_training) output_seq_data = np.asarray(output_seq_data)", "np.asarray(x_train) y_train = np.asarray(y_train) print(y_train[26]) output_seq_length = np.asarray(output_seq_length) max_length = output_seq_length.max() # print(max_length)", "np.asarray(next_number, dtype='int') seq = np.append(seq, next_number) n = next_number return seq output_seq_data =", "= np.array(x_training, dtype='int') y_training = sequence[1:seq_len] y_training = np.array(y_training, dtype='int') output_seq_data.append(sequence) output_seq_length.append(seq_len) x_train.append(x_training)", "next_number = np.asarray(next_number, dtype='int') seq = np.append(seq, next_number) n = next_number return seq", "x_train = np.asarray(x_train) y_train = np.asarray(y_train) print(y_train[26]) output_seq_length = np.asarray(output_seq_length) max_length = 
output_seq_length.max()", "import numpy as np import pickle as pkl def function_generator(init_num): seq = np.array([],", "sequence[:(seq_len-1)] x_training = np.array(x_training, dtype='int') y_training = sequence[1:seq_len] y_training = np.array(y_training, dtype='int') output_seq_data.append(sequence)", "output_seq_length.max() # print(max_length) # print(x_train[26]) # np.save('generated_data.npy', gen_data) # np.save('x_train.npy', x_train) # np.save('y_train.npy',", "((n%2)==0): next_number = n/2 next_number = np.asarray(next_number, dtype='int') seq = np.append(seq, next_number) if", "seq = np.append(seq, next_number) n = next_number return seq output_seq_data = [] output_seq_length", "function_generator(n+1) seq_len = len(sequence) x_training = sequence[:(seq_len-1)] x_training = np.array(x_training, dtype='int') y_training =", "y_training = sequence[1:seq_len] y_training = np.array(y_training, dtype='int') output_seq_data.append(sequence) output_seq_length.append(seq_len) x_train.append(x_training) y_train.append(y_training) output_seq_data =", "n/2 next_number = np.asarray(next_number, dtype='int') seq = np.append(seq, next_number) if next_number==1: break else:" ]
[ "#zwraca znormalizowany wektor [x, y] def normalized(x, y=None): if y is not None:", "len(self.elements) == 0 class MedianFilter: #size to wielkosc kernela filtra def __init__(self, size):", "def pop(self): return heapq.heappop(self.elements)[1] #czy kolejka jest pusta? def empty(self): return len(self.elements) ==", "y is not None: if x == 0 and y == 0: return", "mediane liczb z tablicy 'data' def Median(data): order = sorted(data) size = len(order)", "= len(target) sum = 0.0 for i in range(size): a = int(target[i]) -", "+ order[size]) / 2 return order[size//2] #kolejka priorytetowa class PriorityQueue: def __init__(self): self.elements", "int(target[i]) - value[i] sum += a * a return sum #zwraca wartosc 'num'", "#zwraca kwadrat liczby, ale z jej znakiem def signedSqr(num): if num >= 0:", "(x[0]/mag, x[1]/mag) #zwraca roznice kwadratowa miedzy target a value def errorsquare(target, value): size", "/ (1 - num) return num / (1 + num) #zwraca kwadrat liczby,", "y=None): if y is not None: return x*x + y*y return x[0] *", "if num > 0: return 1.0 if num < 0: return -1.0 return", "#interpolacja liniowa def lerp(a, b, c): return c*b + (1-c) * a #zwraca", "self.index == self.size: self.index = 0 #zwraca przefiltrowana wartosc def getValue(self): return Median(self.data)", "- A[1] * B[0] #zwraca mediane liczb z tablicy 'data' def Median(data): order", "* size self.size = size self.index = 0 #dodaje element do tablicy danych", "def lerp(a, b, c): return c*b + (1-c) * a #zwraca dlugosc wektora", "* num #zwraca kwadrat liczby ze znakiem; f(x)=(x+1)^2-1 def signedBaseOneSqr(num): if num >=", "get(self): return self.data[self.index] #symuluje uplyw czasu o 1 jednostke; musi byc wywolane pomiedzy", "B def sqrDistance(A, B): x = A[0] - B[0] y = A[1] -", "target a value def errorsquare(target, value): size = len(target) sum = 0.0 for", "= magnitude(x) return (x[0]/mag, x[1]/mag) #zwraca roznice kwadratowa miedzy target a value def", "x*x + y*y #zwraca znormalizowany wektor [x, y] 
def normalized(x, y=None): if y", "ze znakiem; f(x)=(x+1)^2-1 def signedBaseOneSqr(num): if num >= 0: return (num+1)**2 - 1", "getValue(self): return Median(self.data) #pozwala na symulowanie opoznienia w pomiarach class SignalDelay: #'delay' oznacza,", "[x, y] def magnitude(x, y=None): if y is not None: return math.sqrt(x*x +", "B[1] return math.sqrt(x*x + y*y) #zwraca kwadrat odleglosci miedzy punktami A i B", "+ x[1] * x[1] #zwraca dlugosc wektora [x, y] def magnitude(x, y=None): if", "tablicy 'data' def Median(data): order = sorted(data) size = len(order) if size %", "lerp(a, b, c): return c*b + (1-c) * a #zwraca dlugosc wektora [x,", "PriorityQueue: def __init__(self): self.elements = [] #dodaje element do kolejki def push(self, item,", "order = sorted(data) size = len(order) if size % 2 == 0: size", "range(size): a = int(target[i]) - value[i] sum += a * a return sum", "kwadrat liczby ze znakiem; f(x)=(x+1)^2-1 def signedBaseOneSqr(num): if num >= 0: return (num+1)**2", "a #zwraca dlugosc wektora [x, y] do kwadratu def sqrMagnitude(x, y=None): if y", "priorytetowa class PriorityQueue: def __init__(self): self.elements = [] #dodaje element do kolejki def", "2 == 0: size = size // 2 return (order[size-1] + order[size]) /", "num * num return -num * num #zwraca kwadrat liczby ze znakiem; f(x)=(x+1)^2-1", "self.index = 0 #dodaje element do struktury danych def push(self, element): self.data[self.index] =", "if size % 2 == 0: size = size // 2 return (order[size-1]", "miedzy wektorami A i B def dot(A, B): return A[0] * B[0] +", "dot(A, B): return A[0] * B[0] + A[1] * B[1] #zwraca wartosc iloczynu", "__init__(self, size): self.data = [0.0] * size self.size = size self.index = 0", "= 0 #dodaje element do struktury danych def push(self, element): self.data[self.index] = element", "y is not None: return x*x + y*y return x[0] * x[0] +", "is not None: return x*x + y*y return x[0] * x[0] + x[1]", "#zwraca kwadrat odleglosci miedzy punktami A i B def sqrDistance(A, B): x =", 
"num) return num / (1 + num) #zwraca kwadrat liczby, ale z jej", "math import heapq #zwrca znak liczby def sign(num): if num > 0: return", "/ (1 + num) #zwraca kwadrat liczby, ale z jej znakiem def signedSqr(num):", "if y is not None: return math.sqrt(x*x + y*y) return math.sqrt(x[0]*x[0] + x[1]*x[1])", "+ y*y #zwraca znormalizowany wektor [x, y] def normalized(x, y=None): if y is", "def clamp(num, _min, _max): if num > _max: return _max elif num <", "y/mag) else: if x[0] == 0 and x[1] == 0: return (0, 0)", "ile pomiarow jest miedzy aktualna wartoscia, a opozniana def __init__(self, delay, dtype): self.delay", "import math import heapq #zwrca znak liczby def sign(num): if num > 0:", "kwadratowa miedzy target a value def errorsquare(target, value): size = len(target) sum =", "#zwraca przefiltrowana wartosc def getValue(self): return Median(self.data) #pozwala na symulowanie opoznienia w pomiarach", "len(target) sum = 0.0 for i in range(size): a = int(target[i]) - value[i]", "miedzy wektorami A i B def cross(A, B): return A[0] * B[1] -", "num < 0: return num / (1 - num) return num / (1", "return order[size//2] #kolejka priorytetowa class PriorityQueue: def __init__(self): self.elements = [] #dodaje element", "element): self.data[self.index] = element #zwraca element opozniony o 'delay' def get(self): return self.data[self.index]", "dlugosc wektora [x, y] do kwadratu def sqrMagnitude(x, y=None): if y is not", "[x, y] do kwadratu def sqrMagnitude(x, y=None): if y is not None: return", "= [dtype] * delay self.index = 0 #dodaje element do struktury danych def", "self.index = 0 #dodaje element do tablicy danych filtra def push(self, num): self.data[self.index]", "znakiem; f(x)=(x+1)^2-1 def signedBaseOneSqr(num): if num >= 0: return (num+1)**2 - 1 return", "poczatku kolejki def pop(self): return heapq.heappop(self.elements)[1] #czy kolejka jest pusta? 
def empty(self): return", "#zwraca wartosc iloczynu wektorowego miedzy wektorami A i B def cross(A, B): return", "num >= 0: return (num+1)**2 - 1 return 1 - (1-num)**2 #interpolacja liniowa", "A[0] - B[0] y = A[1] - B[1] return x*x + y*y #zwraca", "uplyw czasu o 1 jednostke; musi byc wywolane pomiedzy uzyciem 'push' a 'get'", "MedianFilter: #size to wielkosc kernela filtra def __init__(self, size): self.data = [0.0] *", "== 0: return (0, 0) mag = magnitude(x, y) return (x/mag, y/mag) else:", "0: return (0, 0) mag = magnitude(x) return (x[0]/mag, x[1]/mag) #zwraca roznice kwadratowa", "num < _min: return _min return num #zwraca wartosc iloczynu skalarnego miedzy wektorami", "byc wywolane pomiedzy uzyciem 'push' a 'get' def tick(self): self.index += 1 if", "= A[1] - B[1] return x*x + y*y #zwraca znormalizowany wektor [x, y]", "skalarnego miedzy wektorami A i B def dot(A, B): return A[0] * B[0]", "0 #dodaje element do struktury danych def push(self, element): self.data[self.index] = element #zwraca", "not None: return math.sqrt(x*x + y*y) return math.sqrt(x[0]*x[0] + x[1]*x[1]) #zwraca odleglosc miedzy", "kolejki def push(self, item, priority): heapq.heappush(self.elements, (priority, item)) #zdejmuje i zwraca element z", "cross(A, B): return A[0] * B[1] - A[1] * B[0] #zwraca mediane liczb", "#zwraca wartosc iloczynu skalarnego miedzy wektorami A i B def dot(A, B): return", "self.index += 1 if self.index == self.size: self.index = 0 #zwraca przefiltrowana wartosc", "return 0.0 def softsign(num): if num < 0: return num / (1 -", "class MedianFilter: #size to wielkosc kernela filtra def __init__(self, size): self.data = [0.0]", "sum #zwraca wartosc 'num' ograniczana przez <_min, _max> def clamp(num, _min, _max): if", "B[0] y = A[1] - B[1] return x*x + y*y #zwraca znormalizowany wektor", "size = len(order) if size % 2 == 0: size = size //", "[dtype] * delay self.index = 0 #dodaje element do struktury danych def push(self,", "#zwraca element opozniony o 'delay' def 
get(self): return self.data[self.index] #symuluje uplyw czasu o", "- B[0] y = A[1] - B[1] return x*x + y*y #zwraca znormalizowany", "< 0: return num / (1 - num) return num / (1 +", "/ 2 return order[size//2] #kolejka priorytetowa class PriorityQueue: def __init__(self): self.elements = []", "_max: return _max elif num < _min: return _min return num #zwraca wartosc", "order[size//2] #kolejka priorytetowa class PriorityQueue: def __init__(self): self.elements = [] #dodaje element do", "A i B def sqrDistance(A, B): x = A[0] - B[0] y =", "musi byc wywolane pomiedzy uzyciem 'push' a 'get' def tick(self): self.index += 1", "num return -num * num #zwraca kwadrat liczby ze znakiem; f(x)=(x+1)^2-1 def signedBaseOneSqr(num):", "z poczatku kolejki def pop(self): return heapq.heappop(self.elements)[1] #czy kolejka jest pusta? def empty(self):", "punktami A i B def distance(A, B): x = A[0] - B[0] y", "not None: return x*x + y*y return x[0] * x[0] + x[1] *", "element z poczatku kolejki def pop(self): return heapq.heappop(self.elements)[1] #czy kolejka jest pusta? def", "0.0 for i in range(size): a = int(target[i]) - value[i] sum += a", "heapq.heappop(self.elements)[1] #czy kolejka jest pusta? 
def empty(self): return len(self.elements) == 0 class MedianFilter:", "value def errorsquare(target, value): size = len(target) sum = 0.0 for i in", "przefiltrowana wartosc def getValue(self): return Median(self.data) #pozwala na symulowanie opoznienia w pomiarach class", "== self.size: self.index = 0 #zwraca przefiltrowana wartosc def getValue(self): return Median(self.data) #pozwala", "= size // 2 return (order[size-1] + order[size]) / 2 return order[size//2] #kolejka", "None: if x == 0 and y == 0: return (0, 0) mag", "y=None): if y is not None: if x == 0 and y ==", "#zwraca wartosc 'num' ograniczana przez <_min, _max> def clamp(num, _min, _max): if num", "def push(self, item, priority): heapq.heappush(self.elements, (priority, item)) #zdejmuje i zwraca element z poczatku", "def normalized(x, y=None): if y is not None: if x == 0 and", "(0, 0) mag = magnitude(x) return (x[0]/mag, x[1]/mag) #zwraca roznice kwadratowa miedzy target", "sqrDistance(A, B): x = A[0] - B[0] y = A[1] - B[1] return", "kwadratu def sqrMagnitude(x, y=None): if y is not None: return x*x + y*y", "num self.index += 1 if self.index == self.size: self.index = 0 #zwraca przefiltrowana", "z jej znakiem def signedSqr(num): if num >= 0: return num * num", "_min return num #zwraca wartosc iloczynu skalarnego miedzy wektorami A i B def", "do tablicy danych filtra def push(self, num): self.data[self.index] = num self.index += 1", "def dot(A, B): return A[0] * B[0] + A[1] * B[1] #zwraca wartosc", "y*y) #zwraca kwadrat odleglosci miedzy punktami A i B def sqrDistance(A, B): x", "wektor [x, y] def normalized(x, y=None): if y is not None: if x", "_max> def clamp(num, _min, _max): if num > _max: return _max elif num", "ale z jej znakiem def signedSqr(num): if num >= 0: return num *", "num #zwraca kwadrat liczby ze znakiem; f(x)=(x+1)^2-1 def signedBaseOneSqr(num): if num >= 0:", "+ y*y return x[0] * x[0] + x[1] * x[1] #zwraca dlugosc wektora", "def __init__(self): self.elements = [] #dodaje element do 
kolejki def push(self, item, priority):", "def sign(num): if num > 0: return 1.0 if num < 0: return", "_max elif num < _min: return _min return num #zwraca wartosc iloczynu skalarnego", "len(order) if size % 2 == 0: size = size // 2 return", "#'delay' oznacza, ile pomiarow jest miedzy aktualna wartoscia, a opozniana def __init__(self, delay,", "+ y*y) #zwraca kwadrat odleglosci miedzy punktami A i B def sqrDistance(A, B):", "i in range(size): a = int(target[i]) - value[i] sum += a * a", "delay, dtype): self.delay = delay self.data = [dtype] * delay self.index = 0", "0: return -1.0 return 0.0 def softsign(num): if num < 0: return num", "== 0 and y == 0: return (0, 0) mag = magnitude(x, y)", "miedzy punktami A i B def sqrDistance(A, B): x = A[0] - B[0]", "filtra def __init__(self, size): self.data = [0.0] * size self.size = size self.index", "return x*x + y*y #zwraca znormalizowany wektor [x, y] def normalized(x, y=None): if", "x[1] == 0: return (0, 0) mag = magnitude(x) return (x[0]/mag, x[1]/mag) #zwraca", "a * a return sum #zwraca wartosc 'num' ograniczana przez <_min, _max> def", "1 if self.index == self.size: self.index = 0 #zwraca przefiltrowana wartosc def getValue(self):", "num / (1 - num) return num / (1 + num) #zwraca kwadrat", "wektorami A i B def dot(A, B): return A[0] * B[0] + A[1]", "return 1.0 if num < 0: return -1.0 return 0.0 def softsign(num): if", "return (0, 0) mag = magnitude(x) return (x[0]/mag, x[1]/mag) #zwraca roznice kwadratowa miedzy", "2 return order[size//2] #kolejka priorytetowa class PriorityQueue: def __init__(self): self.elements = [] #dodaje", "self.elements = [] #dodaje element do kolejki def push(self, item, priority): heapq.heappush(self.elements, (priority,", "return math.sqrt(x[0]*x[0] + x[1]*x[1]) #zwraca odleglosc miedzy punktami A i B def distance(A,", "order[size]) / 2 return order[size//2] #kolejka priorytetowa class PriorityQueue: def __init__(self): self.elements =", "= size self.index = 0 #dodaje element do tablicy 
danych filtra def push(self,", "#PRZYDATNE FUNKCJE MATEMATYCZNE import math import heapq #zwrca znak liczby def sign(num): if", "clamp(num, _min, _max): if num > _max: return _max elif num < _min:", "wartosc def getValue(self): return Median(self.data) #pozwala na symulowanie opoznienia w pomiarach class SignalDelay:", "a 'get' def tick(self): self.index += 1 if self.index == self.delay: self.index =", "= num self.index += 1 if self.index == self.size: self.index = 0 #zwraca", "def sqrDistance(A, B): x = A[0] - B[0] y = A[1] - B[1]", "* B[0] + A[1] * B[1] #zwraca wartosc iloczynu wektorowego miedzy wektorami A", "0 and x[1] == 0: return (0, 0) mag = magnitude(x) return (x[0]/mag,", "return num / (1 - num) return num / (1 + num) #zwraca", "return (x[0]/mag, x[1]/mag) #zwraca roznice kwadratowa miedzy target a value def errorsquare(target, value):", "if num >= 0: return num * num return -num * num #zwraca", "math.sqrt(x*x + y*y) return math.sqrt(x[0]*x[0] + x[1]*x[1]) #zwraca odleglosc miedzy punktami A i", "softsign(num): if num < 0: return num / (1 - num) return num", "return -1.0 return 0.0 def softsign(num): if num < 0: return num /", "jest pusta? 
def empty(self): return len(self.elements) == 0 class MedianFilter: #size to wielkosc", "magnitude(x, y) return (x/mag, y/mag) else: if x[0] == 0 and x[1] ==", "num > 0: return 1.0 if num < 0: return -1.0 return 0.0", "z tablicy 'data' def Median(data): order = sorted(data) size = len(order) if size", "(1-num)**2 #interpolacja liniowa def lerp(a, b, c): return c*b + (1-c) * a", "y = A[1] - B[1] return x*x + y*y #zwraca znormalizowany wektor [x,", "== 0: size = size // 2 return (order[size-1] + order[size]) / 2", "A[0] * B[0] + A[1] * B[1] #zwraca wartosc iloczynu wektorowego miedzy wektorami", "return num * num return -num * num #zwraca kwadrat liczby ze znakiem;", "size): self.data = [0.0] * size self.size = size self.index = 0 #dodaje", "B): return A[0] * B[1] - A[1] * B[0] #zwraca mediane liczb z", "wektora [x, y] def magnitude(x, y=None): if y is not None: return math.sqrt(x*x", "y = A[1] - B[1] return math.sqrt(x*x + y*y) #zwraca kwadrat odleglosci miedzy", "i B def distance(A, B): x = A[0] - B[0] y = A[1]", "kwadrat liczby, ale z jej znakiem def signedSqr(num): if num >= 0: return", "= delay self.data = [dtype] * delay self.index = 0 #dodaje element do", "pop(self): return heapq.heappop(self.elements)[1] #czy kolejka jest pusta? 
def empty(self): return len(self.elements) == 0", "iloczynu skalarnego miedzy wektorami A i B def dot(A, B): return A[0] *", "if num > _max: return _max elif num < _min: return _min return", "c): return c*b + (1-c) * a #zwraca dlugosc wektora [x, y] do", "#zwraca dlugosc wektora [x, y] def magnitude(x, y=None): if y is not None:", "empty(self): return len(self.elements) == 0 class MedianFilter: #size to wielkosc kernela filtra def", ">= 0: return num * num return -num * num #zwraca kwadrat liczby", "for i in range(size): a = int(target[i]) - value[i] sum += a *", "math.sqrt(x[0]*x[0] + x[1]*x[1]) #zwraca odleglosc miedzy punktami A i B def distance(A, B):", "// 2 return (order[size-1] + order[size]) / 2 return order[size//2] #kolejka priorytetowa class", "pusta? def empty(self): return len(self.elements) == 0 class MedianFilter: #size to wielkosc kernela", "mag = magnitude(x, y) return (x/mag, y/mag) else: if x[0] == 0 and", "else: if x[0] == 0 and x[1] == 0: return (0, 0) mag", "if num < 0: return num / (1 - num) return num /", "return (x/mag, y/mag) else: if x[0] == 0 and x[1] == 0: return", "return self.data[self.index] #symuluje uplyw czasu o 1 jednostke; musi byc wywolane pomiedzy uzyciem", "+ (1-c) * a #zwraca dlugosc wektora [x, y] do kwadratu def sqrMagnitude(x,", "return x[0] * x[0] + x[1] * x[1] #zwraca dlugosc wektora [x, y]", "danych def push(self, element): self.data[self.index] = element #zwraca element opozniony o 'delay' def", "return sum #zwraca wartosc 'num' ograniczana przez <_min, _max> def clamp(num, _min, _max):", "import heapq #zwrca znak liczby def sign(num): if num > 0: return 1.0", "znormalizowany wektor [x, y] def normalized(x, y=None): if y is not None: if", "= [0.0] * size self.size = size self.index = 0 #dodaje element do", "delay self.index = 0 #dodaje element do struktury danych def push(self, element): self.data[self.index]", "__init__(self, delay, dtype): self.delay = delay self.data = [dtype] * delay self.index =", "- (1-num)**2 
#interpolacja liniowa def lerp(a, b, c): return c*b + (1-c) *", "A i B def distance(A, B): x = A[0] - B[0] y =", "#zwraca mediane liczb z tablicy 'data' def Median(data): order = sorted(data) size =", "0 #zwraca przefiltrowana wartosc def getValue(self): return Median(self.data) #pozwala na symulowanie opoznienia w", "#kolejka priorytetowa class PriorityQueue: def __init__(self): self.elements = [] #dodaje element do kolejki", "x[1] * x[1] #zwraca dlugosc wektora [x, y] def magnitude(x, y=None): if y", "= len(order) if size % 2 == 0: size = size // 2", "x[1]/mag) #zwraca roznice kwadratowa miedzy target a value def errorsquare(target, value): size =", "math.sqrt(x*x + y*y) #zwraca kwadrat odleglosci miedzy punktami A i B def sqrDistance(A,", "* a #zwraca dlugosc wektora [x, y] do kwadratu def sqrMagnitude(x, y=None): if", "self.delay = delay self.data = [dtype] * delay self.index = 0 #dodaje element", "#zdejmuje i zwraca element z poczatku kolejki def pop(self): return heapq.heappop(self.elements)[1] #czy kolejka", "B[0] y = A[1] - B[1] return math.sqrt(x*x + y*y) #zwraca kwadrat odleglosci", "* delay self.index = 0 #dodaje element do struktury danych def push(self, element):", "<reponame>janw23/Ballance<filename>PythonCode/MathModule.py #PRZYDATNE FUNKCJE MATEMATYCZNE import math import heapq #zwrca znak liczby def sign(num):", "miedzy target a value def errorsquare(target, value): size = len(target) sum = 0.0", "A i B def cross(A, B): return A[0] * B[1] - A[1] *", "0) mag = magnitude(x) return (x[0]/mag, x[1]/mag) #zwraca roznice kwadratowa miedzy target a", "B[1] return x*x + y*y #zwraca znormalizowany wektor [x, y] def normalized(x, y=None):", "def Median(data): order = sorted(data) size = len(order) if size % 2 ==", "num > _max: return _max elif num < _min: return _min return num", "x == 0 and y == 0: return (0, 0) mag = magnitude(x,", "(num+1)**2 - 1 return 1 - (1-num)**2 #interpolacja liniowa def lerp(a, b, c):", "< 0: return -1.0 return 0.0 def 
softsign(num): if num < 0: return", "element #zwraca element opozniony o 'delay' def get(self): return self.data[self.index] #symuluje uplyw czasu", "czasu o 1 jednostke; musi byc wywolane pomiedzy uzyciem 'push' a 'get' def", "f(x)=(x+1)^2-1 def signedBaseOneSqr(num): if num >= 0: return (num+1)**2 - 1 return 1", "struktury danych def push(self, element): self.data[self.index] = element #zwraca element opozniony o 'delay'", "w pomiarach class SignalDelay: #'delay' oznacza, ile pomiarow jest miedzy aktualna wartoscia, a", "if num < 0: return -1.0 return 0.0 def softsign(num): if num <", "magnitude(x) return (x[0]/mag, x[1]/mag) #zwraca roznice kwadratowa miedzy target a value def errorsquare(target,", "value): size = len(target) sum = 0.0 for i in range(size): a =", "return _max elif num < _min: return _min return num #zwraca wartosc iloczynu", "opozniana def __init__(self, delay, dtype): self.delay = delay self.data = [dtype] * delay", "#pozwala na symulowanie opoznienia w pomiarach class SignalDelay: #'delay' oznacza, ile pomiarow jest", "elif num < _min: return _min return num #zwraca wartosc iloczynu skalarnego miedzy", "B def dot(A, B): return A[0] * B[0] + A[1] * B[1] #zwraca", "y] def magnitude(x, y=None): if y is not None: return math.sqrt(x*x + y*y)", "errorsquare(target, value): size = len(target) sum = 0.0 for i in range(size): a", "return -num * num #zwraca kwadrat liczby ze znakiem; f(x)=(x+1)^2-1 def signedBaseOneSqr(num): if", "(1 + num) #zwraca kwadrat liczby, ale z jej znakiem def signedSqr(num): if", "SignalDelay: #'delay' oznacza, ile pomiarow jest miedzy aktualna wartoscia, a opozniana def __init__(self,", "self.data[self.index] = element #zwraca element opozniony o 'delay' def get(self): return self.data[self.index] #symuluje", "return 1 - (1-num)**2 #interpolacja liniowa def lerp(a, b, c): return c*b +", "do kwadratu def sqrMagnitude(x, y=None): if y is not None: return x*x +", "return x*x + y*y return x[0] * x[0] + x[1] * x[1] #zwraca", "in 
range(size): a = int(target[i]) - value[i] sum += a * a return", "wektorowego miedzy wektorami A i B def cross(A, B): return A[0] * B[1]", "liczby ze znakiem; f(x)=(x+1)^2-1 def signedBaseOneSqr(num): if num >= 0: return (num+1)**2 -", "a return sum #zwraca wartosc 'num' ograniczana przez <_min, _max> def clamp(num, _min,", "return Median(self.data) #pozwala na symulowanie opoznienia w pomiarach class SignalDelay: #'delay' oznacza, ile", "B[0] #zwraca mediane liczb z tablicy 'data' def Median(data): order = sorted(data) size", "size % 2 == 0: size = size // 2 return (order[size-1] +", "return _min return num #zwraca wartosc iloczynu skalarnego miedzy wektorami A i B", "wartosc 'num' ograniczana przez <_min, _max> def clamp(num, _min, _max): if num >", "self.data = [0.0] * size self.size = size self.index = 0 #dodaje element", "* a return sum #zwraca wartosc 'num' ograniczana przez <_min, _max> def clamp(num,", "sign(num): if num > 0: return 1.0 if num < 0: return -1.0", "A[1] * B[1] #zwraca wartosc iloczynu wektorowego miedzy wektorami A i B def", "pomiedzy uzyciem 'push' a 'get' def tick(self): self.index += 1 if self.index ==", "item, priority): heapq.heappush(self.elements, (priority, item)) #zdejmuje i zwraca element z poczatku kolejki def", "return A[0] * B[0] + A[1] * B[1] #zwraca wartosc iloczynu wektorowego miedzy", "i zwraca element z poczatku kolejki def pop(self): return heapq.heappop(self.elements)[1] #czy kolejka jest", "0: return num / (1 - num) return num / (1 + num)", "return math.sqrt(x*x + y*y) return math.sqrt(x[0]*x[0] + x[1]*x[1]) #zwraca odleglosc miedzy punktami A", "0 and y == 0: return (0, 0) mag = magnitude(x, y) return", "== 0 class MedianFilter: #size to wielkosc kernela filtra def __init__(self, size): self.data", "sqrMagnitude(x, y=None): if y is not None: return x*x + y*y return x[0]", "(0, 0) mag = magnitude(x, y) return (x/mag, y/mag) else: if x[0] ==", "B): x = A[0] - B[0] y = A[1] - B[1] return x*x", "return c*b + (1-c) * a 
#zwraca dlugosc wektora [x, y] do kwadratu", "c*b + (1-c) * a #zwraca dlugosc wektora [x, y] do kwadratu def", "magnitude(x, y=None): if y is not None: return math.sqrt(x*x + y*y) return math.sqrt(x[0]*x[0]", "#zwraca dlugosc wektora [x, y] do kwadratu def sqrMagnitude(x, y=None): if y is", "punktami A i B def sqrDistance(A, B): x = A[0] - B[0] y", "do struktury danych def push(self, element): self.data[self.index] = element #zwraca element opozniony o", "wartosc iloczynu wektorowego miedzy wektorami A i B def cross(A, B): return A[0]", "self.index = 0 #zwraca przefiltrowana wartosc def getValue(self): return Median(self.data) #pozwala na symulowanie", "kwadrat odleglosci miedzy punktami A i B def sqrDistance(A, B): x = A[0]", "symulowanie opoznienia w pomiarach class SignalDelay: #'delay' oznacza, ile pomiarow jest miedzy aktualna", "(priority, item)) #zdejmuje i zwraca element z poczatku kolejki def pop(self): return heapq.heappop(self.elements)[1]", "do kolejki def push(self, item, priority): heapq.heappush(self.elements, (priority, item)) #zdejmuje i zwraca element", "oznacza, ile pomiarow jest miedzy aktualna wartoscia, a opozniana def __init__(self, delay, dtype):", "'num' ograniczana przez <_min, _max> def clamp(num, _min, _max): if num > _max:", "+ x[1]*x[1]) #zwraca odleglosc miedzy punktami A i B def distance(A, B): x", "y*y) return math.sqrt(x[0]*x[0] + x[1]*x[1]) #zwraca odleglosc miedzy punktami A i B def", "= A[0] - B[0] y = A[1] - B[1] return math.sqrt(x*x + y*y)", "1 - (1-num)**2 #interpolacja liniowa def lerp(a, b, c): return c*b + (1-c)", "x[1]*x[1]) #zwraca odleglosc miedzy punktami A i B def distance(A, B): x =", "(order[size-1] + order[size]) / 2 return order[size//2] #kolejka priorytetowa class PriorityQueue: def __init__(self):", "element do kolejki def push(self, item, priority): heapq.heappush(self.elements, (priority, item)) #zdejmuje i zwraca", "distance(A, B): x = A[0] - B[0] y = A[1] - B[1] return", "'delay' def get(self): return 
self.data[self.index] #symuluje uplyw czasu o 1 jednostke; musi byc", "znakiem def signedSqr(num): if num >= 0: return num * num return -num", "b, c): return c*b + (1-c) * a #zwraca dlugosc wektora [x, y]", "0 class MedianFilter: #size to wielkosc kernela filtra def __init__(self, size): self.data =", "#dodaje element do tablicy danych filtra def push(self, num): self.data[self.index] = num self.index", ">= 0: return (num+1)**2 - 1 return 1 - (1-num)**2 #interpolacja liniowa def", "num < 0: return -1.0 return 0.0 def softsign(num): if num < 0:", "liczby, ale z jej znakiem def signedSqr(num): if num >= 0: return num", "y*y return x[0] * x[0] + x[1] * x[1] #zwraca dlugosc wektora [x,", "liczb z tablicy 'data' def Median(data): order = sorted(data) size = len(order) if", "* B[1] - A[1] * B[0] #zwraca mediane liczb z tablicy 'data' def", "zwraca element z poczatku kolejki def pop(self): return heapq.heappop(self.elements)[1] #czy kolejka jest pusta?", "element opozniony o 'delay' def get(self): return self.data[self.index] #symuluje uplyw czasu o 1", "y) return (x/mag, y/mag) else: if x[0] == 0 and x[1] == 0:", "= sorted(data) size = len(order) if size % 2 == 0: size =", "if num >= 0: return (num+1)**2 - 1 return 1 - (1-num)**2 #interpolacja", "a opozniana def __init__(self, delay, dtype): self.delay = delay self.data = [dtype] *", "aktualna wartoscia, a opozniana def __init__(self, delay, dtype): self.delay = delay self.data =", "- 1 return 1 - (1-num)**2 #interpolacja liniowa def lerp(a, b, c): return", "iloczynu wektorowego miedzy wektorami A i B def cross(A, B): return A[0] *", "self.data[self.index] #symuluje uplyw czasu o 1 jednostke; musi byc wywolane pomiedzy uzyciem 'push'", "wielkosc kernela filtra def __init__(self, size): self.data = [0.0] * size self.size =", "A i B def dot(A, B): return A[0] * B[0] + A[1] *", "A[1] - B[1] return x*x + y*y #zwraca znormalizowany wektor [x, y] def", "przez <_min, _max> def clamp(num, _min, _max): if num > _max: return 
_max", "_min, _max): if num > _max: return _max elif num < _min: return", "x = A[0] - B[0] y = A[1] - B[1] return x*x +", "def errorsquare(target, value): size = len(target) sum = 0.0 for i in range(size):", "B[0] + A[1] * B[1] #zwraca wartosc iloczynu wektorowego miedzy wektorami A i", "roznice kwadratowa miedzy target a value def errorsquare(target, value): size = len(target) sum", "value[i] sum += a * a return sum #zwraca wartosc 'num' ograniczana przez", "B[1] - A[1] * B[0] #zwraca mediane liczb z tablicy 'data' def Median(data):", "jednostke; musi byc wywolane pomiedzy uzyciem 'push' a 'get' def tick(self): self.index +=", "0 #dodaje element do tablicy danych filtra def push(self, num): self.data[self.index] = num", "#zwrca znak liczby def sign(num): if num > 0: return 1.0 if num", "def push(self, num): self.data[self.index] = num self.index += 1 if self.index == self.size:", "x = A[0] - B[0] y = A[1] - B[1] return math.sqrt(x*x +", "pomiarach class SignalDelay: #'delay' oznacza, ile pomiarow jest miedzy aktualna wartoscia, a opozniana", "== 0: return (0, 0) mag = magnitude(x) return (x[0]/mag, x[1]/mag) #zwraca roznice", "def get(self): return self.data[self.index] #symuluje uplyw czasu o 1 jednostke; musi byc wywolane", "#size to wielkosc kernela filtra def __init__(self, size): self.data = [0.0] * size", "B[1] #zwraca wartosc iloczynu wektorowego miedzy wektorami A i B def cross(A, B):", "None: return math.sqrt(x*x + y*y) return math.sqrt(x[0]*x[0] + x[1]*x[1]) #zwraca odleglosc miedzy punktami", "* B[0] #zwraca mediane liczb z tablicy 'data' def Median(data): order = sorted(data)", "def magnitude(x, y=None): if y is not None: return math.sqrt(x*x + y*y) return", "is not None: return math.sqrt(x*x + y*y) return math.sqrt(x[0]*x[0] + x[1]*x[1]) #zwraca odleglosc", "def __init__(self, size): self.data = [0.0] * size self.size = size self.index =", "[x, y] def normalized(x, y=None): if y is not None: if x ==", "B def distance(A, B): x = A[0] - B[0] y = A[1] 
-", "sorted(data) size = len(order) if size % 2 == 0: size = size", "#dodaje element do kolejki def push(self, item, priority): heapq.heappush(self.elements, (priority, item)) #zdejmuje i", "= element #zwraca element opozniony o 'delay' def get(self): return self.data[self.index] #symuluje uplyw", "(1 - num) return num / (1 + num) #zwraca kwadrat liczby, ale", "push(self, num): self.data[self.index] = num self.index += 1 if self.index == self.size: self.index", "return num #zwraca wartosc iloczynu skalarnego miedzy wektorami A i B def dot(A,", "is not None: if x == 0 and y == 0: return (0,", "#dodaje element do struktury danych def push(self, element): self.data[self.index] = element #zwraca element", "push(self, element): self.data[self.index] = element #zwraca element opozniony o 'delay' def get(self): return", "1.0 if num < 0: return -1.0 return 0.0 def softsign(num): if num", "0: return num * num return -num * num #zwraca kwadrat liczby ze", "if y is not None: if x == 0 and y == 0:", "to wielkosc kernela filtra def __init__(self, size): self.data = [0.0] * size self.size", "i B def cross(A, B): return A[0] * B[1] - A[1] * B[0]", "return num / (1 + num) #zwraca kwadrat liczby, ale z jej znakiem", "element do tablicy danych filtra def push(self, num): self.data[self.index] = num self.index +=", "and y == 0: return (0, 0) mag = magnitude(x, y) return (x/mag,", "if y is not None: return x*x + y*y return x[0] * x[0]", "size = size // 2 return (order[size-1] + order[size]) / 2 return order[size//2]", "def sqrMagnitude(x, y=None): if y is not None: return x*x + y*y return", "heapq #zwrca znak liczby def sign(num): if num > 0: return 1.0 if", "miedzy punktami A i B def distance(A, B): x = A[0] - B[0]", "item)) #zdejmuje i zwraca element z poczatku kolejki def pop(self): return heapq.heappop(self.elements)[1] #czy", "-1.0 return 0.0 def softsign(num): if num < 0: return num / (1", "= A[0] - B[0] y = A[1] - B[1] return x*x + y*y", "i B def dot(A, B): return A[0] * B[0] + 
A[1] * B[1]", "miedzy aktualna wartoscia, a opozniana def __init__(self, delay, dtype): self.delay = delay self.data", "= A[1] - B[1] return math.sqrt(x*x + y*y) #zwraca kwadrat odleglosci miedzy punktami", "x[0] + x[1] * x[1] #zwraca dlugosc wektora [x, y] def magnitude(x, y=None):", "== 0 and x[1] == 0: return (0, 0) mag = magnitude(x) return", "0) mag = magnitude(x, y) return (x/mag, y/mag) else: if x[0] == 0", "#zwraca odleglosc miedzy punktami A i B def distance(A, B): x = A[0]", "self.size: self.index = 0 #zwraca przefiltrowana wartosc def getValue(self): return Median(self.data) #pozwala na", "- B[1] return x*x + y*y #zwraca znormalizowany wektor [x, y] def normalized(x,", "a value def errorsquare(target, value): size = len(target) sum = 0.0 for i", "return math.sqrt(x*x + y*y) #zwraca kwadrat odleglosci miedzy punktami A i B def", "- B[1] return math.sqrt(x*x + y*y) #zwraca kwadrat odleglosci miedzy punktami A i", "return A[0] * B[1] - A[1] * B[0] #zwraca mediane liczb z tablicy", "self.data = [dtype] * delay self.index = 0 #dodaje element do struktury danych", "y*y #zwraca znormalizowany wektor [x, y] def normalized(x, y=None): if y is not", "priority): heapq.heappush(self.elements, (priority, item)) #zdejmuje i zwraca element z poczatku kolejki def pop(self):", "(1-c) * a #zwraca dlugosc wektora [x, y] do kwadratu def sqrMagnitude(x, y=None):", "[0.0] * size self.size = size self.index = 0 #dodaje element do tablicy", "_max): if num > _max: return _max elif num < _min: return _min", "kernela filtra def __init__(self, size): self.data = [0.0] * size self.size = size", "if x == 0 and y == 0: return (0, 0) mag =", "self.size = size self.index = 0 #dodaje element do tablicy danych filtra def", "num >= 0: return num * num return -num * num #zwraca kwadrat", "size = len(target) sum = 0.0 for i in range(size): a = int(target[i])", "0.0 def softsign(num): if num < 0: return num / (1 - num)", "wektorami A i B def cross(A, B): return A[0] * B[1] - A[1]", 
"wartoscia, a opozniana def __init__(self, delay, dtype): self.delay = delay self.data = [dtype]", "num #zwraca wartosc iloczynu skalarnego miedzy wektorami A i B def dot(A, B):", "= int(target[i]) - value[i] sum += a * a return sum #zwraca wartosc", "- value[i] sum += a * a return sum #zwraca wartosc 'num' ograniczana", "* B[1] #zwraca wartosc iloczynu wektorowego miedzy wektorami A i B def cross(A,", "return len(self.elements) == 0 class MedianFilter: #size to wielkosc kernela filtra def __init__(self,", "B def cross(A, B): return A[0] * B[1] - A[1] * B[0] #zwraca", "MATEMATYCZNE import math import heapq #zwrca znak liczby def sign(num): if num >", "delay self.data = [dtype] * delay self.index = 0 #dodaje element do struktury", "FUNKCJE MATEMATYCZNE import math import heapq #zwrca znak liczby def sign(num): if num", "if self.index == self.size: self.index = 0 #zwraca przefiltrowana wartosc def getValue(self): return", "normalized(x, y=None): if y is not None: if x == 0 and y", "num) #zwraca kwadrat liczby, ale z jej znakiem def signedSqr(num): if num >=", "size // 2 return (order[size-1] + order[size]) / 2 return order[size//2] #kolejka priorytetowa", "def empty(self): return len(self.elements) == 0 class MedianFilter: #size to wielkosc kernela filtra", "return (num+1)**2 - 1 return 1 - (1-num)**2 #interpolacja liniowa def lerp(a, b,", "self.data[self.index] = num self.index += 1 if self.index == self.size: self.index = 0", "y=None): if y is not None: return math.sqrt(x*x + y*y) return math.sqrt(x[0]*x[0] +", "Median(data): order = sorted(data) size = len(order) if size % 2 == 0:", "kolejki def pop(self): return heapq.heappop(self.elements)[1] #czy kolejka jest pusta? 
def empty(self): return len(self.elements)", "if x[0] == 0 and x[1] == 0: return (0, 0) mag =", "mag = magnitude(x) return (x[0]/mag, x[1]/mag) #zwraca roznice kwadratowa miedzy target a value", "A[1] - B[1] return math.sqrt(x*x + y*y) #zwraca kwadrat odleglosci miedzy punktami A", "B): x = A[0] - B[0] y = A[1] - B[1] return math.sqrt(x*x", "_min: return _min return num #zwraca wartosc iloczynu skalarnego miedzy wektorami A i", "return (order[size-1] + order[size]) / 2 return order[size//2] #kolejka priorytetowa class PriorityQueue: def", "wywolane pomiedzy uzyciem 'push' a 'get' def tick(self): self.index += 1 if self.index", "-num * num #zwraca kwadrat liczby ze znakiem; f(x)=(x+1)^2-1 def signedBaseOneSqr(num): if num", "x*x + y*y return x[0] * x[0] + x[1] * x[1] #zwraca dlugosc", "= 0 #dodaje element do tablicy danych filtra def push(self, num): self.data[self.index] =", "sum = 0.0 for i in range(size): a = int(target[i]) - value[i] sum", "opoznienia w pomiarach class SignalDelay: #'delay' oznacza, ile pomiarow jest miedzy aktualna wartoscia,", "> _max: return _max elif num < _min: return _min return num #zwraca", "odleglosc miedzy punktami A i B def distance(A, B): x = A[0] -", "x[0] == 0 and x[1] == 0: return (0, 0) mag = magnitude(x)", "* x[0] + x[1] * x[1] #zwraca dlugosc wektora [x, y] def magnitude(x,", "< _min: return _min return num #zwraca wartosc iloczynu skalarnego miedzy wektorami A", "def distance(A, B): x = A[0] - B[0] y = A[1] - B[1]", "opozniony o 'delay' def get(self): return self.data[self.index] #symuluje uplyw czasu o 1 jednostke;", "y is not None: return math.sqrt(x*x + y*y) return math.sqrt(x[0]*x[0] + x[1]*x[1]) #zwraca", "#czy kolejka jest pusta? 
def empty(self): return len(self.elements) == 0 class MedianFilter: #size", "def push(self, element): self.data[self.index] = element #zwraca element opozniony o 'delay' def get(self):", "sum += a * a return sum #zwraca wartosc 'num' ograniczana przez <_min,", "wektora [x, y] do kwadratu def sqrMagnitude(x, y=None): if y is not None:", "Median(self.data) #pozwala na symulowanie opoznienia w pomiarach class SignalDelay: #'delay' oznacza, ile pomiarow", "[] #dodaje element do kolejki def push(self, item, priority): heapq.heappush(self.elements, (priority, item)) #zdejmuje", "liniowa def lerp(a, b, c): return c*b + (1-c) * a #zwraca dlugosc", "uzyciem 'push' a 'get' def tick(self): self.index += 1 if self.index == self.delay:", "o 1 jednostke; musi byc wywolane pomiedzy uzyciem 'push' a 'get' def tick(self):", "0: return 1.0 if num < 0: return -1.0 return 0.0 def softsign(num):", "= 0.0 for i in range(size): a = int(target[i]) - value[i] sum +=", "__init__(self): self.elements = [] #dodaje element do kolejki def push(self, item, priority): heapq.heappush(self.elements,", "x[0] * x[0] + x[1] * x[1] #zwraca dlugosc wektora [x, y] def", "def signedBaseOneSqr(num): if num >= 0: return (num+1)**2 - 1 return 1 -", "pomiarow jest miedzy aktualna wartoscia, a opozniana def __init__(self, delay, dtype): self.delay =", "push(self, item, priority): heapq.heappush(self.elements, (priority, item)) #zdejmuje i zwraca element z poczatku kolejki", "+= 1 if self.index == self.size: self.index = 0 #zwraca przefiltrowana wartosc def", "None: return x*x + y*y return x[0] * x[0] + x[1] * x[1]", "- B[0] y = A[1] - B[1] return math.sqrt(x*x + y*y) #zwraca kwadrat", "+ num) #zwraca kwadrat liczby, ale z jej znakiem def signedSqr(num): if num", "signedSqr(num): if num >= 0: return num * num return -num * num", "'push' a 'get' def tick(self): self.index += 1 if self.index == self.delay: self.index", "jej znakiem def signedSqr(num): if num >= 0: return num * num return", "and x[1] == 0: return 
(0, 0) mag = magnitude(x) return (x[0]/mag, x[1]/mag)", "o 'delay' def get(self): return self.data[self.index] #symuluje uplyw czasu o 1 jednostke; musi", "def cross(A, B): return A[0] * B[1] - A[1] * B[0] #zwraca mediane", "def __init__(self, delay, dtype): self.delay = delay self.data = [dtype] * delay self.index", "+= a * a return sum #zwraca wartosc 'num' ograniczana przez <_min, _max>", "danych filtra def push(self, num): self.data[self.index] = num self.index += 1 if self.index", "filtra def push(self, num): self.data[self.index] = num self.index += 1 if self.index ==", "odleglosci miedzy punktami A i B def sqrDistance(A, B): x = A[0] -", "return heapq.heappop(self.elements)[1] #czy kolejka jest pusta? def empty(self): return len(self.elements) == 0 class", "- num) return num / (1 + num) #zwraca kwadrat liczby, ale z", "#zwraca roznice kwadratowa miedzy target a value def errorsquare(target, value): size = len(target)", "num / (1 + num) #zwraca kwadrat liczby, ale z jej znakiem def", "kolejka jest pusta? 
def empty(self): return len(self.elements) == 0 class MedianFilter: #size to", "* x[1] #zwraca dlugosc wektora [x, y] def magnitude(x, y=None): if y is", "+ A[1] * B[1] #zwraca wartosc iloczynu wektorowego miedzy wektorami A i B", "A[0] - B[0] y = A[1] - B[1] return math.sqrt(x*x + y*y) #zwraca", "* num return -num * num #zwraca kwadrat liczby ze znakiem; f(x)=(x+1)^2-1 def", "element do struktury danych def push(self, element): self.data[self.index] = element #zwraca element opozniony", "dlugosc wektora [x, y] def magnitude(x, y=None): if y is not None: return", "1 jednostke; musi byc wywolane pomiedzy uzyciem 'push' a 'get' def tick(self): self.index", "'data' def Median(data): order = sorted(data) size = len(order) if size % 2", "#zwraca kwadrat liczby ze znakiem; f(x)=(x+1)^2-1 def signedBaseOneSqr(num): if num >= 0: return", "y == 0: return (0, 0) mag = magnitude(x, y) return (x/mag, y/mag)", "0: size = size // 2 return (order[size-1] + order[size]) / 2 return", "0: return (num+1)**2 - 1 return 1 - (1-num)**2 #interpolacja liniowa def lerp(a,", "(x/mag, y/mag) else: if x[0] == 0 and x[1] == 0: return (0,", "A[0] * B[1] - A[1] * B[0] #zwraca mediane liczb z tablicy 'data'", "A[1] * B[0] #zwraca mediane liczb z tablicy 'data' def Median(data): order =", "class PriorityQueue: def __init__(self): self.elements = [] #dodaje element do kolejki def push(self,", "size self.size = size self.index = 0 #dodaje element do tablicy danych filtra", "return (0, 0) mag = magnitude(x, y) return (x/mag, y/mag) else: if x[0]", "'get' def tick(self): self.index += 1 if self.index == self.delay: self.index = 0", "tablicy danych filtra def push(self, num): self.data[self.index] = num self.index += 1 if", "size self.index = 0 #dodaje element do tablicy danych filtra def push(self, num):", "B): return A[0] * B[0] + A[1] * B[1] #zwraca wartosc iloczynu wektorowego", "i B def sqrDistance(A, B): x = A[0] - B[0] y = A[1]", "heapq.heappush(self.elements, (priority, item)) #zdejmuje i 
zwraca element z poczatku kolejki def pop(self): return", "1 return 1 - (1-num)**2 #interpolacja liniowa def lerp(a, b, c): return c*b", "signedBaseOneSqr(num): if num >= 0: return (num+1)**2 - 1 return 1 - (1-num)**2", "num): self.data[self.index] = num self.index += 1 if self.index == self.size: self.index =", "def signedSqr(num): if num >= 0: return num * num return -num *", "def softsign(num): if num < 0: return num / (1 - num) return", "% 2 == 0: size = size // 2 return (order[size-1] + order[size])", "0: return (0, 0) mag = magnitude(x, y) return (x/mag, y/mag) else: if", "2 return (order[size-1] + order[size]) / 2 return order[size//2] #kolejka priorytetowa class PriorityQueue:", "= 0 #zwraca przefiltrowana wartosc def getValue(self): return Median(self.data) #pozwala na symulowanie opoznienia", "<_min, _max> def clamp(num, _min, _max): if num > _max: return _max elif", "wartosc iloczynu skalarnego miedzy wektorami A i B def dot(A, B): return A[0]", "y] def normalized(x, y=None): if y is not None: if x == 0", "not None: if x == 0 and y == 0: return (0, 0)", "#symuluje uplyw czasu o 1 jednostke; musi byc wywolane pomiedzy uzyciem 'push' a", "na symulowanie opoznienia w pomiarach class SignalDelay: #'delay' oznacza, ile pomiarow jest miedzy", "jest miedzy aktualna wartoscia, a opozniana def __init__(self, delay, dtype): self.delay = delay", "a = int(target[i]) - value[i] sum += a * a return sum #zwraca", "znak liczby def sign(num): if num > 0: return 1.0 if num <", "= magnitude(x, y) return (x/mag, y/mag) else: if x[0] == 0 and x[1]", "= [] #dodaje element do kolejki def push(self, item, priority): heapq.heappush(self.elements, (priority, item))", "def getValue(self): return Median(self.data) #pozwala na symulowanie opoznienia w pomiarach class SignalDelay: #'delay'", "> 0: return 1.0 if num < 0: return -1.0 return 0.0 def", "+ y*y) return math.sqrt(x[0]*x[0] + x[1]*x[1]) #zwraca odleglosc miedzy punktami A i B", "dtype): self.delay = delay self.data = [dtype] 
* delay self.index = 0 #dodaje", "class SignalDelay: #'delay' oznacza, ile pomiarow jest miedzy aktualna wartoscia, a opozniana def", "ograniczana przez <_min, _max> def clamp(num, _min, _max): if num > _max: return", "y] do kwadratu def sqrMagnitude(x, y=None): if y is not None: return x*x", "liczby def sign(num): if num > 0: return 1.0 if num < 0:", "x[1] #zwraca dlugosc wektora [x, y] def magnitude(x, y=None): if y is not" ]
[ "0 logger.info(f'Params: {key}, LR: {lr}, Weight_Decay: {weight_decay}') params += [{\"params\": [value], \"lr\": lr,", "dtype=np.int) ignore_index = seg_gt != ignore seg_gt = seg_gt[ignore_index] seg_pred = seg_pred[ignore_index] index", "{}, dim_per_gp: {}\".format(dim, dim_per_gp) group_gn = dim // dim_per_gp else: assert dim %", "if not torch.distributed.is_initialized(): return 1 return torch.distributed.get_world_size() def get_rank(): if not torch.distributed.is_initialized(): return", "dim % num_groups == 0, \\ \"dim: {}, num_groups: {}\".format(dim, num_groups) group_gn =", "np.zeros((num_class, num_class)) for i_label in range(num_class): for i_pred in range(num_class): cur_index = i_label", "key: if any(key.find(sub) != -1 for sub in _nwd_keys): weight_decay = 0 logger.info(f'Params:", "exist_ok=True) return logger, str(final_output_dir), str(tensorboard_log_dir) def get_confusion_matrix(label, pred, size, num_class, ignore=-1): \"\"\" Calcute", "0: assert dim % dim_per_gp == 0, \\ \"dim: {}, dim_per_gp: {}\".format(dim, dim_per_gp)", "multi-gpu to reduce the memory cost in the main gpu. 
You can check", "{}\".format(dim, dim_per_gp) group_gn = dim // dim_per_gp else: assert dim % num_groups ==", "get_optimizer(config, model): _nwd_keys = ('bias', 'bn', 'norm', 'prelu', 'nwd') params = [] for", "= logging.StreamHandler() logging.getLogger('').addHandler(console) tensorboard_log_dir = Path(cfg.LOG_DIR) / dataset / model / \\ (cfg_name", "time_str, phase) final_log_file = final_output_dir / log_file head = '%(asctime)-15s %(message)s' logging.basicConfig(filename=str(final_log_file), format=head)", "'nwd') params = [] for key, value in model.named_parameters(): if not value.requires_grad: continue", "average and current value\"\"\" def __init__(self): self.initialized = False self.val = None self.avg", "= 0 logger.info(f'Params: {key}, LR: {lr}, Weight_Decay: {weight_decay}') params += [{\"params\": [value], \"lr\":", "(c) Microsoft # Licensed under the MIT License. # Written by <NAME> (<EMAIL>)", "== 0, \\ \"dim: {}, dim_per_gp: {}\".format(dim, dim_per_gp) group_gn = dim // dim_per_gp", "outputs = self.model(inputs) loss = self.loss(outputs, labels) return loss, outputs def get_world_size(): if", "on multi-gpu to reduce the memory cost in the main gpu. 
You can", "== -1 or num_groups == -1, \\ \"GroupNorm: can only specify G or", "i_pred in range(num_class): cur_index = i_label * num_class + i_pred if cur_index <", "+ time_str) print('=> creating {}'.format(tensorboard_log_dir)) tensorboard_log_dir.mkdir(parents=True, exist_ok=True) return logger, str(final_output_dir), str(tensorboard_log_dir) def get_confusion_matrix(label,", "{lr}, Weight_Decay: {weight_decay}') elif 'base' in key: if any(key.find(sub) != -1 for sub", "head = '%(asctime)-15s %(message)s' logging.basicConfig(filename=str(final_log_file), format=head) logger = logging.getLogger() logger.setLevel(logging.INFO) console = logging.StreamHandler()", "log_file head = '%(asctime)-15s %(message)s' logging.basicConfig(filename=str(final_log_file), format=head) logger = logging.getLogger() logger.setLevel(logging.INFO) console =", "by given label and pred \"\"\" output = pred.cpu().numpy().transpose(0, 2, 3, 1) seg_pred", "You can check the following discussion. https://discuss.pytorch.org/t/dataparallel-imbalanced-memory-usage/22551/21 \"\"\" def __init__(self, model, loss): super(FullModel,", "confusion_matrix[i_label, i_pred] = label_count[cur_index] return confusion_matrix def get_optimizer(config, model): _nwd_keys = ('bias', 'bn',", "NotImplementedError return optimizer def get_group_gn(dim, dim_per_gp, num_groups): \"\"\"get number of groups used by", "\\ \"dim: {}, dim_per_gp: {}\".format(dim, dim_per_gp) group_gn = dim // dim_per_gp else: assert", "cost in the main gpu. You can check the following discussion. 
https://discuss.pytorch.org/t/dataparallel-imbalanced-memory-usage/22551/21 \"\"\"", "labels) return loss, outputs def get_world_size(): if not torch.distributed.is_initialized(): return 1 return torch.distributed.get_world_size()", "# set up logger if not root_output_dir.exists(): print('=> creating {}'.format(root_output_dir)) root_output_dir.mkdir() dataset =", "not root_output_dir.exists(): print('=> creating {}'.format(root_output_dir)) root_output_dir.mkdir() dataset = cfg.DATASET.DATASET model = cfg.MODEL.NAME cfg_name", "'head' in key: lr *= 10 if any(key.find(sub) != -1 for sub in", "// dim_per_gp else: assert dim % num_groups == 0, \\ \"dim: {}, num_groups:", "console = logging.StreamHandler() logging.getLogger('').addHandler(console) tensorboard_log_dir = Path(cfg.LOG_DIR) / dataset / model / \\", "check the following discussion. https://discuss.pytorch.org/t/dataparallel-imbalanced-memory-usage/22551/21 \"\"\" def __init__(self, model, loss): super(FullModel, self).__init__() self.model", "/ self.count def value(self): return self.val def average(self): return self.avg def create_logger(cfg, cfg_name,", "= None self.avg = None self.sum = None self.count = None def initialize(self,", "*= 10 if any(key.find(sub) != -1 for sub in _nwd_keys): weight_decay = 0", "add(self, val, weight): self.val = val self.sum += val * weight self.count +=", "return self.avg def create_logger(cfg, cfg_name, phase='train'): root_output_dir = Path(cfg.OUTPUT_DIR) # set up logger", "\"dim: {}, dim_per_gp: {}\".format(dim, dim_per_gp) group_gn = dim // dim_per_gp else: assert dim", "root_output_dir.mkdir() dataset = cfg.DATASET.DATASET model = cfg.MODEL.NAME cfg_name = os.path.basename(cfg_name).split('.')[0] final_output_dir = root_output_dir", "<NAME> (<EMAIL>) # ------------------------------------------------------------------------------ from __future__ import absolute_import from __future__ import division from", "seg_pred = np.asarray(np.argmax(output, axis=3), 
dtype=np.uint8) seg_gt = np.asarray( label.cpu().numpy()[:, :size[-2], :size[-1]], dtype=np.int) ignore_index", "as np import torch import torch.nn as nn logger = logging.getLogger(__name__) class FullModel(nn.Module):", "val, weight=1): if not self.initialized: self.initialize(val, weight) else: self.add(val, weight) def add(self, val,", "phase='train'): root_output_dir = Path(cfg.OUTPUT_DIR) # set up logger if not root_output_dir.exists(): print('=> creating", "to reduce the memory cost in the main gpu. You can check the", "momentum=config.TRAIN.MOMENTUM, weight_decay=config.TRAIN.WD, nesterov=config.TRAIN.NESTEROV, ) elif config.TRAIN.OPTIMIZER == 'adam': optimizer = torch.optim.Adam(params, lr=config.TRAIN.LR, amsgrad=config.TRAIN.AMSGRAD", "= self.loss(outputs, labels) return loss, outputs def get_world_size(): if not torch.distributed.is_initialized(): return 1", "not torch.distributed.is_initialized(): return 0 return torch.distributed.get_rank() class AverageMeter(object): \"\"\"Computes and stores the average", "# ------------------------------------------------------------------------------ from __future__ import absolute_import from __future__ import division from __future__ import", "class FullModel(nn.Module): \"\"\" Distribute the loss on multi-gpu to reduce the memory cost", "import division from __future__ import print_function import os import logging import time from", "__init__(self, model, loss): super(FullModel, self).__init__() self.model = model self.loss = loss def forward(self,", "torch.distributed.get_rank() class AverageMeter(object): \"\"\"Computes and stores the average and current value\"\"\" def __init__(self):", "LR: {lr}, Weight_Decay: {weight_decay}') params += [{\"params\": [value], \"lr\": lr, \"weight_decay\": weight_decay}] if", "= pred.cpu().numpy().transpose(0, 2, 3, 1) seg_pred = np.asarray(np.argmax(output, axis=3), dtype=np.uint8) seg_gt = np.asarray(", "= self.model(inputs) loss = self.loss(outputs, labels) return loss, 
outputs def get_world_size(): if not", "= np.bincount(index) confusion_matrix = np.zeros((num_class, num_class)) for i_label in range(num_class): for i_pred in", "Weight_Decay: {weight_decay}') elif 'base' in key: if any(key.find(sub) != -1 for sub in", "_nwd_keys = ('bias', 'bn', 'norm', 'prelu', 'nwd') params = [] for key, value", "final_log_file = final_output_dir / log_file head = '%(asctime)-15s %(message)s' logging.basicConfig(filename=str(final_log_file), format=head) logger =", "str(final_output_dir), str(tensorboard_log_dir) def get_confusion_matrix(label, pred, size, num_class, ignore=-1): \"\"\" Calcute the confusion matrix", "self.val = val self.avg = val self.sum = val * weight self.count =", "the memory cost in the main gpu. You can check the following discussion.", "self.avg def create_logger(cfg, cfg_name, phase='train'): root_output_dir = Path(cfg.OUTPUT_DIR) # set up logger if", "== 'sgd': optimizer = torch.optim.SGD(params, lr=config.TRAIN.LR, momentum=config.TRAIN.MOMENTUM, weight_decay=config.TRAIN.WD, nesterov=config.TRAIN.NESTEROV, ) elif config.TRAIN.OPTIMIZER ==", "groups used by GroupNorm, based on number of channels.\"\"\" assert dim_per_gp == -1", "__future__ import absolute_import from __future__ import division from __future__ import print_function import os", "{}'.format(final_output_dir)) final_output_dir.mkdir(parents=True, exist_ok=True) time_str = time.strftime('%Y-%m-%d-%H-%M') log_file = '{}_{}_{}.log'.format(cfg_name, time_str, phase) final_log_file =", "self.count def value(self): return self.val def average(self): return self.avg def create_logger(cfg, cfg_name, phase='train'):", "cfg_name = os.path.basename(cfg_name).split('.')[0] final_output_dir = root_output_dir / dataset / cfg_name print('=> creating {}'.format(final_output_dir))", "time.strftime('%Y-%m-%d-%H-%M') log_file = '{}_{}_{}.log'.format(cfg_name, time_str, phase) final_log_file = final_output_dir / log_file head =", "division from __future__ import 
print_function import os import logging import time from pathlib", "FullModel(nn.Module): \"\"\" Distribute the loss on multi-gpu to reduce the memory cost in", "if dim_per_gp > 0: assert dim % dim_per_gp == 0, \\ \"dim: {},", "Microsoft # Licensed under the MIT License. # Written by <NAME> (<EMAIL>) #", "= val * weight self.count = weight self.initialized = True def update(self, val,", "= torch.optim.SGD(params, lr=config.TRAIN.LR, momentum=config.TRAIN.MOMENTUM, weight_decay=config.TRAIN.WD, nesterov=config.TRAIN.NESTEROV, ) elif config.TRAIN.OPTIMIZER == 'adam': optimizer =", "torch.optim.Adam(params, lr=config.TRAIN.LR, amsgrad=config.TRAIN.AMSGRAD ) else: raise NotImplementedError return optimizer def get_group_gn(dim, dim_per_gp, num_groups):", "logger = logging.getLogger(__name__) class FullModel(nn.Module): \"\"\" Distribute the loss on multi-gpu to reduce", "= time.strftime('%Y-%m-%d-%H-%M') log_file = '{}_{}_{}.log'.format(cfg_name, time_str, phase) final_log_file = final_output_dir / log_file head", "\"weight_decay\": weight_decay}] if config.TRAIN.OPTIMIZER == 'sgd': optimizer = torch.optim.SGD(params, lr=config.TRAIN.LR, momentum=config.TRAIN.MOMENTUM, weight_decay=config.TRAIN.WD, nesterov=config.TRAIN.NESTEROV,", "/ dataset / cfg_name print('=> creating {}'.format(final_output_dir)) final_output_dir.mkdir(parents=True, exist_ok=True) time_str = time.strftime('%Y-%m-%d-%H-%M') log_file", "dim_per_gp) group_gn = dim // dim_per_gp else: assert dim % num_groups == 0,", "dim_per_gp > 0: assert dim % dim_per_gp == 0, \\ \"dim: {}, dim_per_gp:", "'{}_{}_{}.log'.format(cfg_name, time_str, phase) final_log_file = final_output_dir / log_file head = '%(asctime)-15s %(message)s' logging.basicConfig(filename=str(final_log_file),", "group_gn = dim // dim_per_gp else: assert dim % num_groups == 0, \\", "np import torch import torch.nn as nn logger = logging.getLogger(__name__) class FullModel(nn.Module): \"\"\"", "self.count += weight self.avg = self.sum / 
self.count def value(self): return self.val def", "import os import logging import time from pathlib import Path import numpy as", "self.initialized = True def update(self, val, weight=1): if not self.initialized: self.initialize(val, weight) else:", "weight) else: self.add(val, weight) def add(self, val, weight): self.val = val self.sum +=", "_nwd_keys): weight_decay = 0 logger.info(f'Params: {key}, LR: {lr}, Weight_Decay: {weight_decay}') elif 'base' in", "self.val = val self.sum += val * weight self.count += weight self.avg =", "= loss def forward(self, inputs, labels): outputs = self.model(inputs) loss = self.loss(outputs, labels)", "or C/G.\" if dim_per_gp > 0: assert dim % dim_per_gp == 0, \\", "self.val = None self.avg = None self.sum = None self.count = None def", "Copyright (c) Microsoft # Licensed under the MIT License. # Written by <NAME>", "{weight_decay}') elif 'base' in key: if any(key.find(sub) != -1 for sub in _nwd_keys):", "3, 1) seg_pred = np.asarray(np.argmax(output, axis=3), dtype=np.uint8) seg_gt = np.asarray( label.cpu().numpy()[:, :size[-2], :size[-1]],", "label_count[cur_index] return confusion_matrix def get_optimizer(config, model): _nwd_keys = ('bias', 'bn', 'norm', 'prelu', 'nwd')", "cfg_name print('=> creating {}'.format(final_output_dir)) final_output_dir.mkdir(parents=True, exist_ok=True) time_str = time.strftime('%Y-%m-%d-%H-%M') log_file = '{}_{}_{}.log'.format(cfg_name, time_str,", "------------------------------------------------------------------------------ from __future__ import absolute_import from __future__ import division from __future__ import print_function", "lr, \"weight_decay\": weight_decay}] if config.TRAIN.OPTIMIZER == 'sgd': optimizer = torch.optim.SGD(params, lr=config.TRAIN.LR, momentum=config.TRAIN.MOMENTUM, weight_decay=config.TRAIN.WD,", "!= -1 for sub in _nwd_keys): weight_decay = 0 logger.info(f'Params: {key}, LR: {lr},", "# Written by <NAME> (<EMAIL>) # 
------------------------------------------------------------------------------ from __future__ import absolute_import from __future__", "\"\"\" Distribute the loss on multi-gpu to reduce the memory cost in the", "else: raise NotImplementedError return optimizer def get_group_gn(dim, dim_per_gp, num_groups): \"\"\"get number of groups", "for i_label in range(num_class): for i_pred in range(num_class): cur_index = i_label * num_class", "label_count = np.bincount(index) confusion_matrix = np.zeros((num_class, num_class)) for i_label in range(num_class): for i_pred", "self).__init__() self.model = model self.loss = loss def forward(self, inputs, labels): outputs =", "def update(self, val, weight=1): if not self.initialized: self.initialize(val, weight) else: self.add(val, weight) def", "for key, value in model.named_parameters(): if not value.requires_grad: continue lr = config.TRAIN.LR weight_decay", "nn logger = logging.getLogger(__name__) class FullModel(nn.Module): \"\"\" Distribute the loss on multi-gpu to", "\"\"\" output = pred.cpu().numpy().transpose(0, 2, 3, 1) seg_pred = np.asarray(np.argmax(output, axis=3), dtype=np.uint8) seg_gt", "= Path(cfg.LOG_DIR) / dataset / model / \\ (cfg_name + '_' + time_str)", "10 if any(key.find(sub) != -1 for sub in _nwd_keys): weight_decay = 0 logger.info(f'Params:", "numpy as np import torch import torch.nn as nn logger = logging.getLogger(__name__) class", "used by GroupNorm, based on number of channels.\"\"\" assert dim_per_gp == -1 or", "based on number of channels.\"\"\" assert dim_per_gp == -1 or num_groups == -1,", "average(self): return self.avg def create_logger(cfg, cfg_name, phase='train'): root_output_dir = Path(cfg.OUTPUT_DIR) # set up", "= np.asarray( label.cpu().numpy()[:, :size[-2], :size[-1]], dtype=np.int) ignore_index = seg_gt != ignore seg_gt =", "# Licensed under the MIT License. 
# Written by <NAME> (<EMAIL>) # ------------------------------------------------------------------------------", "in model.named_parameters(): if not value.requires_grad: continue lr = config.TRAIN.LR weight_decay = config.TRAIN.WD if", "os import logging import time from pathlib import Path import numpy as np", "1 return torch.distributed.get_world_size() def get_rank(): if not torch.distributed.is_initialized(): return 0 return torch.distributed.get_rank() class", "logging.StreamHandler() logging.getLogger('').addHandler(console) tensorboard_log_dir = Path(cfg.LOG_DIR) / dataset / model / \\ (cfg_name +", "[value], \"lr\": lr, \"weight_decay\": weight_decay}] if config.TRAIN.OPTIMIZER == 'sgd': optimizer = torch.optim.SGD(params, lr=config.TRAIN.LR,", "nesterov=config.TRAIN.NESTEROV, ) elif config.TRAIN.OPTIMIZER == 'adam': optimizer = torch.optim.Adam(params, lr=config.TRAIN.LR, amsgrad=config.TRAIN.AMSGRAD ) else:", "weight_decay = 0 logger.info(f'Params: {key}, LR: {lr}, Weight_Decay: {weight_decay}') params += [{\"params\": [value],", "def average(self): return self.avg def create_logger(cfg, cfg_name, phase='train'): root_output_dir = Path(cfg.OUTPUT_DIR) # set", "num_class)) for i_label in range(num_class): for i_pred in range(num_class): cur_index = i_label *", "any(key.find(sub) != -1 for sub in _nwd_keys): weight_decay = 0 logger.info(f'Params: {key}, LR:", "ignore_index = seg_gt != ignore seg_gt = seg_gt[ignore_index] seg_pred = seg_pred[ignore_index] index =", "get_group_gn(dim, dim_per_gp, num_groups): \"\"\"get number of groups used by GroupNorm, based on number", "model.named_parameters(): if not value.requires_grad: continue lr = config.TRAIN.LR weight_decay = config.TRAIN.WD if 'head'", "self.val def average(self): return self.avg def create_logger(cfg, cfg_name, phase='train'): root_output_dir = Path(cfg.OUTPUT_DIR) #", "loss def forward(self, inputs, labels): outputs = self.model(inputs) loss = self.loss(outputs, labels) return", "def 
get_world_size(): if not torch.distributed.is_initialized(): return 1 return torch.distributed.get_world_size() def get_rank(): if not", "as nn logger = logging.getLogger(__name__) class FullModel(nn.Module): \"\"\" Distribute the loss on multi-gpu", "assert dim % num_groups == 0, \\ \"dim: {}, num_groups: {}\".format(dim, num_groups) group_gn", "value.requires_grad: continue lr = config.TRAIN.LR weight_decay = config.TRAIN.WD if 'head' in key: lr", "or num_groups == -1, \\ \"GroupNorm: can only specify G or C/G.\" if", "val * weight self.count += weight self.avg = self.sum / self.count def value(self):", "ignore seg_gt = seg_gt[ignore_index] seg_pred = seg_pred[ignore_index] index = (seg_gt * num_class +", "if cur_index < len(label_count): confusion_matrix[i_label, i_pred] = label_count[cur_index] return confusion_matrix def get_optimizer(config, model):", "Distribute the loss on multi-gpu to reduce the memory cost in the main", "= None self.sum = None self.count = None def initialize(self, val, weight): self.val", "weight self.initialized = True def update(self, val, weight=1): if not self.initialized: self.initialize(val, weight)", "label.cpu().numpy()[:, :size[-2], :size[-1]], dtype=np.int) ignore_index = seg_gt != ignore seg_gt = seg_gt[ignore_index] seg_pred", "num_groups == 0, \\ \"dim: {}, num_groups: {}\".format(dim, num_groups) group_gn = num_groups return", "None def initialize(self, val, weight): self.val = val self.avg = val self.sum =", "self.avg = val self.sum = val * weight self.count = weight self.initialized =", "model self.loss = loss def forward(self, inputs, labels): outputs = self.model(inputs) loss =", "index = (seg_gt * num_class + seg_pred).astype('int32') label_count = np.bincount(index) confusion_matrix = np.zeros((num_class,", "discussion. https://discuss.pytorch.org/t/dataparallel-imbalanced-memory-usage/22551/21 \"\"\" def __init__(self, model, loss): super(FullModel, self).__init__() self.model = model self.loss", "the MIT License. 
# Written by <NAME> (<EMAIL>) # ------------------------------------------------------------------------------ from __future__ import", "= '{}_{}_{}.log'.format(cfg_name, time_str, phase) final_log_file = final_output_dir / log_file head = '%(asctime)-15s %(message)s'", "in range(num_class): cur_index = i_label * num_class + i_pred if cur_index < len(label_count):", "under the MIT License. # Written by <NAME> (<EMAIL>) # ------------------------------------------------------------------------------ from __future__", "dim_per_gp == 0, \\ \"dim: {}, dim_per_gp: {}\".format(dim, dim_per_gp) group_gn = dim //", "confusion matrix by given label and pred \"\"\" output = pred.cpu().numpy().transpose(0, 2, 3,", "cur_index = i_label * num_class + i_pred if cur_index < len(label_count): confusion_matrix[i_label, i_pred]", "optimizer def get_group_gn(dim, dim_per_gp, num_groups): \"\"\"get number of groups used by GroupNorm, based", "np.asarray(np.argmax(output, axis=3), dtype=np.uint8) seg_gt = np.asarray( label.cpu().numpy()[:, :size[-2], :size[-1]], dtype=np.int) ignore_index = seg_gt", "import torch.nn as nn logger = logging.getLogger(__name__) class FullModel(nn.Module): \"\"\" Distribute the loss", "num_groups): \"\"\"get number of groups used by GroupNorm, based on number of channels.\"\"\"", "config.TRAIN.OPTIMIZER == 'sgd': optimizer = torch.optim.SGD(params, lr=config.TRAIN.LR, momentum=config.TRAIN.MOMENTUM, weight_decay=config.TRAIN.WD, nesterov=config.TRAIN.NESTEROV, ) elif config.TRAIN.OPTIMIZER", "[] for key, value in model.named_parameters(): if not value.requires_grad: continue lr = config.TRAIN.LR", "outputs def get_world_size(): if not torch.distributed.is_initialized(): return 1 return torch.distributed.get_world_size() def get_rank(): if", "__future__ import print_function import os import logging import time from pathlib import Path", "len(label_count): confusion_matrix[i_label, i_pred] = label_count[cur_index] return confusion_matrix def 
get_optimizer(config, model): _nwd_keys = ('bias',", "sub in _nwd_keys): weight_decay = 0 logger.info(f'Params: {key}, LR: {lr}, Weight_Decay: {weight_decay}') params", "inputs, labels): outputs = self.model(inputs) loss = self.loss(outputs, labels) return loss, outputs def", "= val self.avg = val self.sum = val * weight self.count = weight", "model, loss): super(FullModel, self).__init__() self.model = model self.loss = loss def forward(self, inputs,", "+ i_pred if cur_index < len(label_count): confusion_matrix[i_label, i_pred] = label_count[cur_index] return confusion_matrix def", "= dim // dim_per_gp else: assert dim % num_groups == 0, \\ \"dim:", "cfg.DATASET.DATASET model = cfg.MODEL.NAME cfg_name = os.path.basename(cfg_name).split('.')[0] final_output_dir = root_output_dir / dataset /", "torch.distributed.is_initialized(): return 1 return torch.distributed.get_world_size() def get_rank(): if not torch.distributed.is_initialized(): return 0 return", "% num_groups == 0, \\ \"dim: {}, num_groups: {}\".format(dim, num_groups) group_gn = num_groups", "weight_decay = 0 logger.info(f'Params: {key}, LR: {lr}, Weight_Decay: {weight_decay}') elif 'base' in key:", "{key}, LR: {lr}, Weight_Decay: {weight_decay}') params += [{\"params\": [value], \"lr\": lr, \"weight_decay\": weight_decay}]", "cfg_name, phase='train'): root_output_dir = Path(cfg.OUTPUT_DIR) # set up logger if not root_output_dir.exists(): print('=>", "logging.basicConfig(filename=str(final_log_file), format=head) logger = logging.getLogger() logger.setLevel(logging.INFO) console = logging.StreamHandler() logging.getLogger('').addHandler(console) tensorboard_log_dir = Path(cfg.LOG_DIR)", "range(num_class): cur_index = i_label * num_class + i_pred if cur_index < len(label_count): confusion_matrix[i_label,", "main gpu. You can check the following discussion. 
https://discuss.pytorch.org/t/dataparallel-imbalanced-memory-usage/22551/21 \"\"\" def __init__(self, model,", "+ seg_pred).astype('int32') label_count = np.bincount(index) confusion_matrix = np.zeros((num_class, num_class)) for i_label in range(num_class):", "def value(self): return self.val def average(self): return self.avg def create_logger(cfg, cfg_name, phase='train'): root_output_dir", "weight self.count = weight self.initialized = True def update(self, val, weight=1): if not", "final_output_dir.mkdir(parents=True, exist_ok=True) time_str = time.strftime('%Y-%m-%d-%H-%M') log_file = '{}_{}_{}.log'.format(cfg_name, time_str, phase) final_log_file = final_output_dir", "== -1, \\ \"GroupNorm: can only specify G or C/G.\" if dim_per_gp >", "the following discussion. https://discuss.pytorch.org/t/dataparallel-imbalanced-memory-usage/22551/21 \"\"\" def __init__(self, model, loss): super(FullModel, self).__init__() self.model =", "> 0: assert dim % dim_per_gp == 0, \\ \"dim: {}, dim_per_gp: {}\".format(dim,", "* weight self.count += weight self.avg = self.sum / self.count def value(self): return", "import numpy as np import torch import torch.nn as nn logger = logging.getLogger(__name__)", "self.initialized = False self.val = None self.avg = None self.sum = None self.count", "= config.TRAIN.LR weight_decay = config.TRAIN.WD if 'head' in key: lr *= 10 if", "weight self.avg = self.sum / self.count def value(self): return self.val def average(self): return", "+= [{\"params\": [value], \"lr\": lr, \"weight_decay\": weight_decay}] if config.TRAIN.OPTIMIZER == 'sgd': optimizer =", "/ log_file head = '%(asctime)-15s %(message)s' logging.basicConfig(filename=str(final_log_file), format=head) logger = logging.getLogger() logger.setLevel(logging.INFO) console", "%(message)s' logging.basicConfig(filename=str(final_log_file), format=head) logger = logging.getLogger() logger.setLevel(logging.INFO) console = logging.StreamHandler() logging.getLogger('').addHandler(console) 
tensorboard_log_dir =", "of groups used by GroupNorm, based on number of channels.\"\"\" assert dim_per_gp ==", "pathlib import Path import numpy as np import torch import torch.nn as nn", "torch.optim.SGD(params, lr=config.TRAIN.LR, momentum=config.TRAIN.MOMENTUM, weight_decay=config.TRAIN.WD, nesterov=config.TRAIN.NESTEROV, ) elif config.TRAIN.OPTIMIZER == 'adam': optimizer = torch.optim.Adam(params,", "seg_pred[ignore_index] index = (seg_gt * num_class + seg_pred).astype('int32') label_count = np.bincount(index) confusion_matrix =", "loss on multi-gpu to reduce the memory cost in the main gpu. You", "creating {}'.format(final_output_dir)) final_output_dir.mkdir(parents=True, exist_ok=True) time_str = time.strftime('%Y-%m-%d-%H-%M') log_file = '{}_{}_{}.log'.format(cfg_name, time_str, phase) final_log_file", "label and pred \"\"\" output = pred.cpu().numpy().transpose(0, 2, 3, 1) seg_pred = np.asarray(np.argmax(output,", "val self.sum = val * weight self.count = weight self.initialized = True def", "can check the following discussion. 
https://discuss.pytorch.org/t/dataparallel-imbalanced-memory-usage/22551/21 \"\"\" def __init__(self, model, loss): super(FullModel, self).__init__()", "pred \"\"\" output = pred.cpu().numpy().transpose(0, 2, 3, 1) seg_pred = np.asarray(np.argmax(output, axis=3), dtype=np.uint8)", "__future__ import division from __future__ import print_function import os import logging import time", "num_class + i_pred if cur_index < len(label_count): confusion_matrix[i_label, i_pred] = label_count[cur_index] return confusion_matrix", "root_output_dir / dataset / cfg_name print('=> creating {}'.format(final_output_dir)) final_output_dir.mkdir(parents=True, exist_ok=True) time_str = time.strftime('%Y-%m-%d-%H-%M')", "-1 for sub in _nwd_keys): weight_decay = 0 logger.info(f'Params: {key}, LR: {lr}, Weight_Decay:", "self.sum = val * weight self.count = weight self.initialized = True def update(self,", "Path(cfg.LOG_DIR) / dataset / model / \\ (cfg_name + '_' + time_str) print('=>", "= [] for key, value in model.named_parameters(): if not value.requires_grad: continue lr =", "val, weight): self.val = val self.avg = val self.sum = val * weight", "= cfg.DATASET.DATASET model = cfg.MODEL.NAME cfg_name = os.path.basename(cfg_name).split('.')[0] final_output_dir = root_output_dir / dataset", "if 'head' in key: lr *= 10 if any(key.find(sub) != -1 for sub", "GroupNorm, based on number of channels.\"\"\" assert dim_per_gp == -1 or num_groups ==", "else: self.add(val, weight) def add(self, val, weight): self.val = val self.sum += val", "logger if not root_output_dir.exists(): print('=> creating {}'.format(root_output_dir)) root_output_dir.mkdir() dataset = cfg.DATASET.DATASET model =", "create_logger(cfg, cfg_name, phase='train'): root_output_dir = Path(cfg.OUTPUT_DIR) # set up logger if not root_output_dir.exists():", "self.count = None def initialize(self, val, weight): self.val = val self.avg = val", "final_output_dir = root_output_dir / dataset / cfg_name print('=> creating 
{}'.format(final_output_dir)) final_output_dir.mkdir(parents=True, exist_ok=True) time_str", "absolute_import from __future__ import division from __future__ import print_function import os import logging", "= seg_gt != ignore seg_gt = seg_gt[ignore_index] seg_pred = seg_pred[ignore_index] index = (seg_gt", "config.TRAIN.WD if 'head' in key: lr *= 10 if any(key.find(sub) != -1 for", "# Copyright (c) Microsoft # Licensed under the MIT License. # Written by", "Calcute the confusion matrix by given label and pred \"\"\" output = pred.cpu().numpy().transpose(0,", "True def update(self, val, weight=1): if not self.initialized: self.initialize(val, weight) else: self.add(val, weight)", "get_rank(): if not torch.distributed.is_initialized(): return 0 return torch.distributed.get_rank() class AverageMeter(object): \"\"\"Computes and stores", "{}'.format(root_output_dir)) root_output_dir.mkdir() dataset = cfg.DATASET.DATASET model = cfg.MODEL.NAME cfg_name = os.path.basename(cfg_name).split('.')[0] final_output_dir =", "= Path(cfg.OUTPUT_DIR) # set up logger if not root_output_dir.exists(): print('=> creating {}'.format(root_output_dir)) root_output_dir.mkdir()", "dim_per_gp: {}\".format(dim, dim_per_gp) group_gn = dim // dim_per_gp else: assert dim % num_groups", "'sgd': optimizer = torch.optim.SGD(params, lr=config.TRAIN.LR, momentum=config.TRAIN.MOMENTUM, weight_decay=config.TRAIN.WD, nesterov=config.TRAIN.NESTEROV, ) elif config.TRAIN.OPTIMIZER == 'adam':", "'adam': optimizer = torch.optim.Adam(params, lr=config.TRAIN.LR, amsgrad=config.TRAIN.AMSGRAD ) else: raise NotImplementedError return optimizer def", "= 0 logger.info(f'Params: {key}, LR: {lr}, Weight_Decay: {weight_decay}') elif 'base' in key: if", "self.add(val, weight) def add(self, val, weight): self.val = val self.sum += val *", "the main gpu. You can check the following discussion. 
https://discuss.pytorch.org/t/dataparallel-imbalanced-memory-usage/22551/21 \"\"\" def __init__(self,", "root_output_dir.exists(): print('=> creating {}'.format(root_output_dir)) root_output_dir.mkdir() dataset = cfg.DATASET.DATASET model = cfg.MODEL.NAME cfg_name =", "None self.count = None def initialize(self, val, weight): self.val = val self.avg =", "= None self.count = None def initialize(self, val, weight): self.val = val self.avg", "1) seg_pred = np.asarray(np.argmax(output, axis=3), dtype=np.uint8) seg_gt = np.asarray( label.cpu().numpy()[:, :size[-2], :size[-1]], dtype=np.int)", "amsgrad=config.TRAIN.AMSGRAD ) else: raise NotImplementedError return optimizer def get_group_gn(dim, dim_per_gp, num_groups): \"\"\"get number", "confusion_matrix = np.zeros((num_class, num_class)) for i_label in range(num_class): for i_pred in range(num_class): cur_index", "(seg_gt * num_class + seg_pred).astype('int32') label_count = np.bincount(index) confusion_matrix = np.zeros((num_class, num_class)) for", "and current value\"\"\" def __init__(self): self.initialized = False self.val = None self.avg =", "'_' + time_str) print('=> creating {}'.format(tensorboard_log_dir)) tensorboard_log_dir.mkdir(parents=True, exist_ok=True) return logger, str(final_output_dir), str(tensorboard_log_dir) def", "return confusion_matrix def get_optimizer(config, model): _nwd_keys = ('bias', 'bn', 'norm', 'prelu', 'nwd') params", "of channels.\"\"\" assert dim_per_gp == -1 or num_groups == -1, \\ \"GroupNorm: can", "value(self): return self.val def average(self): return self.avg def create_logger(cfg, cfg_name, phase='train'): root_output_dir =", "\"\"\" Calcute the confusion matrix by given label and pred \"\"\" output =", "can only specify G or C/G.\" if dim_per_gp > 0: assert dim %", "by <NAME> (<EMAIL>) # ------------------------------------------------------------------------------ from __future__ import absolute_import from __future__ import division", "MIT License. 
# Written by <NAME> (<EMAIL>) # ------------------------------------------------------------------------------ from __future__ import absolute_import", "% dim_per_gp == 0, \\ \"dim: {}, dim_per_gp: {}\".format(dim, dim_per_gp) group_gn = dim", "the average and current value\"\"\" def __init__(self): self.initialized = False self.val = None", "weight): self.val = val self.sum += val * weight self.count += weight self.avg", "time_str) print('=> creating {}'.format(tensorboard_log_dir)) tensorboard_log_dir.mkdir(parents=True, exist_ok=True) return logger, str(final_output_dir), str(tensorboard_log_dir) def get_confusion_matrix(label, pred,", "-1, \\ \"GroupNorm: can only specify G or C/G.\" if dim_per_gp > 0:", "up logger if not root_output_dir.exists(): print('=> creating {}'.format(root_output_dir)) root_output_dir.mkdir() dataset = cfg.DATASET.DATASET model", "get_confusion_matrix(label, pred, size, num_class, ignore=-1): \"\"\" Calcute the confusion matrix by given label", "weight) def add(self, val, weight): self.val = val self.sum += val * weight", "lr = config.TRAIN.LR weight_decay = config.TRAIN.WD if 'head' in key: lr *= 10", "AverageMeter(object): \"\"\"Computes and stores the average and current value\"\"\" def __init__(self): self.initialized =", "return self.val def average(self): return self.avg def create_logger(cfg, cfg_name, phase='train'): root_output_dir = Path(cfg.OUTPUT_DIR)", "creating {}'.format(root_output_dir)) root_output_dir.mkdir() dataset = cfg.DATASET.DATASET model = cfg.MODEL.NAME cfg_name = os.path.basename(cfg_name).split('.')[0] final_output_dir", "return torch.distributed.get_rank() class AverageMeter(object): \"\"\"Computes and stores the average and current value\"\"\" def", "C/G.\" if dim_per_gp > 0: assert dim % dim_per_gp == 0, \\ \"dim:", "and stores the average and current value\"\"\" def __init__(self): self.initialized = False self.val", "\\ \"GroupNorm: can only specify G or C/G.\" if dim_per_gp > 0: assert", "i_pred] = 
label_count[cur_index] return confusion_matrix def get_optimizer(config, model): _nwd_keys = ('bias', 'bn', 'norm',", "LR: {lr}, Weight_Decay: {weight_decay}') elif 'base' in key: if any(key.find(sub) != -1 for", "self.initialize(val, weight) else: self.add(val, weight) def add(self, val, weight): self.val = val self.sum", "pred, size, num_class, ignore=-1): \"\"\" Calcute the confusion matrix by given label and", "dataset / model / \\ (cfg_name + '_' + time_str) print('=> creating {}'.format(tensorboard_log_dir))", "following discussion. https://discuss.pytorch.org/t/dataparallel-imbalanced-memory-usage/22551/21 \"\"\" def __init__(self, model, loss): super(FullModel, self).__init__() self.model = model", "lr=config.TRAIN.LR, amsgrad=config.TRAIN.AMSGRAD ) else: raise NotImplementedError return optimizer def get_group_gn(dim, dim_per_gp, num_groups): \"\"\"get", "\"\"\"get number of groups used by GroupNorm, based on number of channels.\"\"\" assert", "number of groups used by GroupNorm, based on number of channels.\"\"\" assert dim_per_gp", "\"\"\" def __init__(self, model, loss): super(FullModel, self).__init__() self.model = model self.loss = loss", "weight_decay}] if config.TRAIN.OPTIMIZER == 'sgd': optimizer = torch.optim.SGD(params, lr=config.TRAIN.LR, momentum=config.TRAIN.MOMENTUM, weight_decay=config.TRAIN.WD, nesterov=config.TRAIN.NESTEROV, )", "i_label in range(num_class): for i_pred in range(num_class): cur_index = i_label * num_class +", "loss = self.loss(outputs, labels) return loss, outputs def get_world_size(): if not torch.distributed.is_initialized(): return", "model): _nwd_keys = ('bias', 'bn', 'norm', 'prelu', 'nwd') params = [] for key,", "output = pred.cpu().numpy().transpose(0, 2, 3, 1) seg_pred = np.asarray(np.argmax(output, axis=3), dtype=np.uint8) seg_gt =", "print('=> creating {}'.format(tensorboard_log_dir)) tensorboard_log_dir.mkdir(parents=True, exist_ok=True) return logger, str(final_output_dir), str(tensorboard_log_dir) def 
get_confusion_matrix(label, pred, size,", "= np.zeros((num_class, num_class)) for i_label in range(num_class): for i_pred in range(num_class): cur_index =", "seg_gt[ignore_index] seg_pred = seg_pred[ignore_index] index = (seg_gt * num_class + seg_pred).astype('int32') label_count =", "import Path import numpy as np import torch import torch.nn as nn logger", "= val self.sum += val * weight self.count += weight self.avg = self.sum", "(cfg_name + '_' + time_str) print('=> creating {}'.format(tensorboard_log_dir)) tensorboard_log_dir.mkdir(parents=True, exist_ok=True) return logger, str(final_output_dir),", "creating {}'.format(tensorboard_log_dir)) tensorboard_log_dir.mkdir(parents=True, exist_ok=True) return logger, str(final_output_dir), str(tensorboard_log_dir) def get_confusion_matrix(label, pred, size, num_class,", "os.path.basename(cfg_name).split('.')[0] final_output_dir = root_output_dir / dataset / cfg_name print('=> creating {}'.format(final_output_dir)) final_output_dir.mkdir(parents=True, exist_ok=True)", "by GroupNorm, based on number of channels.\"\"\" assert dim_per_gp == -1 or num_groups", "+= val * weight self.count += weight self.avg = self.sum / self.count def", "self.sum += val * weight self.count += weight self.avg = self.sum / self.count", "print('=> creating {}'.format(final_output_dir)) final_output_dir.mkdir(parents=True, exist_ok=True) time_str = time.strftime('%Y-%m-%d-%H-%M') log_file = '{}_{}_{}.log'.format(cfg_name, time_str, phase)", "memory cost in the main gpu. You can check the following discussion. 
https://discuss.pytorch.org/t/dataparallel-imbalanced-memory-usage/22551/21", "from __future__ import print_function import os import logging import time from pathlib import", "print('=> creating {}'.format(root_output_dir)) root_output_dir.mkdir() dataset = cfg.DATASET.DATASET model = cfg.MODEL.NAME cfg_name = os.path.basename(cfg_name).split('.')[0]", "config.TRAIN.OPTIMIZER == 'adam': optimizer = torch.optim.Adam(params, lr=config.TRAIN.LR, amsgrad=config.TRAIN.AMSGRAD ) else: raise NotImplementedError return", "dtype=np.uint8) seg_gt = np.asarray( label.cpu().numpy()[:, :size[-2], :size[-1]], dtype=np.int) ignore_index = seg_gt != ignore", "= (seg_gt * num_class + seg_pred).astype('int32') label_count = np.bincount(index) confusion_matrix = np.zeros((num_class, num_class))", "if not torch.distributed.is_initialized(): return 0 return torch.distributed.get_rank() class AverageMeter(object): \"\"\"Computes and stores the", "self.avg = None self.sum = None self.count = None def initialize(self, val, weight):", "weight_decay=config.TRAIN.WD, nesterov=config.TRAIN.NESTEROV, ) elif config.TRAIN.OPTIMIZER == 'adam': optimizer = torch.optim.Adam(params, lr=config.TRAIN.LR, amsgrad=config.TRAIN.AMSGRAD )", "only specify G or C/G.\" if dim_per_gp > 0: assert dim % dim_per_gp", "key, value in model.named_parameters(): if not value.requires_grad: continue lr = config.TRAIN.LR weight_decay =", "loss, outputs def get_world_size(): if not torch.distributed.is_initialized(): return 1 return torch.distributed.get_world_size() def get_rank():", "self.sum / self.count def value(self): return self.val def average(self): return self.avg def create_logger(cfg,", "def forward(self, inputs, labels): outputs = self.model(inputs) loss = self.loss(outputs, labels) return loss,", "model / \\ (cfg_name + '_' + time_str) print('=> creating {}'.format(tensorboard_log_dir)) tensorboard_log_dir.mkdir(parents=True, exist_ok=True)", "dim_per_gp else: assert dim % num_groups == 0, \\ \"dim: {}, 
num_groups: {}\".format(dim,", "def initialize(self, val, weight): self.val = val self.avg = val self.sum = val", "= self.sum / self.count def value(self): return self.val def average(self): return self.avg def", "in the main gpu. You can check the following discussion. https://discuss.pytorch.org/t/dataparallel-imbalanced-memory-usage/22551/21 \"\"\" def", "def get_rank(): if not torch.distributed.is_initialized(): return 0 return torch.distributed.get_rank() class AverageMeter(object): \"\"\"Computes and", "< len(label_count): confusion_matrix[i_label, i_pred] = label_count[cur_index] return confusion_matrix def get_optimizer(config, model): _nwd_keys =", "params = [] for key, value in model.named_parameters(): if not value.requires_grad: continue lr", "import time from pathlib import Path import numpy as np import torch import", "def get_optimizer(config, model): _nwd_keys = ('bias', 'bn', 'norm', 'prelu', 'nwd') params = []", "def get_group_gn(dim, dim_per_gp, num_groups): \"\"\"get number of groups used by GroupNorm, based on", "in _nwd_keys): weight_decay = 0 logger.info(f'Params: {key}, LR: {lr}, Weight_Decay: {weight_decay}') elif 'base'", "'base' in key: if any(key.find(sub) != -1 for sub in _nwd_keys): weight_decay =", "/ cfg_name print('=> creating {}'.format(final_output_dir)) final_output_dir.mkdir(parents=True, exist_ok=True) time_str = time.strftime('%Y-%m-%d-%H-%M') log_file = '{}_{}_{}.log'.format(cfg_name,", "= '%(asctime)-15s %(message)s' logging.basicConfig(filename=str(final_log_file), format=head) logger = logging.getLogger() logger.setLevel(logging.INFO) console = logging.StreamHandler() logging.getLogger('').addHandler(console)", "Written by <NAME> (<EMAIL>) # ------------------------------------------------------------------------------ from __future__ import absolute_import from __future__ import", "self.loss = loss def forward(self, inputs, labels): outputs = self.model(inputs) loss = self.loss(outputs,", "gpu. 
You can check the following discussion. https://discuss.pytorch.org/t/dataparallel-imbalanced-memory-usage/22551/21 \"\"\" def __init__(self, model, loss):", "= logging.getLogger() logger.setLevel(logging.INFO) console = logging.StreamHandler() logging.getLogger('').addHandler(console) tensorboard_log_dir = Path(cfg.LOG_DIR) / dataset /", "in _nwd_keys): weight_decay = 0 logger.info(f'Params: {key}, LR: {lr}, Weight_Decay: {weight_decay}') params +=", "return torch.distributed.get_world_size() def get_rank(): if not torch.distributed.is_initialized(): return 0 return torch.distributed.get_rank() class AverageMeter(object):", "val self.sum += val * weight self.count += weight self.avg = self.sum /", "reduce the memory cost in the main gpu. You can check the following", "dim_per_gp, num_groups): \"\"\"get number of groups used by GroupNorm, based on number of", "= np.asarray(np.argmax(output, axis=3), dtype=np.uint8) seg_gt = np.asarray( label.cpu().numpy()[:, :size[-2], :size[-1]], dtype=np.int) ignore_index =", "{key}, LR: {lr}, Weight_Decay: {weight_decay}') elif 'base' in key: if any(key.find(sub) != -1", "raise NotImplementedError return optimizer def get_group_gn(dim, dim_per_gp, num_groups): \"\"\"get number of groups used", "logger.setLevel(logging.INFO) console = logging.StreamHandler() logging.getLogger('').addHandler(console) tensorboard_log_dir = Path(cfg.LOG_DIR) / dataset / model /", "specify G or C/G.\" if dim_per_gp > 0: assert dim % dim_per_gp ==", "== 0, \\ \"dim: {}, num_groups: {}\".format(dim, num_groups) group_gn = num_groups return group_gn", "G or C/G.\" if dim_per_gp > 0: assert dim % dim_per_gp == 0,", "'%(asctime)-15s %(message)s' logging.basicConfig(filename=str(final_log_file), format=head) logger = logging.getLogger() logger.setLevel(logging.INFO) console = logging.StreamHandler() logging.getLogger('').addHandler(console) tensorboard_log_dir", "= logging.getLogger(__name__) class FullModel(nn.Module): \"\"\" Distribute the loss on 
multi-gpu to reduce the", "confusion_matrix def get_optimizer(config, model): _nwd_keys = ('bias', 'bn', 'norm', 'prelu', 'nwd') params =", "not self.initialized: self.initialize(val, weight) else: self.add(val, weight) def add(self, val, weight): self.val =", "= os.path.basename(cfg_name).split('.')[0] final_output_dir = root_output_dir / dataset / cfg_name print('=> creating {}'.format(final_output_dir)) final_output_dir.mkdir(parents=True,", "torch.distributed.get_world_size() def get_rank(): if not torch.distributed.is_initialized(): return 0 return torch.distributed.get_rank() class AverageMeter(object): \"\"\"Computes", "import torch import torch.nn as nn logger = logging.getLogger(__name__) class FullModel(nn.Module): \"\"\" Distribute", "get_world_size(): if not torch.distributed.is_initialized(): return 1 return torch.distributed.get_world_size() def get_rank(): if not torch.distributed.is_initialized():", "= ('bias', 'bn', 'norm', 'prelu', 'nwd') params = [] for key, value in", "val self.avg = val self.sum = val * weight self.count = weight self.initialized", "/ \\ (cfg_name + '_' + time_str) print('=> creating {}'.format(tensorboard_log_dir)) tensorboard_log_dir.mkdir(parents=True, exist_ok=True) return", "size, num_class, ignore=-1): \"\"\" Calcute the confusion matrix by given label and pred", ") elif config.TRAIN.OPTIMIZER == 'adam': optimizer = torch.optim.Adam(params, lr=config.TRAIN.LR, amsgrad=config.TRAIN.AMSGRAD ) else: raise", "if not value.requires_grad: continue lr = config.TRAIN.LR weight_decay = config.TRAIN.WD if 'head' in", "= cfg.MODEL.NAME cfg_name = os.path.basename(cfg_name).split('.')[0] final_output_dir = root_output_dir / dataset / cfg_name print('=>", "cfg.MODEL.NAME cfg_name = os.path.basename(cfg_name).split('.')[0] final_output_dir = root_output_dir / dataset / cfg_name print('=> creating", "weight_decay = config.TRAIN.WD if 'head' in key: lr *= 10 if any(key.find(sub) !=", "torch.distributed.is_initialized(): return 0 return 
torch.distributed.get_rank() class AverageMeter(object): \"\"\"Computes and stores the average and", "logger = logging.getLogger() logger.setLevel(logging.INFO) console = logging.StreamHandler() logging.getLogger('').addHandler(console) tensorboard_log_dir = Path(cfg.LOG_DIR) / dataset", "the confusion matrix by given label and pred \"\"\" output = pred.cpu().numpy().transpose(0, 2,", "sub in _nwd_keys): weight_decay = 0 logger.info(f'Params: {key}, LR: {lr}, Weight_Decay: {weight_decay}') elif", "log_file = '{}_{}_{}.log'.format(cfg_name, time_str, phase) final_log_file = final_output_dir / log_file head = '%(asctime)-15s", "if any(key.find(sub) != -1 for sub in _nwd_keys): weight_decay = 0 logger.info(f'Params: {key},", "logger.info(f'Params: {key}, LR: {lr}, Weight_Decay: {weight_decay}') elif 'base' in key: if any(key.find(sub) !=", "final_output_dir / log_file head = '%(asctime)-15s %(message)s' logging.basicConfig(filename=str(final_log_file), format=head) logger = logging.getLogger() logger.setLevel(logging.INFO)", "torch.nn as nn logger = logging.getLogger(__name__) class FullModel(nn.Module): \"\"\" Distribute the loss on", "logging.getLogger('').addHandler(console) tensorboard_log_dir = Path(cfg.LOG_DIR) / dataset / model / \\ (cfg_name + '_'", "i_label * num_class + i_pred if cur_index < len(label_count): confusion_matrix[i_label, i_pred] = label_count[cur_index]", "def create_logger(cfg, cfg_name, phase='train'): root_output_dir = Path(cfg.OUTPUT_DIR) # set up logger if not", "'norm', 'prelu', 'nwd') params = [] for key, value in model.named_parameters(): if not", ":size[-2], :size[-1]], dtype=np.int) ignore_index = seg_gt != ignore seg_gt = seg_gt[ignore_index] seg_pred =", "elif config.TRAIN.OPTIMIZER == 'adam': optimizer = torch.optim.Adam(params, lr=config.TRAIN.LR, amsgrad=config.TRAIN.AMSGRAD ) else: raise NotImplementedError", "seg_pred).astype('int32') label_count = np.bincount(index) confusion_matrix = np.zeros((num_class, num_class)) for 
i_label in range(num_class): for", "key: lr *= 10 if any(key.find(sub) != -1 for sub in _nwd_keys): weight_decay", "__init__(self): self.initialized = False self.val = None self.avg = None self.sum = None", "dim % dim_per_gp == 0, \\ \"dim: {}, dim_per_gp: {}\".format(dim, dim_per_gp) group_gn =", "import print_function import os import logging import time from pathlib import Path import", "on number of channels.\"\"\" assert dim_per_gp == -1 or num_groups == -1, \\", "= val self.sum = val * weight self.count = weight self.initialized = True", "class AverageMeter(object): \"\"\"Computes and stores the average and current value\"\"\" def __init__(self): self.initialized", "logging.getLogger(__name__) class FullModel(nn.Module): \"\"\" Distribute the loss on multi-gpu to reduce the memory", "return 0 return torch.distributed.get_rank() class AverageMeter(object): \"\"\"Computes and stores the average and current", "0, \\ \"dim: {}, dim_per_gp: {}\".format(dim, dim_per_gp) group_gn = dim // dim_per_gp else:", "channels.\"\"\" assert dim_per_gp == -1 or num_groups == -1, \\ \"GroupNorm: can only", "lr *= 10 if any(key.find(sub) != -1 for sub in _nwd_keys): weight_decay =", "Path import numpy as np import torch import torch.nn as nn logger =", "logging import time from pathlib import Path import numpy as np import torch", "= weight self.initialized = True def update(self, val, weight=1): if not self.initialized: self.initialize(val,", "from __future__ import absolute_import from __future__ import division from __future__ import print_function import", "= False self.val = None self.avg = None self.sum = None self.count =", "= model self.loss = loss def forward(self, inputs, labels): outputs = self.model(inputs) loss", "dataset = cfg.DATASET.DATASET model = cfg.MODEL.NAME cfg_name = os.path.basename(cfg_name).split('.')[0] final_output_dir = root_output_dir /", "from __future__ import division from __future__ import print_function import os import logging import", "not 
torch.distributed.is_initialized(): return 1 return torch.distributed.get_world_size() def get_rank(): if not torch.distributed.is_initialized(): return 0", "self.avg = self.sum / self.count def value(self): return self.val def average(self): return self.avg", "in range(num_class): for i_pred in range(num_class): cur_index = i_label * num_class + i_pred", "labels): outputs = self.model(inputs) loss = self.loss(outputs, labels) return loss, outputs def get_world_size():", "= True def update(self, val, weight=1): if not self.initialized: self.initialize(val, weight) else: self.add(val,", "\\ (cfg_name + '_' + time_str) print('=> creating {}'.format(tensorboard_log_dir)) tensorboard_log_dir.mkdir(parents=True, exist_ok=True) return logger,", "def __init__(self): self.initialized = False self.val = None self.avg = None self.sum =", "import logging import time from pathlib import Path import numpy as np import", "return 1 return torch.distributed.get_world_size() def get_rank(): if not torch.distributed.is_initialized(): return 0 return torch.distributed.get_rank()", "forward(self, inputs, labels): outputs = self.model(inputs) loss = self.loss(outputs, labels) return loss, outputs", "= None def initialize(self, val, weight): self.val = val self.avg = val self.sum", "num_groups == -1, \\ \"GroupNorm: can only specify G or C/G.\" if dim_per_gp", "= config.TRAIN.WD if 'head' in key: lr *= 10 if any(key.find(sub) != -1", "def add(self, val, weight): self.val = val self.sum += val * weight self.count", "dataset / cfg_name print('=> creating {}'.format(final_output_dir)) final_output_dir.mkdir(parents=True, exist_ok=True) time_str = time.strftime('%Y-%m-%d-%H-%M') log_file =", "return loss, outputs def get_world_size(): if not torch.distributed.is_initialized(): return 1 return torch.distributed.get_world_size() def", "tensorboard_log_dir.mkdir(parents=True, exist_ok=True) return logger, str(final_output_dir), str(tensorboard_log_dir) def get_confusion_matrix(label, pred, 
size, num_class, ignore=-1): \"\"\"", "seg_gt != ignore seg_gt = seg_gt[ignore_index] seg_pred = seg_pred[ignore_index] index = (seg_gt *", "i_pred if cur_index < len(label_count): confusion_matrix[i_label, i_pred] = label_count[cur_index] return confusion_matrix def get_optimizer(config,", "value\"\"\" def __init__(self): self.initialized = False self.val = None self.avg = None self.sum", "None self.avg = None self.sum = None self.count = None def initialize(self, val,", "def __init__(self, model, loss): super(FullModel, self).__init__() self.model = model self.loss = loss def", "ignore=-1): \"\"\" Calcute the confusion matrix by given label and pred \"\"\" output", "continue lr = config.TRAIN.LR weight_decay = config.TRAIN.WD if 'head' in key: lr *=", "return logger, str(final_output_dir), str(tensorboard_log_dir) def get_confusion_matrix(label, pred, size, num_class, ignore=-1): \"\"\" Calcute the", "weight): self.val = val self.avg = val self.sum = val * weight self.count", "/ model / \\ (cfg_name + '_' + time_str) print('=> creating {}'.format(tensorboard_log_dir)) tensorboard_log_dir.mkdir(parents=True,", "_nwd_keys): weight_decay = 0 logger.info(f'Params: {key}, LR: {lr}, Weight_Decay: {weight_decay}') params += [{\"params\":", "the loss on multi-gpu to reduce the memory cost in the main gpu.", "= label_count[cur_index] return confusion_matrix def get_optimizer(config, model): _nwd_keys = ('bias', 'bn', 'norm', 'prelu',", "return optimizer def get_group_gn(dim, dim_per_gp, num_groups): \"\"\"get number of groups used by GroupNorm,", "matrix by given label and pred \"\"\" output = pred.cpu().numpy().transpose(0, 2, 3, 1)", "not value.requires_grad: continue lr = config.TRAIN.LR weight_decay = config.TRAIN.WD if 'head' in key:", "update(self, val, weight=1): if not self.initialized: self.initialize(val, weight) else: self.add(val, weight) def add(self,", "------------------------------------------------------------------------------ # Copyright (c) Microsoft # 
Licensed under the MIT License. # Written", "root_output_dir = Path(cfg.OUTPUT_DIR) # set up logger if not root_output_dir.exists(): print('=> creating {}'.format(root_output_dir))", "* num_class + seg_pred).astype('int32') label_count = np.bincount(index) confusion_matrix = np.zeros((num_class, num_class)) for i_label", "from pathlib import Path import numpy as np import torch import torch.nn as", "dim // dim_per_gp else: assert dim % num_groups == 0, \\ \"dim: {},", "= i_label * num_class + i_pred if cur_index < len(label_count): confusion_matrix[i_label, i_pred] =", "value in model.named_parameters(): if not value.requires_grad: continue lr = config.TRAIN.LR weight_decay = config.TRAIN.WD", "np.bincount(index) confusion_matrix = np.zeros((num_class, num_class)) for i_label in range(num_class): for i_pred in range(num_class):", "dim_per_gp == -1 or num_groups == -1, \\ \"GroupNorm: can only specify G", "num_class, ignore=-1): \"\"\" Calcute the confusion matrix by given label and pred \"\"\"", "in key: if any(key.find(sub) != -1 for sub in _nwd_keys): weight_decay = 0", "num_class + seg_pred).astype('int32') label_count = np.bincount(index) confusion_matrix = np.zeros((num_class, num_class)) for i_label in", "self.model(inputs) loss = self.loss(outputs, labels) return loss, outputs def get_world_size(): if not torch.distributed.is_initialized():", "in key: lr *= 10 if any(key.find(sub) != -1 for sub in _nwd_keys):", "if config.TRAIN.OPTIMIZER == 'sgd': optimizer = torch.optim.SGD(params, lr=config.TRAIN.LR, momentum=config.TRAIN.MOMENTUM, weight_decay=config.TRAIN.WD, nesterov=config.TRAIN.NESTEROV, ) elif", "{}'.format(tensorboard_log_dir)) tensorboard_log_dir.mkdir(parents=True, exist_ok=True) return logger, str(final_output_dir), str(tensorboard_log_dir) def get_confusion_matrix(label, pred, size, num_class, ignore=-1):", "[{\"params\": [value], \"lr\": lr, \"weight_decay\": weight_decay}] if config.TRAIN.OPTIMIZER == 'sgd': optimizer = 
torch.optim.SGD(params,", "Licensed under the MIT License. # Written by <NAME> (<EMAIL>) # ------------------------------------------------------------------------------ from", "= root_output_dir / dataset / cfg_name print('=> creating {}'.format(final_output_dir)) final_output_dir.mkdir(parents=True, exist_ok=True) time_str =", "(<EMAIL>) # ------------------------------------------------------------------------------ from __future__ import absolute_import from __future__ import division from __future__", "seg_gt = seg_gt[ignore_index] seg_pred = seg_pred[ignore_index] index = (seg_gt * num_class + seg_pred).astype('int32')", "False self.val = None self.avg = None self.sum = None self.count = None", "format=head) logger = logging.getLogger() logger.setLevel(logging.INFO) console = logging.StreamHandler() logging.getLogger('').addHandler(console) tensorboard_log_dir = Path(cfg.LOG_DIR) /", "\"lr\": lr, \"weight_decay\": weight_decay}] if config.TRAIN.OPTIMIZER == 'sgd': optimizer = torch.optim.SGD(params, lr=config.TRAIN.LR, momentum=config.TRAIN.MOMENTUM,", "weight self.count += weight self.avg = self.sum / self.count def value(self): return self.val", "/ dataset / model / \\ (cfg_name + '_' + time_str) print('=> creating", "* num_class + i_pred if cur_index < len(label_count): confusion_matrix[i_label, i_pred] = label_count[cur_index] return", "cur_index < len(label_count): confusion_matrix[i_label, i_pred] = label_count[cur_index] return confusion_matrix def get_optimizer(config, model): _nwd_keys", "self.sum = None self.count = None def initialize(self, val, weight): self.val = val", "+ '_' + time_str) print('=> creating {}'.format(tensorboard_log_dir)) tensorboard_log_dir.mkdir(parents=True, exist_ok=True) return logger, str(final_output_dir), str(tensorboard_log_dir)", "str(tensorboard_log_dir) def get_confusion_matrix(label, pred, size, num_class, ignore=-1): \"\"\" Calcute the confusion matrix by", "phase) final_log_file = final_output_dir / log_file head = 
'%(asctime)-15s %(message)s' logging.basicConfig(filename=str(final_log_file), format=head) logger", "= torch.optim.Adam(params, lr=config.TRAIN.LR, amsgrad=config.TRAIN.AMSGRAD ) else: raise NotImplementedError return optimizer def get_group_gn(dim, dim_per_gp,", "if not self.initialized: self.initialize(val, weight) else: self.add(val, weight) def add(self, val, weight): self.val", "tensorboard_log_dir = Path(cfg.LOG_DIR) / dataset / model / \\ (cfg_name + '_' +", "self.count = weight self.initialized = True def update(self, val, weight=1): if not self.initialized:", "None self.sum = None self.count = None def initialize(self, val, weight): self.val =", "time_str = time.strftime('%Y-%m-%d-%H-%M') log_file = '{}_{}_{}.log'.format(cfg_name, time_str, phase) final_log_file = final_output_dir / log_file", "elif 'base' in key: if any(key.find(sub) != -1 for sub in _nwd_keys): weight_decay", "\"\"\"Computes and stores the average and current value\"\"\" def __init__(self): self.initialized = False", "optimizer = torch.optim.SGD(params, lr=config.TRAIN.LR, momentum=config.TRAIN.MOMENTUM, weight_decay=config.TRAIN.WD, nesterov=config.TRAIN.NESTEROV, ) elif config.TRAIN.OPTIMIZER == 'adam': optimizer", "https://discuss.pytorch.org/t/dataparallel-imbalanced-memory-usage/22551/21 \"\"\" def __init__(self, model, loss): super(FullModel, self).__init__() self.model = model self.loss =", "{lr}, Weight_Decay: {weight_decay}') params += [{\"params\": [value], \"lr\": lr, \"weight_decay\": weight_decay}] if config.TRAIN.OPTIMIZER", "0 return torch.distributed.get_rank() class AverageMeter(object): \"\"\"Computes and stores the average and current value\"\"\"", "= seg_gt[ignore_index] seg_pred = seg_pred[ignore_index] index = (seg_gt * num_class + seg_pred).astype('int32') label_count", "'bn', 'norm', 'prelu', 'nwd') params = [] for key, value in model.named_parameters(): if", "else: assert dim % num_groups == 0, \\ \"dim: {}, num_groups: {}\".format(dim, num_groups)", 
"super(FullModel, self).__init__() self.model = model self.loss = loss def forward(self, inputs, labels): outputs", "params += [{\"params\": [value], \"lr\": lr, \"weight_decay\": weight_decay}] if config.TRAIN.OPTIMIZER == 'sgd': optimizer", "-1 or num_groups == -1, \\ \"GroupNorm: can only specify G or C/G.\"", "def get_confusion_matrix(label, pred, size, num_class, ignore=-1): \"\"\" Calcute the confusion matrix by given", "axis=3), dtype=np.uint8) seg_gt = np.asarray( label.cpu().numpy()[:, :size[-2], :size[-1]], dtype=np.int) ignore_index = seg_gt !=", "lr=config.TRAIN.LR, momentum=config.TRAIN.MOMENTUM, weight_decay=config.TRAIN.WD, nesterov=config.TRAIN.NESTEROV, ) elif config.TRAIN.OPTIMIZER == 'adam': optimizer = torch.optim.Adam(params, lr=config.TRAIN.LR,", "self.initialized: self.initialize(val, weight) else: self.add(val, weight) def add(self, val, weight): self.val = val", "exist_ok=True) time_str = time.strftime('%Y-%m-%d-%H-%M') log_file = '{}_{}_{}.log'.format(cfg_name, time_str, phase) final_log_file = final_output_dir /", "for sub in _nwd_keys): weight_decay = 0 logger.info(f'Params: {key}, LR: {lr}, Weight_Decay: {weight_decay}')", "Weight_Decay: {weight_decay}') params += [{\"params\": [value], \"lr\": lr, \"weight_decay\": weight_decay}] if config.TRAIN.OPTIMIZER ==", "and pred \"\"\" output = pred.cpu().numpy().transpose(0, 2, 3, 1) seg_pred = np.asarray(np.argmax(output, axis=3),", "!= ignore seg_gt = seg_gt[ignore_index] seg_pred = seg_pred[ignore_index] index = (seg_gt * num_class", "* weight self.count = weight self.initialized = True def update(self, val, weight=1): if", "+= weight self.avg = self.sum / self.count def value(self): return self.val def average(self):", "('bias', 'bn', 'norm', 'prelu', 'nwd') params = [] for key, value in model.named_parameters():", "import absolute_import from __future__ import division from __future__ import print_function import os import", "self.loss(outputs, labels) return loss, outputs def 
get_world_size(): if not torch.distributed.is_initialized(): return 1 return", "2, 3, 1) seg_pred = np.asarray(np.argmax(output, axis=3), dtype=np.uint8) seg_gt = np.asarray( label.cpu().numpy()[:, :size[-2],", "self.model = model self.loss = loss def forward(self, inputs, labels): outputs = self.model(inputs)", "print_function import os import logging import time from pathlib import Path import numpy", ":size[-1]], dtype=np.int) ignore_index = seg_gt != ignore seg_gt = seg_gt[ignore_index] seg_pred = seg_pred[ignore_index]", "== 'adam': optimizer = torch.optim.Adam(params, lr=config.TRAIN.LR, amsgrad=config.TRAIN.AMSGRAD ) else: raise NotImplementedError return optimizer", "0 logger.info(f'Params: {key}, LR: {lr}, Weight_Decay: {weight_decay}') elif 'base' in key: if any(key.find(sub)", "{weight_decay}') params += [{\"params\": [value], \"lr\": lr, \"weight_decay\": weight_decay}] if config.TRAIN.OPTIMIZER == 'sgd':", "pred.cpu().numpy().transpose(0, 2, 3, 1) seg_pred = np.asarray(np.argmax(output, axis=3), dtype=np.uint8) seg_gt = np.asarray( label.cpu().numpy()[:,", "initialize(self, val, weight): self.val = val self.avg = val self.sum = val *", "'prelu', 'nwd') params = [] for key, value in model.named_parameters(): if not value.requires_grad:", "time from pathlib import Path import numpy as np import torch import torch.nn", "set up logger if not root_output_dir.exists(): print('=> creating {}'.format(root_output_dir)) root_output_dir.mkdir() dataset = cfg.DATASET.DATASET", "logger, str(final_output_dir), str(tensorboard_log_dir) def get_confusion_matrix(label, pred, size, num_class, ignore=-1): \"\"\" Calcute the confusion", "config.TRAIN.LR weight_decay = config.TRAIN.WD if 'head' in key: lr *= 10 if any(key.find(sub)", "stores the average and current value\"\"\" def __init__(self): self.initialized = False self.val =", "torch import torch.nn as nn logger = logging.getLogger(__name__) class FullModel(nn.Module): \"\"\" Distribute the", "val * weight 
self.count = weight self.initialized = True def update(self, val, weight=1):", "logger.info(f'Params: {key}, LR: {lr}, Weight_Decay: {weight_decay}') params += [{\"params\": [value], \"lr\": lr, \"weight_decay\":", "\"GroupNorm: can only specify G or C/G.\" if dim_per_gp > 0: assert dim", "seg_pred = seg_pred[ignore_index] index = (seg_gt * num_class + seg_pred).astype('int32') label_count = np.bincount(index)", ") else: raise NotImplementedError return optimizer def get_group_gn(dim, dim_per_gp, num_groups): \"\"\"get number of", "if not root_output_dir.exists(): print('=> creating {}'.format(root_output_dir)) root_output_dir.mkdir() dataset = cfg.DATASET.DATASET model = cfg.MODEL.NAME", "= seg_pred[ignore_index] index = (seg_gt * num_class + seg_pred).astype('int32') label_count = np.bincount(index) confusion_matrix", "val, weight): self.val = val self.sum += val * weight self.count += weight", "logging.getLogger() logger.setLevel(logging.INFO) console = logging.StreamHandler() logging.getLogger('').addHandler(console) tensorboard_log_dir = Path(cfg.LOG_DIR) / dataset / model", "seg_gt = np.asarray( label.cpu().numpy()[:, :size[-2], :size[-1]], dtype=np.int) ignore_index = seg_gt != ignore seg_gt", "= final_output_dir / log_file head = '%(asctime)-15s %(message)s' logging.basicConfig(filename=str(final_log_file), format=head) logger = logging.getLogger()", "model = cfg.MODEL.NAME cfg_name = os.path.basename(cfg_name).split('.')[0] final_output_dir = root_output_dir / dataset / cfg_name", "number of channels.\"\"\" assert dim_per_gp == -1 or num_groups == -1, \\ \"GroupNorm:", "range(num_class): for i_pred in range(num_class): cur_index = i_label * num_class + i_pred if", "current value\"\"\" def __init__(self): self.initialized = False self.val = None self.avg = None", "weight=1): if not self.initialized: self.initialize(val, weight) else: self.add(val, weight) def add(self, val, weight):", "# 
------------------------------------------------------------------------------ # Copyright (c) Microsoft # Licensed under the MIT License. #", "given label and pred \"\"\" output = pred.cpu().numpy().transpose(0, 2, 3, 1) seg_pred =", "optimizer = torch.optim.Adam(params, lr=config.TRAIN.LR, amsgrad=config.TRAIN.AMSGRAD ) else: raise NotImplementedError return optimizer def get_group_gn(dim,", "loss): super(FullModel, self).__init__() self.model = model self.loss = loss def forward(self, inputs, labels):", "assert dim_per_gp == -1 or num_groups == -1, \\ \"GroupNorm: can only specify", "for i_pred in range(num_class): cur_index = i_label * num_class + i_pred if cur_index", "assert dim % dim_per_gp == 0, \\ \"dim: {}, dim_per_gp: {}\".format(dim, dim_per_gp) group_gn", "np.asarray( label.cpu().numpy()[:, :size[-2], :size[-1]], dtype=np.int) ignore_index = seg_gt != ignore seg_gt = seg_gt[ignore_index]", "Path(cfg.OUTPUT_DIR) # set up logger if not root_output_dir.exists(): print('=> creating {}'.format(root_output_dir)) root_output_dir.mkdir() dataset", "License. # Written by <NAME> (<EMAIL>) # ------------------------------------------------------------------------------ from __future__ import absolute_import from" ]
[ "[ ('supports', '0005_auto_20160105_1222'), ] operations = [ migrations.AlterModelOptions( name='baseobjectssupport', options={}, ), migrations.AlterModelTable( name='baseobjectssupport',", "-*- coding: utf-8 -*- from __future__ import unicode_literals from django.db import migrations, models", "state_operations=[ migrations.AddField( model_name='baseobjectssupport', name='baseobject', field=ralph.lib.mixins.fields.BaseObjectForeignKey(default=0, verbose_name='Asset', to='assets.BaseObject', related_name='supports'), preserve_default=False, ), migrations.AddField( model_name='baseobjectssupport', name='support',", "] operations = [ migrations.AlterModelOptions( name='baseobjectssupport', options={}, ), migrations.AlterModelTable( name='baseobjectssupport', table=None, ), migrations.SeparateDatabaseAndState(", "class Migration(migrations.Migration): dependencies = [ ('supports', '0005_auto_20160105_1222'), ] operations = [ migrations.AlterModelOptions( name='baseobjectssupport',", "), migrations.SeparateDatabaseAndState( state_operations=[ migrations.AddField( model_name='baseobjectssupport', name='baseobject', field=ralph.lib.mixins.fields.BaseObjectForeignKey(default=0, verbose_name='Asset', to='assets.BaseObject', related_name='supports'), preserve_default=False, ), migrations.AddField(", "unicode_literals from django.db import migrations, models import ralph.lib.mixins.fields class Migration(migrations.Migration): dependencies = [", "to='assets.BaseObject', related_name='supports'), preserve_default=False, ), migrations.AddField( model_name='baseobjectssupport', name='support', field=models.ForeignKey(default=0, to='supports.Support'), preserve_default=False, ), ], database_operations=[]", "utf-8 -*- from __future__ import unicode_literals from django.db import migrations, models import ralph.lib.mixins.fields", "name='baseobjectssupport', table=None, ), migrations.SeparateDatabaseAndState( state_operations=[ migrations.AddField( 
model_name='baseobjectssupport', name='baseobject', field=ralph.lib.mixins.fields.BaseObjectForeignKey(default=0, verbose_name='Asset', to='assets.BaseObject', related_name='supports'), preserve_default=False,", "import unicode_literals from django.db import migrations, models import ralph.lib.mixins.fields class Migration(migrations.Migration): dependencies =", "coding: utf-8 -*- from __future__ import unicode_literals from django.db import migrations, models import", "= [ migrations.AlterModelOptions( name='baseobjectssupport', options={}, ), migrations.AlterModelTable( name='baseobjectssupport', table=None, ), migrations.SeparateDatabaseAndState( state_operations=[ migrations.AddField(", "Migration(migrations.Migration): dependencies = [ ('supports', '0005_auto_20160105_1222'), ] operations = [ migrations.AlterModelOptions( name='baseobjectssupport', options={},", "[ migrations.AlterModelOptions( name='baseobjectssupport', options={}, ), migrations.AlterModelTable( name='baseobjectssupport', table=None, ), migrations.SeparateDatabaseAndState( state_operations=[ migrations.AddField( model_name='baseobjectssupport',", "table=None, ), migrations.SeparateDatabaseAndState( state_operations=[ migrations.AddField( model_name='baseobjectssupport', name='baseobject', field=ralph.lib.mixins.fields.BaseObjectForeignKey(default=0, verbose_name='Asset', to='assets.BaseObject', related_name='supports'), preserve_default=False, ),", "('supports', '0005_auto_20160105_1222'), ] operations = [ migrations.AlterModelOptions( name='baseobjectssupport', options={}, ), migrations.AlterModelTable( name='baseobjectssupport', table=None,", "django.db import migrations, models import ralph.lib.mixins.fields class Migration(migrations.Migration): dependencies = [ ('supports', '0005_auto_20160105_1222'),", "migrations.AlterModelOptions( name='baseobjectssupport', options={}, ), migrations.AlterModelTable( name='baseobjectssupport', table=None, ), migrations.SeparateDatabaseAndState( 
state_operations=[ migrations.AddField( model_name='baseobjectssupport', name='baseobject',", "models import ralph.lib.mixins.fields class Migration(migrations.Migration): dependencies = [ ('supports', '0005_auto_20160105_1222'), ] operations =", "operations = [ migrations.AlterModelOptions( name='baseobjectssupport', options={}, ), migrations.AlterModelTable( name='baseobjectssupport', table=None, ), migrations.SeparateDatabaseAndState( state_operations=[", "field=ralph.lib.mixins.fields.BaseObjectForeignKey(default=0, verbose_name='Asset', to='assets.BaseObject', related_name='supports'), preserve_default=False, ), migrations.AddField( model_name='baseobjectssupport', name='support', field=models.ForeignKey(default=0, to='supports.Support'), preserve_default=False, ),", "dependencies = [ ('supports', '0005_auto_20160105_1222'), ] operations = [ migrations.AlterModelOptions( name='baseobjectssupport', options={}, ),", "-*- from __future__ import unicode_literals from django.db import migrations, models import ralph.lib.mixins.fields class", "related_name='supports'), preserve_default=False, ), migrations.AddField( model_name='baseobjectssupport', name='support', field=models.ForeignKey(default=0, to='supports.Support'), preserve_default=False, ), ], database_operations=[] ),", "migrations, models import ralph.lib.mixins.fields class Migration(migrations.Migration): dependencies = [ ('supports', '0005_auto_20160105_1222'), ] operations", "ralph.lib.mixins.fields class Migration(migrations.Migration): dependencies = [ ('supports', '0005_auto_20160105_1222'), ] operations = [ migrations.AlterModelOptions(", "), migrations.AlterModelTable( name='baseobjectssupport', table=None, ), migrations.SeparateDatabaseAndState( state_operations=[ migrations.AddField( model_name='baseobjectssupport', name='baseobject', field=ralph.lib.mixins.fields.BaseObjectForeignKey(default=0, verbose_name='Asset', to='assets.BaseObject',", "import ralph.lib.mixins.fields class 
Migration(migrations.Migration): dependencies = [ ('supports', '0005_auto_20160105_1222'), ] operations = [", "preserve_default=False, ), migrations.AddField( model_name='baseobjectssupport', name='support', field=models.ForeignKey(default=0, to='supports.Support'), preserve_default=False, ), ], database_operations=[] ), ]", "= [ ('supports', '0005_auto_20160105_1222'), ] operations = [ migrations.AlterModelOptions( name='baseobjectssupport', options={}, ), migrations.AlterModelTable(", "name='baseobject', field=ralph.lib.mixins.fields.BaseObjectForeignKey(default=0, verbose_name='Asset', to='assets.BaseObject', related_name='supports'), preserve_default=False, ), migrations.AddField( model_name='baseobjectssupport', name='support', field=models.ForeignKey(default=0, to='supports.Support'), preserve_default=False,", "from __future__ import unicode_literals from django.db import migrations, models import ralph.lib.mixins.fields class Migration(migrations.Migration):", "'0005_auto_20160105_1222'), ] operations = [ migrations.AlterModelOptions( name='baseobjectssupport', options={}, ), migrations.AlterModelTable( name='baseobjectssupport', table=None, ),", "migrations.SeparateDatabaseAndState( state_operations=[ migrations.AddField( model_name='baseobjectssupport', name='baseobject', field=ralph.lib.mixins.fields.BaseObjectForeignKey(default=0, verbose_name='Asset', to='assets.BaseObject', related_name='supports'), preserve_default=False, ), migrations.AddField( model_name='baseobjectssupport',", "# -*- coding: utf-8 -*- from __future__ import unicode_literals from django.db import migrations,", "<filename>src/ralph/supports/migrations/0006_auto_20160615_0805.py # -*- coding: utf-8 -*- from __future__ import unicode_literals from django.db import", "name='baseobjectssupport', options={}, ), migrations.AlterModelTable( name='baseobjectssupport', table=None, ), migrations.SeparateDatabaseAndState( state_operations=[ migrations.AddField( model_name='baseobjectssupport', 
name='baseobject', field=ralph.lib.mixins.fields.BaseObjectForeignKey(default=0,", "verbose_name='Asset', to='assets.BaseObject', related_name='supports'), preserve_default=False, ), migrations.AddField( model_name='baseobjectssupport', name='support', field=models.ForeignKey(default=0, to='supports.Support'), preserve_default=False, ), ],", "from django.db import migrations, models import ralph.lib.mixins.fields class Migration(migrations.Migration): dependencies = [ ('supports',", "migrations.AlterModelTable( name='baseobjectssupport', table=None, ), migrations.SeparateDatabaseAndState( state_operations=[ migrations.AddField( model_name='baseobjectssupport', name='baseobject', field=ralph.lib.mixins.fields.BaseObjectForeignKey(default=0, verbose_name='Asset', to='assets.BaseObject', related_name='supports'),", "__future__ import unicode_literals from django.db import migrations, models import ralph.lib.mixins.fields class Migration(migrations.Migration): dependencies", "import migrations, models import ralph.lib.mixins.fields class Migration(migrations.Migration): dependencies = [ ('supports', '0005_auto_20160105_1222'), ]", "options={}, ), migrations.AlterModelTable( name='baseobjectssupport', table=None, ), migrations.SeparateDatabaseAndState( state_operations=[ migrations.AddField( model_name='baseobjectssupport', name='baseobject', field=ralph.lib.mixins.fields.BaseObjectForeignKey(default=0, verbose_name='Asset',", "model_name='baseobjectssupport', name='baseobject', field=ralph.lib.mixins.fields.BaseObjectForeignKey(default=0, verbose_name='Asset', to='assets.BaseObject', related_name='supports'), preserve_default=False, ), migrations.AddField( model_name='baseobjectssupport', name='support', field=models.ForeignKey(default=0, to='supports.Support'),", "migrations.AddField( model_name='baseobjectssupport', name='baseobject', field=ralph.lib.mixins.fields.BaseObjectForeignKey(default=0, verbose_name='Asset', to='assets.BaseObject', related_name='supports'), 
preserve_default=False, ), migrations.AddField( model_name='baseobjectssupport', name='support', field=models.ForeignKey(default=0," ]
[ "f.write(textwrap.dedent(f\"\"\" # ::: SF selection: file_filters: - '/{module.lower()}/' \"\"\")) for typ in root.lookup(\"SF\").walk_types():", "{os.path.dirname(os.path.relpath(loc.filename, \"src\")) for loc in typ.locations} if module.lower() == cur_module: name = typ.name", "f\"api/{path}.md\" with mkdocs_gen_files.open(filename, \"w\") as f: f.write(textwrap.dedent(f\"\"\"\\ # ::: {full_name} \"\"\")) if typ.locations:", "file_filters: - '/{module.lower()}/' \"\"\")) for typ in root.lookup(\"SF\").walk_types(): [cur_module] = {os.path.dirname(os.path.relpath(loc.filename, \"src\")) for", "SF selection: file_filters: - '/{module.lower()}/' \"\"\")) for typ in root.lookup(\"SF\").walk_types(): [cur_module] = {os.path.dirname(os.path.relpath(loc.filename,", "f\"* [{name}]({path}.md)\", file=nav) filename = f\"api/{path}.md\" with mkdocs_gen_files.open(filename, \"w\") as f: f.write(textwrap.dedent(f\"\"\"\\ #", "selection: file_filters: - '/{module.lower()}/' \"\"\")) for typ in root.lookup(\"SF\").walk_types(): [cur_module] = {os.path.dirname(os.path.relpath(loc.filename, \"src\"))", "\"\"]: if module: print(f\"* [{module} module]({module.lower()}.md)\", file=nav) with mkdocs_gen_files.open(f\"api/{module.lower()}.md\", \"w\") as f: f.write(textwrap.dedent(f\"\"\"", "in typ.locations} if module.lower() == cur_module: name = typ.name full_name = typ.abs_id path", "<filename>tools/gen_doc_files.py import os import textwrap import mkdocs_gen_files root = mkdocs_gen_files.config[\"plugins\"][\"mkdocstrings\"].get_handler(\"crystal\").collector.root nav = mkdocs_gen_files.open(f\"api/index.md\",", "- 1 print(\" \" * indent + f\"* [{name}]({path}.md)\", file=nav) filename = f\"api/{path}.md\"", "if module.lower() == cur_module: name = typ.name full_name = typ.abs_id path = full_name.replace(\"::\",", "with mkdocs_gen_files.open(f\"api/{module.lower()}.md\", \"w\") as f: f.write(textwrap.dedent(f\"\"\" # ::: SF selection: file_filters: - '/{module.lower()}/'", "f: 
f.write(textwrap.dedent(f\"\"\" # ::: SF selection: file_filters: - '/{module.lower()}/' \"\"\")) for typ in", "root = mkdocs_gen_files.config[\"plugins\"][\"mkdocstrings\"].get_handler(\"crystal\").collector.root nav = mkdocs_gen_files.open(f\"api/index.md\", \"w\") for module in [\"System\", \"Window\", \"Graphics\",", "as f: f.write(textwrap.dedent(f\"\"\" # ::: SF selection: file_filters: - '/{module.lower()}/' \"\"\")) for typ", "module.lower() == cur_module: name = typ.name full_name = typ.abs_id path = full_name.replace(\"::\", \"/\")", "path = full_name.replace(\"::\", \"/\") indent = bool(module) + full_name.count(\"::\") - 1 print(\" \"", "with mkdocs_gen_files.open(filename, \"w\") as f: f.write(textwrap.dedent(f\"\"\"\\ # ::: {full_name} \"\"\")) if typ.locations: mkdocs_gen_files.set_edit_path(filename,", "+ f\"* [{name}]({path}.md)\", file=nav) filename = f\"api/{path}.md\" with mkdocs_gen_files.open(filename, \"w\") as f: f.write(textwrap.dedent(f\"\"\"\\", "= mkdocs_gen_files.config[\"plugins\"][\"mkdocstrings\"].get_handler(\"crystal\").collector.root nav = mkdocs_gen_files.open(f\"api/index.md\", \"w\") for module in [\"System\", \"Window\", \"Graphics\", \"Audio\",", "for loc in typ.locations} if module.lower() == cur_module: name = typ.name full_name =", "= {os.path.dirname(os.path.relpath(loc.filename, \"src\")) for loc in typ.locations} if module.lower() == cur_module: name =", "== cur_module: name = typ.name full_name = typ.abs_id path = full_name.replace(\"::\", \"/\") indent", "textwrap import mkdocs_gen_files root = mkdocs_gen_files.config[\"plugins\"][\"mkdocstrings\"].get_handler(\"crystal\").collector.root nav = mkdocs_gen_files.open(f\"api/index.md\", \"w\") for module in", "# ::: SF selection: file_filters: - '/{module.lower()}/' \"\"\")) for typ in root.lookup(\"SF\").walk_types(): [cur_module]", "1 print(\" \" * indent + f\"* [{name}]({path}.md)\", file=nav) filename = f\"api/{path}.md\" with", "loc in typ.locations} if 
module.lower() == cur_module: name = typ.name full_name = typ.abs_id", "mkdocs_gen_files.open(f\"api/{module.lower()}.md\", \"w\") as f: f.write(textwrap.dedent(f\"\"\" # ::: SF selection: file_filters: - '/{module.lower()}/' \"\"\"))", "\"\"\")) for typ in root.lookup(\"SF\").walk_types(): [cur_module] = {os.path.dirname(os.path.relpath(loc.filename, \"src\")) for loc in typ.locations}", "typ.locations} if module.lower() == cur_module: name = typ.name full_name = typ.abs_id path =", "file=nav) filename = f\"api/{path}.md\" with mkdocs_gen_files.open(filename, \"w\") as f: f.write(textwrap.dedent(f\"\"\"\\ # ::: {full_name}", "typ.name full_name = typ.abs_id path = full_name.replace(\"::\", \"/\") indent = bool(module) + full_name.count(\"::\")", "indent = bool(module) + full_name.count(\"::\") - 1 print(\" \" * indent + f\"*", "full_name.replace(\"::\", \"/\") indent = bool(module) + full_name.count(\"::\") - 1 print(\" \" * indent", "import textwrap import mkdocs_gen_files root = mkdocs_gen_files.config[\"plugins\"][\"mkdocstrings\"].get_handler(\"crystal\").collector.root nav = mkdocs_gen_files.open(f\"api/index.md\", \"w\") for module", "bool(module) + full_name.count(\"::\") - 1 print(\" \" * indent + f\"* [{name}]({path}.md)\", file=nav)", "print(f\"* [{module} module]({module.lower()}.md)\", file=nav) with mkdocs_gen_files.open(f\"api/{module.lower()}.md\", \"w\") as f: f.write(textwrap.dedent(f\"\"\" # ::: SF", "mkdocs_gen_files.open(filename, \"w\") as f: f.write(textwrap.dedent(f\"\"\"\\ # ::: {full_name} \"\"\")) if typ.locations: mkdocs_gen_files.set_edit_path(filename, typ.locations[0].url)", "= full_name.replace(\"::\", \"/\") indent = bool(module) + full_name.count(\"::\") - 1 print(\" \" *", "\"w\") for module in [\"System\", \"Window\", \"Graphics\", \"Audio\", \"Network\", \"\"]: if module: print(f\"*", "for module in [\"System\", \"Window\", \"Graphics\", \"Audio\", \"Network\", \"\"]: if module: print(f\"* [{module}", "typ in 
root.lookup(\"SF\").walk_types(): [cur_module] = {os.path.dirname(os.path.relpath(loc.filename, \"src\")) for loc in typ.locations} if module.lower()", "indent + f\"* [{name}]({path}.md)\", file=nav) filename = f\"api/{path}.md\" with mkdocs_gen_files.open(filename, \"w\") as f:", "if module: print(f\"* [{module} module]({module.lower()}.md)\", file=nav) with mkdocs_gen_files.open(f\"api/{module.lower()}.md\", \"w\") as f: f.write(textwrap.dedent(f\"\"\" #", "\"Window\", \"Graphics\", \"Audio\", \"Network\", \"\"]: if module: print(f\"* [{module} module]({module.lower()}.md)\", file=nav) with mkdocs_gen_files.open(f\"api/{module.lower()}.md\",", "\"Audio\", \"Network\", \"\"]: if module: print(f\"* [{module} module]({module.lower()}.md)\", file=nav) with mkdocs_gen_files.open(f\"api/{module.lower()}.md\", \"w\") as", "[{name}]({path}.md)\", file=nav) filename = f\"api/{path}.md\" with mkdocs_gen_files.open(filename, \"w\") as f: f.write(textwrap.dedent(f\"\"\"\\ # :::", "full_name.count(\"::\") - 1 print(\" \" * indent + f\"* [{name}]({path}.md)\", file=nav) filename =", "name = typ.name full_name = typ.abs_id path = full_name.replace(\"::\", \"/\") indent = bool(module)", "typ.abs_id path = full_name.replace(\"::\", \"/\") indent = bool(module) + full_name.count(\"::\") - 1 print(\"", "mkdocs_gen_files root = mkdocs_gen_files.config[\"plugins\"][\"mkdocstrings\"].get_handler(\"crystal\").collector.root nav = mkdocs_gen_files.open(f\"api/index.md\", \"w\") for module in [\"System\", \"Window\",", "+ full_name.count(\"::\") - 1 print(\" \" * indent + f\"* [{name}]({path}.md)\", file=nav) filename", "mkdocs_gen_files.open(f\"api/index.md\", \"w\") for module in [\"System\", \"Window\", \"Graphics\", \"Audio\", \"Network\", \"\"]: if module:", "import os import textwrap import mkdocs_gen_files root = mkdocs_gen_files.config[\"plugins\"][\"mkdocstrings\"].get_handler(\"crystal\").collector.root nav = mkdocs_gen_files.open(f\"api/index.md\", \"w\")", "\"src\")) for loc 
in typ.locations} if module.lower() == cur_module: name = typ.name full_name", "= typ.abs_id path = full_name.replace(\"::\", \"/\") indent = bool(module) + full_name.count(\"::\") - 1", "in root.lookup(\"SF\").walk_types(): [cur_module] = {os.path.dirname(os.path.relpath(loc.filename, \"src\")) for loc in typ.locations} if module.lower() ==", "module in [\"System\", \"Window\", \"Graphics\", \"Audio\", \"Network\", \"\"]: if module: print(f\"* [{module} module]({module.lower()}.md)\",", "[{module} module]({module.lower()}.md)\", file=nav) with mkdocs_gen_files.open(f\"api/{module.lower()}.md\", \"w\") as f: f.write(textwrap.dedent(f\"\"\" # ::: SF selection:", "for typ in root.lookup(\"SF\").walk_types(): [cur_module] = {os.path.dirname(os.path.relpath(loc.filename, \"src\")) for loc in typ.locations} if", "module]({module.lower()}.md)\", file=nav) with mkdocs_gen_files.open(f\"api/{module.lower()}.md\", \"w\") as f: f.write(textwrap.dedent(f\"\"\" # ::: SF selection: file_filters:", "'/{module.lower()}/' \"\"\")) for typ in root.lookup(\"SF\").walk_types(): [cur_module] = {os.path.dirname(os.path.relpath(loc.filename, \"src\")) for loc in", "cur_module: name = typ.name full_name = typ.abs_id path = full_name.replace(\"::\", \"/\") indent =", "- '/{module.lower()}/' \"\"\")) for typ in root.lookup(\"SF\").walk_types(): [cur_module] = {os.path.dirname(os.path.relpath(loc.filename, \"src\")) for loc", "module: print(f\"* [{module} module]({module.lower()}.md)\", file=nav) with mkdocs_gen_files.open(f\"api/{module.lower()}.md\", \"w\") as f: f.write(textwrap.dedent(f\"\"\" # :::", "\" * indent + f\"* [{name}]({path}.md)\", file=nav) filename = f\"api/{path}.md\" with mkdocs_gen_files.open(filename, \"w\")", "\"Graphics\", \"Audio\", \"Network\", \"\"]: if module: print(f\"* [{module} module]({module.lower()}.md)\", file=nav) with mkdocs_gen_files.open(f\"api/{module.lower()}.md\", \"w\")", "file=nav) with mkdocs_gen_files.open(f\"api/{module.lower()}.md\", \"w\") as 
f: f.write(textwrap.dedent(f\"\"\" # ::: SF selection: file_filters: -", "\"w\") as f: f.write(textwrap.dedent(f\"\"\" # ::: SF selection: file_filters: - '/{module.lower()}/' \"\"\")) for", "nav = mkdocs_gen_files.open(f\"api/index.md\", \"w\") for module in [\"System\", \"Window\", \"Graphics\", \"Audio\", \"Network\", \"\"]:", "root.lookup(\"SF\").walk_types(): [cur_module] = {os.path.dirname(os.path.relpath(loc.filename, \"src\")) for loc in typ.locations} if module.lower() == cur_module:", "= mkdocs_gen_files.open(f\"api/index.md\", \"w\") for module in [\"System\", \"Window\", \"Graphics\", \"Audio\", \"Network\", \"\"]: if", "= typ.name full_name = typ.abs_id path = full_name.replace(\"::\", \"/\") indent = bool(module) +", "* indent + f\"* [{name}]({path}.md)\", file=nav) filename = f\"api/{path}.md\" with mkdocs_gen_files.open(filename, \"w\") as", "\"Network\", \"\"]: if module: print(f\"* [{module} module]({module.lower()}.md)\", file=nav) with mkdocs_gen_files.open(f\"api/{module.lower()}.md\", \"w\") as f:", "in [\"System\", \"Window\", \"Graphics\", \"Audio\", \"Network\", \"\"]: if module: print(f\"* [{module} module]({module.lower()}.md)\", file=nav)", "filename = f\"api/{path}.md\" with mkdocs_gen_files.open(filename, \"w\") as f: f.write(textwrap.dedent(f\"\"\"\\ # ::: {full_name} \"\"\"))", "::: SF selection: file_filters: - '/{module.lower()}/' \"\"\")) for typ in root.lookup(\"SF\").walk_types(): [cur_module] =", "= bool(module) + full_name.count(\"::\") - 1 print(\" \" * indent + f\"* [{name}]({path}.md)\",", "os import textwrap import mkdocs_gen_files root = mkdocs_gen_files.config[\"plugins\"][\"mkdocstrings\"].get_handler(\"crystal\").collector.root nav = mkdocs_gen_files.open(f\"api/index.md\", \"w\") for", "print(\" \" * indent + f\"* [{name}]({path}.md)\", file=nav) filename = f\"api/{path}.md\" with mkdocs_gen_files.open(filename,", "\"/\") indent = bool(module) + full_name.count(\"::\") - 1 print(\" \" * indent +", "= 
f\"api/{path}.md\" with mkdocs_gen_files.open(filename, \"w\") as f: f.write(textwrap.dedent(f\"\"\"\\ # ::: {full_name} \"\"\")) if", "mkdocs_gen_files.config[\"plugins\"][\"mkdocstrings\"].get_handler(\"crystal\").collector.root nav = mkdocs_gen_files.open(f\"api/index.md\", \"w\") for module in [\"System\", \"Window\", \"Graphics\", \"Audio\", \"Network\",", "[\"System\", \"Window\", \"Graphics\", \"Audio\", \"Network\", \"\"]: if module: print(f\"* [{module} module]({module.lower()}.md)\", file=nav) with", "full_name = typ.abs_id path = full_name.replace(\"::\", \"/\") indent = bool(module) + full_name.count(\"::\") -", "[cur_module] = {os.path.dirname(os.path.relpath(loc.filename, \"src\")) for loc in typ.locations} if module.lower() == cur_module: name", "import mkdocs_gen_files root = mkdocs_gen_files.config[\"plugins\"][\"mkdocstrings\"].get_handler(\"crystal\").collector.root nav = mkdocs_gen_files.open(f\"api/index.md\", \"w\") for module in [\"System\"," ]
[ "import sys if sys.platform.startswith('win'): INPUT = open('example7.txt','r') for line in INPUT: sys.stdout.write(line) INPUT.close()", "__init__(self, *args, **kwds): \"\"\"Constructor.\"\"\" pyutilib.workflow.Task.__init__(self, *args, **kwds) self.inputs.declare('dir') self.outputs.declare('list') self.add_resource(pyutilib.workflow.ExecutableResource('ls')) def execute(self): self.resource('ls').run(self.dir,", "H = TaskH() w = pyutilib.workflow.Workflow() w.add(H) print(w(dir=currdir+'dummy')) # @:ex if os.path.exists(currdir+'logfile'): os.remove(currdir+'logfile')", "open(currdir+'logfile','r') for line in INPUT: self.list.append( line.strip() ) INPUT.close() self.list.sort() H = TaskH()", "pyutilib.workflow.Task.__init__(self, *args, **kwds) self.inputs.declare('dir') self.outputs.declare('list') self.add_resource(pyutilib.workflow.ExecutableResource('ls')) def execute(self): self.resource('ls').run(self.dir, logfile=currdir+'logfile') self.list = []", "currdir = os.path.dirname(os.path.abspath(__file__))+os.sep import sys if sys.platform.startswith('win'): INPUT = open('example7.txt','r') for line in", "*args, **kwds): \"\"\"Constructor.\"\"\" pyutilib.workflow.Task.__init__(self, *args, **kwds) self.inputs.declare('dir') self.outputs.declare('list') self.add_resource(pyutilib.workflow.ExecutableResource('ls')) def execute(self): self.resource('ls').run(self.dir, logfile=currdir+'logfile')", "sys.platform.startswith('win'): INPUT = open('example7.txt','r') for line in INPUT: sys.stdout.write(line) INPUT.close() else: # @ex:", "sys.stdout.write(line) INPUT.close() else: # @ex: class TaskH(pyutilib.workflow.Task): def __init__(self, *args, **kwds): \"\"\"Constructor.\"\"\" pyutilib.workflow.Task.__init__(self,", "INPUT: sys.stdout.write(line) INPUT.close() else: # @ex: class TaskH(pyutilib.workflow.Task): def __init__(self, *args, **kwds): \"\"\"Constructor.\"\"\"", "import pyutilib.workflow import os.path import os currdir = 
os.path.dirname(os.path.abspath(__file__))+os.sep import sys if sys.platform.startswith('win'):", "in INPUT: self.list.append( line.strip() ) INPUT.close() self.list.sort() H = TaskH() w = pyutilib.workflow.Workflow()", "self.list.append( line.strip() ) INPUT.close() self.list.sort() H = TaskH() w = pyutilib.workflow.Workflow() w.add(H) print(w(dir=currdir+'dummy'))", ") INPUT.close() self.list.sort() H = TaskH() w = pyutilib.workflow.Workflow() w.add(H) print(w(dir=currdir+'dummy')) # @:ex", "**kwds): \"\"\"Constructor.\"\"\" pyutilib.workflow.Task.__init__(self, *args, **kwds) self.inputs.declare('dir') self.outputs.declare('list') self.add_resource(pyutilib.workflow.ExecutableResource('ls')) def execute(self): self.resource('ls').run(self.dir, logfile=currdir+'logfile') self.list", "[] INPUT = open(currdir+'logfile','r') for line in INPUT: self.list.append( line.strip() ) INPUT.close() self.list.sort()", "self.list = [] INPUT = open(currdir+'logfile','r') for line in INPUT: self.list.append( line.strip() )", "self.list.sort() H = TaskH() w = pyutilib.workflow.Workflow() w.add(H) print(w(dir=currdir+'dummy')) # @:ex if os.path.exists(currdir+'logfile'):", "INPUT = open('example7.txt','r') for line in INPUT: sys.stdout.write(line) INPUT.close() else: # @ex: class", "self.inputs.declare('dir') self.outputs.declare('list') self.add_resource(pyutilib.workflow.ExecutableResource('ls')) def execute(self): self.resource('ls').run(self.dir, logfile=currdir+'logfile') self.list = [] INPUT = open(currdir+'logfile','r')", "line.strip() ) INPUT.close() self.list.sort() H = TaskH() w = pyutilib.workflow.Workflow() w.add(H) print(w(dir=currdir+'dummy')) #", "self.add_resource(pyutilib.workflow.ExecutableResource('ls')) def execute(self): self.resource('ls').run(self.dir, logfile=currdir+'logfile') self.list = [] INPUT = open(currdir+'logfile','r') for line", "self.outputs.declare('list') self.add_resource(pyutilib.workflow.ExecutableResource('ls')) def execute(self): 
self.resource('ls').run(self.dir, logfile=currdir+'logfile') self.list = [] INPUT = open(currdir+'logfile','r') for", "self.resource('ls').run(self.dir, logfile=currdir+'logfile') self.list = [] INPUT = open(currdir+'logfile','r') for line in INPUT: self.list.append(", "<gh_stars>10-100 import pyutilib.workflow import os.path import os currdir = os.path.dirname(os.path.abspath(__file__))+os.sep import sys if", "TaskH(pyutilib.workflow.Task): def __init__(self, *args, **kwds): \"\"\"Constructor.\"\"\" pyutilib.workflow.Task.__init__(self, *args, **kwds) self.inputs.declare('dir') self.outputs.declare('list') self.add_resource(pyutilib.workflow.ExecutableResource('ls')) def", "sys if sys.platform.startswith('win'): INPUT = open('example7.txt','r') for line in INPUT: sys.stdout.write(line) INPUT.close() else:", "import os currdir = os.path.dirname(os.path.abspath(__file__))+os.sep import sys if sys.platform.startswith('win'): INPUT = open('example7.txt','r') for", "os currdir = os.path.dirname(os.path.abspath(__file__))+os.sep import sys if sys.platform.startswith('win'): INPUT = open('example7.txt','r') for line", "import os.path import os currdir = os.path.dirname(os.path.abspath(__file__))+os.sep import sys if sys.platform.startswith('win'): INPUT =", "execute(self): self.resource('ls').run(self.dir, logfile=currdir+'logfile') self.list = [] INPUT = open(currdir+'logfile','r') for line in INPUT:", "= open(currdir+'logfile','r') for line in INPUT: self.list.append( line.strip() ) INPUT.close() self.list.sort() H =", "= os.path.dirname(os.path.abspath(__file__))+os.sep import sys if sys.platform.startswith('win'): INPUT = open('example7.txt','r') for line in INPUT:", "if sys.platform.startswith('win'): INPUT = open('example7.txt','r') for line in INPUT: sys.stdout.write(line) INPUT.close() else: #", "def __init__(self, *args, **kwds): \"\"\"Constructor.\"\"\" pyutilib.workflow.Task.__init__(self, *args, **kwds) self.inputs.declare('dir') self.outputs.declare('list') 
self.add_resource(pyutilib.workflow.ExecutableResource('ls')) def execute(self):", "@ex: class TaskH(pyutilib.workflow.Task): def __init__(self, *args, **kwds): \"\"\"Constructor.\"\"\" pyutilib.workflow.Task.__init__(self, *args, **kwds) self.inputs.declare('dir') self.outputs.declare('list')", "class TaskH(pyutilib.workflow.Task): def __init__(self, *args, **kwds): \"\"\"Constructor.\"\"\" pyutilib.workflow.Task.__init__(self, *args, **kwds) self.inputs.declare('dir') self.outputs.declare('list') self.add_resource(pyutilib.workflow.ExecutableResource('ls'))", "for line in INPUT: self.list.append( line.strip() ) INPUT.close() self.list.sort() H = TaskH() w", "os.path.dirname(os.path.abspath(__file__))+os.sep import sys if sys.platform.startswith('win'): INPUT = open('example7.txt','r') for line in INPUT: sys.stdout.write(line)", "in INPUT: sys.stdout.write(line) INPUT.close() else: # @ex: class TaskH(pyutilib.workflow.Task): def __init__(self, *args, **kwds):", "line in INPUT: sys.stdout.write(line) INPUT.close() else: # @ex: class TaskH(pyutilib.workflow.Task): def __init__(self, *args,", "\"\"\"Constructor.\"\"\" pyutilib.workflow.Task.__init__(self, *args, **kwds) self.inputs.declare('dir') self.outputs.declare('list') self.add_resource(pyutilib.workflow.ExecutableResource('ls')) def execute(self): self.resource('ls').run(self.dir, logfile=currdir+'logfile') self.list =", "pyutilib.workflow import os.path import os currdir = os.path.dirname(os.path.abspath(__file__))+os.sep import sys if sys.platform.startswith('win'): INPUT", "**kwds) self.inputs.declare('dir') self.outputs.declare('list') self.add_resource(pyutilib.workflow.ExecutableResource('ls')) def execute(self): self.resource('ls').run(self.dir, logfile=currdir+'logfile') self.list = [] INPUT =", "def execute(self): self.resource('ls').run(self.dir, logfile=currdir+'logfile') self.list = [] INPUT = open(currdir+'logfile','r') for line in", "= [] INPUT = open(currdir+'logfile','r') for line in INPUT: 
self.list.append( line.strip() ) INPUT.close()", "INPUT.close() else: # @ex: class TaskH(pyutilib.workflow.Task): def __init__(self, *args, **kwds): \"\"\"Constructor.\"\"\" pyutilib.workflow.Task.__init__(self, *args,", "= open('example7.txt','r') for line in INPUT: sys.stdout.write(line) INPUT.close() else: # @ex: class TaskH(pyutilib.workflow.Task):", "for line in INPUT: sys.stdout.write(line) INPUT.close() else: # @ex: class TaskH(pyutilib.workflow.Task): def __init__(self,", "*args, **kwds) self.inputs.declare('dir') self.outputs.declare('list') self.add_resource(pyutilib.workflow.ExecutableResource('ls')) def execute(self): self.resource('ls').run(self.dir, logfile=currdir+'logfile') self.list = [] INPUT", "open('example7.txt','r') for line in INPUT: sys.stdout.write(line) INPUT.close() else: # @ex: class TaskH(pyutilib.workflow.Task): def", "line in INPUT: self.list.append( line.strip() ) INPUT.close() self.list.sort() H = TaskH() w =", "else: # @ex: class TaskH(pyutilib.workflow.Task): def __init__(self, *args, **kwds): \"\"\"Constructor.\"\"\" pyutilib.workflow.Task.__init__(self, *args, **kwds)", "logfile=currdir+'logfile') self.list = [] INPUT = open(currdir+'logfile','r') for line in INPUT: self.list.append( line.strip()", "INPUT = open(currdir+'logfile','r') for line in INPUT: self.list.append( line.strip() ) INPUT.close() self.list.sort() H", "INPUT: self.list.append( line.strip() ) INPUT.close() self.list.sort() H = TaskH() w = pyutilib.workflow.Workflow() w.add(H)", "INPUT.close() self.list.sort() H = TaskH() w = pyutilib.workflow.Workflow() w.add(H) print(w(dir=currdir+'dummy')) # @:ex if", "# @ex: class TaskH(pyutilib.workflow.Task): def __init__(self, *args, **kwds): \"\"\"Constructor.\"\"\" pyutilib.workflow.Task.__init__(self, *args, **kwds) self.inputs.declare('dir')", "os.path import os currdir = os.path.dirname(os.path.abspath(__file__))+os.sep import sys if sys.platform.startswith('win'): INPUT = open('example7.txt','r')" ]
[ "in required_assocs.items(): model, field = details query = {field: params.pop(assoc)} params[assoc] = model.objects.get(**query)", "params[assoc] = model.objects.get(**query) interface, created = VMInterface.objects.get_or_create(**params) if created: set_custom_fields_values(interface, custom_field_data) print(\"🧷 Created", "params in interfaces: custom_field_data = pop_custom_fields(params) for assoc, details in required_assocs.items(): model, field", "= details query = {field: params.pop(assoc)} params[assoc] = model.objects.get(**query) interface, created = VMInterface.objects.get_or_create(**params)", "sys from startup_script_utils import load_yaml, pop_custom_fields, set_custom_fields_values from virtualization.models import VirtualMachine, VMInterface interfaces", "startup_script_utils import load_yaml, pop_custom_fields, set_custom_fields_values from virtualization.models import VirtualMachine, VMInterface interfaces = load_yaml(\"/opt/netbox/initializers/virtualization_interfaces.yml\")", "interfaces = load_yaml(\"/opt/netbox/initializers/virtualization_interfaces.yml\") if interfaces is None: sys.exit() required_assocs = {\"virtual_machine\": (VirtualMachine, \"name\")}", "from virtualization.models import VirtualMachine, VMInterface interfaces = load_yaml(\"/opt/netbox/initializers/virtualization_interfaces.yml\") if interfaces is None: sys.exit()", "required_assocs = {\"virtual_machine\": (VirtualMachine, \"name\")} for params in interfaces: custom_field_data = pop_custom_fields(params) for", "assoc, details in required_assocs.items(): model, field = details query = {field: params.pop(assoc)} params[assoc]", "query = {field: params.pop(assoc)} params[assoc] = model.objects.get(**query) interface, created = VMInterface.objects.get_or_create(**params) if created:", "interface, created = VMInterface.objects.get_or_create(**params) if created: set_custom_fields_values(interface, custom_field_data) print(\"🧷 Created interface\", interface.name, 
interface.virtual_machine.name)", "(VirtualMachine, \"name\")} for params in interfaces: custom_field_data = pop_custom_fields(params) for assoc, details in", "params.pop(assoc)} params[assoc] = model.objects.get(**query) interface, created = VMInterface.objects.get_or_create(**params) if created: set_custom_fields_values(interface, custom_field_data) print(\"🧷", "= {\"virtual_machine\": (VirtualMachine, \"name\")} for params in interfaces: custom_field_data = pop_custom_fields(params) for assoc,", "model.objects.get(**query) interface, created = VMInterface.objects.get_or_create(**params) if created: set_custom_fields_values(interface, custom_field_data) print(\"🧷 Created interface\", interface.name,", "load_yaml(\"/opt/netbox/initializers/virtualization_interfaces.yml\") if interfaces is None: sys.exit() required_assocs = {\"virtual_machine\": (VirtualMachine, \"name\")} for params", "sys.exit() required_assocs = {\"virtual_machine\": (VirtualMachine, \"name\")} for params in interfaces: custom_field_data = pop_custom_fields(params)", "interfaces: custom_field_data = pop_custom_fields(params) for assoc, details in required_assocs.items(): model, field = details", "from startup_script_utils import load_yaml, pop_custom_fields, set_custom_fields_values from virtualization.models import VirtualMachine, VMInterface interfaces =", "custom_field_data = pop_custom_fields(params) for assoc, details in required_assocs.items(): model, field = details query", "model, field = details query = {field: params.pop(assoc)} params[assoc] = model.objects.get(**query) interface, created", "import load_yaml, pop_custom_fields, set_custom_fields_values from virtualization.models import VirtualMachine, VMInterface interfaces = load_yaml(\"/opt/netbox/initializers/virtualization_interfaces.yml\") if", "VMInterface interfaces = load_yaml(\"/opt/netbox/initializers/virtualization_interfaces.yml\") if interfaces is None: sys.exit() required_assocs = {\"virtual_machine\": (VirtualMachine,", 
"import VirtualMachine, VMInterface interfaces = load_yaml(\"/opt/netbox/initializers/virtualization_interfaces.yml\") if interfaces is None: sys.exit() required_assocs =", "details query = {field: params.pop(assoc)} params[assoc] = model.objects.get(**query) interface, created = VMInterface.objects.get_or_create(**params) if", "pop_custom_fields, set_custom_fields_values from virtualization.models import VirtualMachine, VMInterface interfaces = load_yaml(\"/opt/netbox/initializers/virtualization_interfaces.yml\") if interfaces is", "= model.objects.get(**query) interface, created = VMInterface.objects.get_or_create(**params) if created: set_custom_fields_values(interface, custom_field_data) print(\"🧷 Created interface\",", "field = details query = {field: params.pop(assoc)} params[assoc] = model.objects.get(**query) interface, created =", "= pop_custom_fields(params) for assoc, details in required_assocs.items(): model, field = details query =", "details in required_assocs.items(): model, field = details query = {field: params.pop(assoc)} params[assoc] =", "None: sys.exit() required_assocs = {\"virtual_machine\": (VirtualMachine, \"name\")} for params in interfaces: custom_field_data =", "interfaces is None: sys.exit() required_assocs = {\"virtual_machine\": (VirtualMachine, \"name\")} for params in interfaces:", "is None: sys.exit() required_assocs = {\"virtual_machine\": (VirtualMachine, \"name\")} for params in interfaces: custom_field_data", "{field: params.pop(assoc)} params[assoc] = model.objects.get(**query) interface, created = VMInterface.objects.get_or_create(**params) if created: set_custom_fields_values(interface, custom_field_data)", "virtualization.models import VirtualMachine, VMInterface interfaces = load_yaml(\"/opt/netbox/initializers/virtualization_interfaces.yml\") if interfaces is None: sys.exit() required_assocs", "\"name\")} for params in interfaces: custom_field_data = pop_custom_fields(params) for assoc, details in 
required_assocs.items():", "set_custom_fields_values from virtualization.models import VirtualMachine, VMInterface interfaces = load_yaml(\"/opt/netbox/initializers/virtualization_interfaces.yml\") if interfaces is None:", "in interfaces: custom_field_data = pop_custom_fields(params) for assoc, details in required_assocs.items(): model, field =", "import sys from startup_script_utils import load_yaml, pop_custom_fields, set_custom_fields_values from virtualization.models import VirtualMachine, VMInterface", "VirtualMachine, VMInterface interfaces = load_yaml(\"/opt/netbox/initializers/virtualization_interfaces.yml\") if interfaces is None: sys.exit() required_assocs = {\"virtual_machine\":", "required_assocs.items(): model, field = details query = {field: params.pop(assoc)} params[assoc] = model.objects.get(**query) interface,", "{\"virtual_machine\": (VirtualMachine, \"name\")} for params in interfaces: custom_field_data = pop_custom_fields(params) for assoc, details", "load_yaml, pop_custom_fields, set_custom_fields_values from virtualization.models import VirtualMachine, VMInterface interfaces = load_yaml(\"/opt/netbox/initializers/virtualization_interfaces.yml\") if interfaces", "for params in interfaces: custom_field_data = pop_custom_fields(params) for assoc, details in required_assocs.items(): model,", "pop_custom_fields(params) for assoc, details in required_assocs.items(): model, field = details query = {field:", "if interfaces is None: sys.exit() required_assocs = {\"virtual_machine\": (VirtualMachine, \"name\")} for params in", "= {field: params.pop(assoc)} params[assoc] = model.objects.get(**query) interface, created = VMInterface.objects.get_or_create(**params) if created: set_custom_fields_values(interface,", "for assoc, details in required_assocs.items(): model, field = details query = {field: params.pop(assoc)}", "= load_yaml(\"/opt/netbox/initializers/virtualization_interfaces.yml\") if interfaces is None: sys.exit() required_assocs = 
{\"virtual_machine\": (VirtualMachine, \"name\")} for" ]
[ "self, ) -> bytes: try: chunk = next(self.chunks) except: self.chunks = next( self.lines,", "[ Node(i) for i in range(n) ] edges = [ [] for _", "import ( dataclass, ) @dataclass class Node: id_: int = None @dataclass class", "typing import ( Generator, NoReturn ) class StdReader: def __init__( self, ) ->", "chunk def str( self, ) -> str: b = self() return b.decode() def", "GraphBFS: level: List[int] def __init__( self, graph: Graph, ): self.g = graph self.inf", "self.init_level() self.level[src] = 0 self.set_queue() que = self.queue que.append(src) while que: x =", "import ( deque, ) class GraphBFS: level: List[int] def __init__( self, graph: Graph,", "solve(self): c = self.c self.moves = (-c, -1, 1, c) self.make_graph() print(self.calc_dist()) def", "gen = self.line_chunks() yield gen def line_chunks( self, ) -> Generator: ln =", "int( self, ) -> int: return int(self.str()) from abc import ( ABC, abstractmethod,", "1 gy = reader.int() - 1 gx = reader.int() - 1 maze =", "= Graph(n) for i in range(n): edges = self.gen_edges(i) g.add_edges(edges) self.g = g", "= [ Node(i) for i in range(n) ] edges = [ [] for", "= [None] * r for i in range(r): maze[i] = reader.str() maze =", "int = 1 capacity: int = 0 @dataclass class Graph: nodes: List[Node] edges:", "g def gen_edges( self, i: int, ): edges = [] maze = self.maze", "str( self, ) -> str: b = self() return b.decode() def int( self,", "chunk = next(self.chunks) except: self.chunks = next( self.lines, ) chunk = self() return", "-1, 1, c) self.make_graph() print(self.calc_dist()) def calc_dist(self) -> int: g = self.g c", "ABC, abstractmethod, ) class Solver(ABC): def __init__(self): self.reader = StdReader() def __call__( self,", "def __init__( self, ) -> NoReturn: import sys self.buf = sys.stdin.buffer self.lines =", "import ( ABC, abstractmethod, ) class Solver(ABC): def __init__(self): self.reader = StdReader() def", "class GraphBFS: level: List[int] def __init__( self, graph: Graph, ): self.g = graph", "int, ): g = 
self.g lv = self.level que = self.queue for e", "@dataclass class Edge: id_: int = None from_ : int = ... to:", "def async_readlines( self, ) -> Generator: while True: gen = self.line_chunks() yield gen", "self.prepare() self.solve() @abstractmethod def prepare(self): ... @abstractmethod def solve(self): ... import numpy as", "len(self.nodes) from collections import ( deque, ) class GraphBFS: level: List[int] def __init__(", "__call__( self, ): self.prepare() self.solve() @abstractmethod def prepare(self): ... @abstractmethod def solve(self): ...", "+ self.sx dst = self.gy * c + self.gx bfs = GraphBFS(graph=g) bfs.search(src)", "( self.async_readlines() ) self.chunks: Generator def async_readlines( self, ) -> Generator: while True:", "line_chunks( self, ) -> Generator: ln = self.buf.readline() for chunk in ln.split(): yield", ") self.chunks: Generator def async_readlines( self, ) -> Generator: while True: gen =", "= ( self.async_readlines() ) self.chunks: Generator def async_readlines( self, ) -> Generator: while", "i: int, ): edges = [] maze = self.maze if maze[i] == '#':", "self.reader = StdReader() def __call__( self, ): self.prepare() self.solve() @abstractmethod def prepare(self): ...", "from collections import ( deque, ) class GraphBFS: level: List[int] def __init__( self,", "que = self.queue for e in g.edges[u]: v = e.to if lv[v] is", "e in g.edges[u]: v = e.to if lv[v] is not None: continue lv[v]", "self, src: int, ): self.init_level() self.level[src] = 0 self.set_queue() que = self.queue que.append(src)", "int: g = self.g c = self.c src = self.sy * c +", "maze def solve(self): c = self.c self.moves = (-c, -1, 1, c) self.make_graph()", "[] for _ in range(n) ] self.nodes = nodes self.edges = edges def", "add_edges( self, edges: List[Edge], ): for e in edges: self.add_edge(e) @property def size(self):", "graph self.inf = float('inf') def search( self, src: int, ): self.init_level() self.level[src] =", "= float('inf') def search( self, src: int, ): 
self.init_level() self.level[src] = 0 self.set_queue()", "e.to if lv[v] is not None: continue lv[v] = lv[u] + 1 que.append(v)", "class Node: id_: int = None @dataclass class Edge: id_: int = None", "sx self.gy = gy self.gx = gx self.maze = maze def solve(self): c", "1 sx = reader.int() - 1 gy = reader.int() - 1 gx =", "-> Generator: while True: gen = self.line_chunks() yield gen def line_chunks( self, )", "= ''.join(maze) self.r = r self.c = c self.sy = sy self.sx =", "chunk = self() return chunk def str( self, ) -> str: b =", "edges: self.add_edge(e) @property def size(self): return len(self.nodes) from collections import ( deque, )", "return chunk def str( self, ) -> str: b = self() return b.decode()", "g = self.g c = self.c src = self.sy * c + self.sx", "self, ) -> Generator: ln = self.buf.readline() for chunk in ln.split(): yield chunk", "dataclass, ) @dataclass class Node: id_: int = None @dataclass class Edge: id_:", "bfs.level[dst] return dist def make_graph( self, ): r, c = self.r, self.c n", "b = self() return b.decode() def int( self, ) -> int: return int(self.str())", "reader.str() maze = ''.join(maze) self.r = r self.c = c self.sy = sy", "self.c src = self.sy * c + self.sx dst = self.gy * c", "= self.c self.moves = (-c, -1, 1, c) self.make_graph() print(self.calc_dist()) def calc_dist(self) ->", "c) self.make_graph() print(self.calc_dist()) def calc_dist(self) -> int: g = self.g c = self.c", "e: Edge, ): i = e.from_ self.edges[i].append(e) def add_edges( self, edges: List[Edge], ):", "lv[v] = lv[u] + 1 que.append(v) def set_queue(self): que = deque() self.queue =", "e = Edge( from_ = i, to = j, ) edges.append(e) return edges", "def size(self): return len(self.nodes) from collections import ( deque, ) class GraphBFS: level:", "def main(): t = 1 # t = StdReader().int() for _ in range(t):", "= lv[u] + 1 que.append(v) def set_queue(self): que = deque() self.queue = que", ") -> bytes: try: chunk = next(self.chunks) except: self.chunks = next( self.lines, )", 
"gx = reader.int() - 1 maze = [None] * r for i in", "Solver(ABC): def __init__(self): self.reader = StdReader() def __call__( self, ): self.prepare() self.solve() @abstractmethod", "lv[v] is not None: continue lv[v] = lv[u] + 1 que.append(v) def set_queue(self):", "in self.moves: j = i + d if maze[j] == '#': continue e", "try: chunk = next(self.chunks) except: self.chunks = next( self.lines, ) chunk = self()", "= g def gen_edges( self, i: int, ): edges = [] maze =", "List[List[Edge]] def __init__( self, n: int, ): nodes = [ Node(i) for i", "List[Node] edges: List[List[Edge]] def __init__( self, n: int, ): nodes = [ Node(i)", "- 1 gx = reader.int() - 1 maze = [None] * r for", "__init__(self): self.reader = StdReader() def __call__( self, ): self.prepare() self.solve() @abstractmethod def prepare(self):", "self.edges = edges def add_edge( self, e: Edge, ): i = e.from_ self.edges[i].append(e)", "1, c) self.make_graph() print(self.calc_dist()) def calc_dist(self) -> int: g = self.g c =", "NoReturn ) class StdReader: def __init__( self, ) -> NoReturn: import sys self.buf", "= e.from_ self.edges[i].append(e) def add_edges( self, edges: List[Edge], ): for e in edges:", "_ in range(n) ] self.nodes = nodes self.edges = edges def add_edge( self,", "0 @dataclass class Graph: nodes: List[Node] edges: List[List[Edge]] def __init__( self, n: int,", "self, n: int, ): nodes = [ Node(i) for i in range(n) ]", "): i = e.from_ self.edges[i].append(e) def add_edges( self, edges: List[Edge], ): for e", "Graph: nodes: List[Node] edges: List[List[Edge]] def __init__( self, n: int, ): nodes =", "e.from_ self.edges[i].append(e) def add_edges( self, edges: List[Edge], ): for e in edges: self.add_edge(e)", "capacity: int = 0 @dataclass class Graph: nodes: List[Node] edges: List[List[Edge]] def __init__(", "class StdReader: def __init__( self, ) -> NoReturn: import sys self.buf = sys.stdin.buffer", "bfs = GraphBFS(graph=g) bfs.search(src) dist = bfs.level[dst] return dist def make_graph( self, 
):", "sy self.sx = sx self.gy = gy self.gx = gx self.maze = maze", "gx self.maze = maze def solve(self): c = self.c self.moves = (-c, -1,", "1 gx = reader.int() - 1 maze = [None] * r for i", "GraphBFS(graph=g) bfs.search(src) dist = bfs.level[dst] return dist def make_graph( self, ): r, c", "main(): t = 1 # t = StdReader().int() for _ in range(t): Problem()()", "src: int, ): self.init_level() self.level[src] = 0 self.set_queue() que = self.queue que.append(src) while", "return edges def main(): t = 1 # t = StdReader().int() for _", "lv class Problem( Solver, ): def prepare(self): reader = self.reader r = reader.int()", "for i in range(n): edges = self.gen_edges(i) g.add_edges(edges) self.g = g def gen_edges(", "search( self, src: int, ): self.init_level() self.level[src] = 0 self.set_queue() que = self.queue", "None from_ : int = ... to: int = ... weight: int =", "for e in g.edges[u]: v = e.to if lv[v] is not None: continue", "= (-c, -1, 1, c) self.make_graph() print(self.calc_dist()) def calc_dist(self) -> int: g =", "True: gen = self.line_chunks() yield gen def line_chunks( self, ) -> Generator: ln", "gen_edges( self, i: int, ): edges = [] maze = self.maze if maze[i]", "yield gen def line_chunks( self, ) -> Generator: ln = self.buf.readline() for chunk", "1 # t = StdReader().int() for _ in range(t): Problem()() if __name__ ==", "): for e in edges: self.add_edge(e) @property def size(self): return len(self.nodes) from collections", "bytes: try: chunk = next(self.chunks) except: self.chunks = next( self.lines, ) chunk =", "self, ) -> str: b = self() return b.decode() def int( self, )", "lv = self.level que = self.queue for e in g.edges[u]: v = e.to", "= j, ) edges.append(e) return edges def main(): t = 1 # t", ") class GraphBFS: level: List[int] def __init__( self, graph: Graph, ): self.g =", "lv[u] + 1 que.append(v) def set_queue(self): que = deque() self.queue = que def", "= que.popleft() self.explore(x) def explore( self, u: int, ): g = self.g lv", "int, ): 
edges = [] maze = self.maze if maze[i] == '#': return", "nodes self.edges = edges def add_edge( self, e: Edge, ): i = e.from_", "g = Graph(n) for i in range(n): edges = self.gen_edges(i) g.add_edges(edges) self.g =", "sy = reader.int() - 1 sx = reader.int() - 1 gy = reader.int()", "* r for i in range(r): maze[i] = reader.str() maze = ''.join(maze) self.r", "from abc import ( ABC, abstractmethod, ) class Solver(ABC): def __init__(self): self.reader =", "Generator, NoReturn ) class StdReader: def __init__( self, ) -> NoReturn: import sys", "c = self.r, self.c n = r * c g = Graph(n) for", "def prepare(self): reader = self.reader r = reader.int() c = reader.int() sy =", "np from typing import ( List, ) from dataclasses import ( dataclass, )", "- 1 maze = [None] * r for i in range(r): maze[i] =", "def search( self, src: int, ): self.init_level() self.level[src] = 0 self.set_queue() que =", "self.maze = maze def solve(self): c = self.c self.moves = (-c, -1, 1,", "= self.g lv = self.level que = self.queue for e in g.edges[u]: v", "print(self.calc_dist()) def calc_dist(self) -> int: g = self.g c = self.c src =", "= self.gy * c + self.gx bfs = GraphBFS(graph=g) bfs.search(src) dist = bfs.level[dst]", "ln.split(): yield chunk def __call__( self, ) -> bytes: try: chunk = next(self.chunks)", "Solver, ): def prepare(self): reader = self.reader r = reader.int() c = reader.int()", "self.lines = ( self.async_readlines() ) self.chunks: Generator def async_readlines( self, ) -> Generator:", "range(n) ] self.nodes = nodes self.edges = edges def add_edge( self, e: Edge,", "c g = Graph(n) for i in range(n): edges = self.gen_edges(i) g.add_edges(edges) self.g", "NoReturn: import sys self.buf = sys.stdin.buffer self.lines = ( self.async_readlines() ) self.chunks: Generator", "Problem( Solver, ): def prepare(self): reader = self.reader r = reader.int() c =", "nodes: List[Node] edges: List[List[Edge]] def __init__( self, n: int, ): nodes = [", "= i, to = j, ) edges.append(e) return edges 
def main(): t =", "= None @dataclass class Edge: id_: int = None from_ : int =", "def add_edge( self, e: Edge, ): i = e.from_ self.edges[i].append(e) def add_edges( self,", "u: int, ): g = self.g lv = self.level que = self.queue for", "= que def init_level(self): lv = [None] * self.g.size self.level = lv class", "@property def size(self): return len(self.nodes) from collections import ( deque, ) class GraphBFS:", "chunk in ln.split(): yield chunk def __call__( self, ) -> bytes: try: chunk", "for e in edges: self.add_edge(e) @property def size(self): return len(self.nodes) from collections import", "* self.g.size self.level = lv class Problem( Solver, ): def prepare(self): reader =", "= r * c g = Graph(n) for i in range(n): edges =", "'#': return edges for d in self.moves: j = i + d if", "( Generator, NoReturn ) class StdReader: def __init__( self, ) -> NoReturn: import", "in ln.split(): yield chunk def __call__( self, ) -> bytes: try: chunk =", "): self.g = graph self.inf = float('inf') def search( self, src: int, ):", "= reader.int() - 1 gx = reader.int() - 1 maze = [None] *", "== '#': return edges for d in self.moves: j = i + d", "from typing import ( Generator, NoReturn ) class StdReader: def __init__( self, )", "not None: continue lv[v] = lv[u] + 1 que.append(v) def set_queue(self): que =", "in g.edges[u]: v = e.to if lv[v] is not None: continue lv[v] =", "v = e.to if lv[v] is not None: continue lv[v] = lv[u] +", "- 1 sx = reader.int() - 1 gy = reader.int() - 1 gx", "class Edge: id_: int = None from_ : int = ... 
to: int", "self.g lv = self.level que = self.queue for e in g.edges[u]: v =", "reader.int() c = reader.int() sy = reader.int() - 1 sx = reader.int() -", "que def init_level(self): lv = [None] * self.g.size self.level = lv class Problem(", "1 que.append(v) def set_queue(self): que = deque() self.queue = que def init_level(self): lv", "def __call__( self, ) -> bytes: try: chunk = next(self.chunks) except: self.chunks =", "Node(i) for i in range(n) ] edges = [ [] for _ in", "... import numpy as np from typing import ( List, ) from dataclasses", "class Graph: nodes: List[Node] edges: List[List[Edge]] def __init__( self, n: int, ): nodes", "maze = [None] * r for i in range(r): maze[i] = reader.str() maze", "sx = reader.int() - 1 gy = reader.int() - 1 gx = reader.int()", ") from typing import ( Generator, NoReturn ) class StdReader: def __init__( self,", "dst = self.gy * c + self.gx bfs = GraphBFS(graph=g) bfs.search(src) dist =", "self() return b.decode() def int( self, ) -> int: return int(self.str()) from abc", "int: return int(self.str()) from abc import ( ABC, abstractmethod, ) class Solver(ABC): def", "next( self.lines, ) chunk = self() return chunk def str( self, ) ->", "[None] * self.g.size self.level = lv class Problem( Solver, ): def prepare(self): reader", "if maze[j] == '#': continue e = Edge( from_ = i, to =", "= gy self.gx = gx self.maze = maze def solve(self): c = self.c", "List[Edge], ): for e in edges: self.add_edge(e) @property def size(self): return len(self.nodes) from", ") -> Generator: ln = self.buf.readline() for chunk in ln.split(): yield chunk def", "-> Generator: ln = self.buf.readline() for chunk in ln.split(): yield chunk def __call__(", "self.async_readlines() ) self.chunks: Generator def async_readlines( self, ) -> Generator: while True: gen", "@dataclass class Node: id_: int = None @dataclass class Edge: id_: int =", "in range(r): maze[i] = reader.str() maze = ''.join(maze) self.r = r self.c =", "prepare(self): ... 
@abstractmethod def solve(self): ... import numpy as np from typing import", "from dataclasses import ( dataclass, ) @dataclass class Node: id_: int = None", "self, ) -> int: return int(self.str()) from abc import ( ABC, abstractmethod, )", "(-c, -1, 1, c) self.make_graph() print(self.calc_dist()) def calc_dist(self) -> int: g = self.g", "self.g = g def gen_edges( self, i: int, ): edges = [] maze", "reader.int() - 1 sx = reader.int() - 1 gy = reader.int() - 1", "( List, ) from dataclasses import ( dataclass, ) @dataclass class Node: id_:", "self.chunks = next( self.lines, ) chunk = self() return chunk def str( self,", "g.edges[u]: v = e.to if lv[v] is not None: continue lv[v] = lv[u]", "async_readlines( self, ) -> Generator: while True: gen = self.line_chunks() yield gen def", "''.join(maze) self.r = r self.c = c self.sy = sy self.sx = sx", "import sys self.buf = sys.stdin.buffer self.lines = ( self.async_readlines() ) self.chunks: Generator def", "r * c g = Graph(n) for i in range(n): edges = self.gen_edges(i)", "= self.maze if maze[i] == '#': return edges for d in self.moves: j", "int = ... weight: int = 1 capacity: int = 0 @dataclass class", "self.buf = sys.stdin.buffer self.lines = ( self.async_readlines() ) self.chunks: Generator def async_readlines( self,", "self, graph: Graph, ): self.g = graph self.inf = float('inf') def search( self,", "if lv[v] is not None: continue lv[v] = lv[u] + 1 que.append(v) def", "self.g.size self.level = lv class Problem( Solver, ): def prepare(self): reader = self.reader", "( ABC, abstractmethod, ) class Solver(ABC): def __init__(self): self.reader = StdReader() def __call__(", "StdReader() def __call__( self, ): self.prepare() self.solve() @abstractmethod def prepare(self): ... 
@abstractmethod def", "self.c n = r * c g = Graph(n) for i in range(n):", "nodes = [ Node(i) for i in range(n) ] edges = [ []", "Generator: while True: gen = self.line_chunks() yield gen def line_chunks( self, ) ->", "self.r, self.c n = r * c g = Graph(n) for i in", "= self.gen_edges(i) g.add_edges(edges) self.g = g def gen_edges( self, i: int, ): edges", "d in self.moves: j = i + d if maze[j] == '#': continue", "self.g c = self.c src = self.sy * c + self.sx dst =", "ln = self.buf.readline() for chunk in ln.split(): yield chunk def __call__( self, )", "return edges for d in self.moves: j = i + d if maze[j]", "src = self.sy * c + self.sx dst = self.gy * c +", "Edge( from_ = i, to = j, ) edges.append(e) return edges def main():", "int = ... to: int = ... weight: int = 1 capacity: int", "= next(self.chunks) except: self.chunks = next( self.lines, ) chunk = self() return chunk", "def int( self, ) -> int: return int(self.str()) from abc import ( ABC,", "= lv class Problem( Solver, ): def prepare(self): reader = self.reader r =", "def __call__( self, ): self.prepare() self.solve() @abstractmethod def prepare(self): ... 
@abstractmethod def solve(self):", "def line_chunks( self, ) -> Generator: ln = self.buf.readline() for chunk in ln.split():", "__init__( self, n: int, ): nodes = [ Node(i) for i in range(n)", "self.sy = sy self.sx = sx self.gy = gy self.gx = gx self.maze", "while que: x = que.popleft() self.explore(x) def explore( self, u: int, ): g", "range(r): maze[i] = reader.str() maze = ''.join(maze) self.r = r self.c = c", "= sx self.gy = gy self.gx = gx self.maze = maze def solve(self):", ") class StdReader: def __init__( self, ) -> NoReturn: import sys self.buf =", "que = deque() self.queue = que def init_level(self): lv = [None] * self.g.size", "edges.append(e) return edges def main(): t = 1 # t = StdReader().int() for", "def __init__( self, graph: Graph, ): self.g = graph self.inf = float('inf') def", "= Edge( from_ = i, to = j, ) edges.append(e) return edges def", "self.moves = (-c, -1, 1, c) self.make_graph() print(self.calc_dist()) def calc_dist(self) -> int: g", "bfs.search(src) dist = bfs.level[dst] return dist def make_graph( self, ): r, c =", "def explore( self, u: int, ): g = self.g lv = self.level que", "self.g = graph self.inf = float('inf') def search( self, src: int, ): self.init_level()", "): g = self.g lv = self.level que = self.queue for e in", "from __future__ import ( annotations, ) from typing import ( Generator, NoReturn )", "que.append(src) while que: x = que.popleft() self.explore(x) def explore( self, u: int, ):", "None @dataclass class Edge: id_: int = None from_ : int = ...", "edges def add_edge( self, e: Edge, ): i = e.from_ self.edges[i].append(e) def add_edges(", "continue lv[v] = lv[u] + 1 que.append(v) def set_queue(self): que = deque() self.queue", "= c self.sy = sy self.sx = sx self.gy = gy self.gx =", "-> int: return int(self.str()) from abc import ( ABC, abstractmethod, ) class Solver(ABC):", "def str( self, ) -> str: b = self() return b.decode() def int(", "Graph, ): self.g = graph self.inf = float('inf') def search( self, src: int,", 
"self.r = r self.c = c self.sy = sy self.sx = sx self.gy", "= 0 self.set_queue() que = self.queue que.append(src) while que: x = que.popleft() self.explore(x)", "= reader.int() - 1 maze = [None] * r for i in range(r):", "= 1 capacity: int = 0 @dataclass class Graph: nodes: List[Node] edges: List[List[Edge]]", "i in range(n) ] edges = [ [] for _ in range(n) ]", "r for i in range(r): maze[i] = reader.str() maze = ''.join(maze) self.r =", "self.line_chunks() yield gen def line_chunks( self, ) -> Generator: ln = self.buf.readline() for", "que.append(v) def set_queue(self): que = deque() self.queue = que def init_level(self): lv =", "g = self.g lv = self.level que = self.queue for e in g.edges[u]:", "): edges = [] maze = self.maze if maze[i] == '#': return edges", "StdReader: def __init__( self, ) -> NoReturn: import sys self.buf = sys.stdin.buffer self.lines", "r = reader.int() c = reader.int() sy = reader.int() - 1 sx =", "edges for d in self.moves: j = i + d if maze[j] ==", "float('inf') def search( self, src: int, ): self.init_level() self.level[src] = 0 self.set_queue() que", "__init__( self, ) -> NoReturn: import sys self.buf = sys.stdin.buffer self.lines = (", "class Solver(ABC): def __init__(self): self.reader = StdReader() def __call__( self, ): self.prepare() self.solve()", "[ [] for _ in range(n) ] self.nodes = nodes self.edges = edges", "= sys.stdin.buffer self.lines = ( self.async_readlines() ) self.chunks: Generator def async_readlines( self, )", "int = 0 @dataclass class Graph: nodes: List[Node] edges: List[List[Edge]] def __init__( self,", "self.sy * c + self.sx dst = self.gy * c + self.gx bfs", "( annotations, ) from typing import ( Generator, NoReturn ) class StdReader: def", "j, ) edges.append(e) return edges def main(): t = 1 # t =", "edges def main(): t = 1 # t = StdReader().int() for _ in", "None: continue lv[v] = lv[u] + 1 que.append(v) def set_queue(self): que = deque()", "sys self.buf = sys.stdin.buffer self.lines = ( self.async_readlines() ) 
self.chunks: Generator def async_readlines(", "make_graph( self, ): r, c = self.r, self.c n = r * c", "= self.line_chunks() yield gen def line_chunks( self, ) -> Generator: ln = self.buf.readline()", "to: int = ... weight: int = 1 capacity: int = 0 @dataclass", "reader.int() - 1 maze = [None] * r for i in range(r): maze[i]", "import ( annotations, ) from typing import ( Generator, NoReturn ) class StdReader:", "annotations, ) from typing import ( Generator, NoReturn ) class StdReader: def __init__(", "= None from_ : int = ... to: int = ... weight: int", "self.chunks: Generator def async_readlines( self, ) -> Generator: while True: gen = self.line_chunks()", "self, i: int, ): edges = [] maze = self.maze if maze[i] ==", "self, ): self.prepare() self.solve() @abstractmethod def prepare(self): ... @abstractmethod def solve(self): ... import", "int = None from_ : int = ... to: int = ... weight:", "yield chunk def __call__( self, ) -> bytes: try: chunk = next(self.chunks) except:", "int = None @dataclass class Edge: id_: int = None from_ : int", "[] maze = self.maze if maze[i] == '#': return edges for d in", "dataclasses import ( dataclass, ) @dataclass class Node: id_: int = None @dataclass", "int, ): self.init_level() self.level[src] = 0 self.set_queue() que = self.queue que.append(src) while que:", "g.add_edges(edges) self.g = g def gen_edges( self, i: int, ): edges = []", "edges: List[Edge], ): for e in edges: self.add_edge(e) @property def size(self): return len(self.nodes)", "self, u: int, ): g = self.g lv = self.level que = self.queue", "0 self.set_queue() que = self.queue que.append(src) while que: x = que.popleft() self.explore(x) def", "deque, ) class GraphBFS: level: List[int] def __init__( self, graph: Graph, ): self.g", "Graph(n) for i in range(n): edges = self.gen_edges(i) g.add_edges(edges) self.g = g def", "in edges: self.add_edge(e) @property def size(self): return len(self.nodes) from collections import ( deque,", "def __init__(self): self.reader = 
StdReader() def __call__( self, ): self.prepare() self.solve() @abstractmethod def", "-> bytes: try: chunk = next(self.chunks) except: self.chunks = next( self.lines, ) chunk", "self.buf.readline() for chunk in ln.split(): yield chunk def __call__( self, ) -> bytes:", "edges = self.gen_edges(i) g.add_edges(edges) self.g = g def gen_edges( self, i: int, ):", "self.level que = self.queue for e in g.edges[u]: v = e.to if lv[v]", "b.decode() def int( self, ) -> int: return int(self.str()) from abc import (", ") @dataclass class Node: id_: int = None @dataclass class Edge: id_: int", "__init__( self, graph: Graph, ): self.g = graph self.inf = float('inf') def search(", "= [None] * self.g.size self.level = lv class Problem( Solver, ): def prepare(self):", "dist def make_graph( self, ): r, c = self.r, self.c n = r", "continue e = Edge( from_ = i, to = j, ) edges.append(e) return", "r self.c = c self.sy = sy self.sx = sx self.gy = gy", "size(self): return len(self.nodes) from collections import ( deque, ) class GraphBFS: level: List[int]", "self, ): r, c = self.r, self.c n = r * c g", "): def prepare(self): reader = self.reader r = reader.int() c = reader.int() sy", "= reader.str() maze = ''.join(maze) self.r = r self.c = c self.sy =", "def solve(self): ... 
import numpy as np from typing import ( List, )", "self.set_queue() que = self.queue que.append(src) while que: x = que.popleft() self.explore(x) def explore(", "i, to = j, ) edges.append(e) return edges def main(): t = 1", "-> NoReturn: import sys self.buf = sys.stdin.buffer self.lines = ( self.async_readlines() ) self.chunks:", "maze[i] = reader.str() maze = ''.join(maze) self.r = r self.c = c self.sy", "numpy as np from typing import ( List, ) from dataclasses import (", "= self.reader r = reader.int() c = reader.int() sy = reader.int() - 1", "j = i + d if maze[j] == '#': continue e = Edge(", "= 0 @dataclass class Graph: nodes: List[Node] edges: List[List[Edge]] def __init__( self, n:", "self.maze if maze[i] == '#': return edges for d in self.moves: j =", "id_: int = None @dataclass class Edge: id_: int = None from_ :", "from_ : int = ... to: int = ... weight: int = 1", "c = self.c src = self.sy * c + self.sx dst = self.gy", "reader.int() - 1 gy = reader.int() - 1 gx = reader.int() - 1", "== '#': continue e = Edge( from_ = i, to = j, )", "= gx self.maze = maze def solve(self): c = self.c self.moves = (-c,", "next(self.chunks) except: self.chunks = next( self.lines, ) chunk = self() return chunk def", "gy = reader.int() - 1 gx = reader.int() - 1 maze = [None]", "range(n): edges = self.gen_edges(i) g.add_edges(edges) self.g = g def gen_edges( self, i: int,", "= [] maze = self.maze if maze[i] == '#': return edges for d", "return len(self.nodes) from collections import ( deque, ) class GraphBFS: level: List[int] def", "__future__ import ( annotations, ) from typing import ( Generator, NoReturn ) class", "self, edges: List[Edge], ): for e in edges: self.add_edge(e) @property def size(self): return", "List, ) from dataclasses import ( dataclass, ) @dataclass class Node: id_: int", "import ( Generator, NoReturn ) class StdReader: def __init__( self, ) -> NoReturn:", "in range(n) ] edges = [ [] for _ in range(n) ] self.nodes", "gen def line_chunks( self, ) -> Generator: 
ln = self.buf.readline() for chunk in", "i in range(n): edges = self.gen_edges(i) g.add_edges(edges) self.g = g def gen_edges( self,", "que.popleft() self.explore(x) def explore( self, u: int, ): g = self.g lv =", "que: x = que.popleft() self.explore(x) def explore( self, u: int, ): g =", "): self.prepare() self.solve() @abstractmethod def prepare(self): ... @abstractmethod def solve(self): ... import numpy", "for chunk in ln.split(): yield chunk def __call__( self, ) -> bytes: try:", "in range(n): edges = self.gen_edges(i) g.add_edges(edges) self.g = g def gen_edges( self, i:", "maze = self.maze if maze[i] == '#': return edges for d in self.moves:", ") -> int: return int(self.str()) from abc import ( ABC, abstractmethod, ) class", "maze = ''.join(maze) self.r = r self.c = c self.sy = sy self.sx", "= ... weight: int = 1 capacity: int = 0 @dataclass class Graph:", "n: int, ): nodes = [ Node(i) for i in range(n) ] edges", "for i in range(r): maze[i] = reader.str() maze = ''.join(maze) self.r = r", "+ self.gx bfs = GraphBFS(graph=g) bfs.search(src) dist = bfs.level[dst] return dist def make_graph(", "def prepare(self): ... @abstractmethod def solve(self): ... import numpy as np from typing", "def solve(self): c = self.c self.moves = (-c, -1, 1, c) self.make_graph() print(self.calc_dist())", "= StdReader() def __call__( self, ): self.prepare() self.solve() @abstractmethod def prepare(self): ... 
@abstractmethod", "= graph self.inf = float('inf') def search( self, src: int, ): self.init_level() self.level[src]", "] edges = [ [] for _ in range(n) ] self.nodes = nodes", "= [ [] for _ in range(n) ] self.nodes = nodes self.edges =", "[None] * r for i in range(r): maze[i] = reader.str() maze = ''.join(maze)", "= self.buf.readline() for chunk in ln.split(): yield chunk def __call__( self, ) ->", "except: self.chunks = next( self.lines, ) chunk = self() return chunk def str(", "add_edge( self, e: Edge, ): i = e.from_ self.edges[i].append(e) def add_edges( self, edges:", "abstractmethod, ) class Solver(ABC): def __init__(self): self.reader = StdReader() def __call__( self, ):", "= next( self.lines, ) chunk = self() return chunk def str( self, )", "calc_dist(self) -> int: g = self.g c = self.c src = self.sy *", ") from dataclasses import ( dataclass, ) @dataclass class Node: id_: int =", "e in edges: self.add_edge(e) @property def size(self): return len(self.nodes) from collections import (", ") class Solver(ABC): def __init__(self): self.reader = StdReader() def __call__( self, ): self.prepare()", "abc import ( ABC, abstractmethod, ) class Solver(ABC): def __init__(self): self.reader = StdReader()", "= self.queue que.append(src) while que: x = que.popleft() self.explore(x) def explore( self, u:", "from typing import ( List, ) from dataclasses import ( dataclass, ) @dataclass", "= sy self.sx = sx self.gy = gy self.gx = gx self.maze =", "self.level[src] = 0 self.set_queue() que = self.queue que.append(src) while que: x = que.popleft()", "self.sx = sx self.gy = gy self.gx = gx self.maze = maze def", "Generator: ln = self.buf.readline() for chunk in ln.split(): yield chunk def __call__( self,", "def add_edges( self, edges: List[Edge], ): for e in edges: self.add_edge(e) @property def", "prepare(self): reader = self.reader r = reader.int() c = reader.int() sy = reader.int()", "= bfs.level[dst] return dist def make_graph( self, ): r, c = self.r, self.c", "x = 
que.popleft() self.explore(x) def explore( self, u: int, ): g = self.g", ") chunk = self() return chunk def str( self, ) -> str: b", "self.level = lv class Problem( Solver, ): def prepare(self): reader = self.reader r", "= reader.int() - 1 sx = reader.int() - 1 gy = reader.int() -", ") -> str: b = self() return b.decode() def int( self, ) ->", "init_level(self): lv = [None] * self.g.size self.level = lv class Problem( Solver, ):", "import numpy as np from typing import ( List, ) from dataclasses import", "solve(self): ... import numpy as np from typing import ( List, ) from", "weight: int = 1 capacity: int = 0 @dataclass class Graph: nodes: List[Node]", "for i in range(n) ] edges = [ [] for _ in range(n)", "self.c self.moves = (-c, -1, 1, c) self.make_graph() print(self.calc_dist()) def calc_dist(self) -> int:", "return dist def make_graph( self, ): r, c = self.r, self.c n =", "): r, c = self.r, self.c n = r * c g =", "from_ = i, to = j, ) edges.append(e) return edges def main(): t", "self.explore(x) def explore( self, u: int, ): g = self.g lv = self.level", "self.inf = float('inf') def search( self, src: int, ): self.init_level() self.level[src] = 0", "in range(n) ] self.nodes = nodes self.edges = edges def add_edge( self, e:", "self.sx dst = self.gy * c + self.gx bfs = GraphBFS(graph=g) bfs.search(src) dist", "edges: List[List[Edge]] def __init__( self, n: int, ): nodes = [ Node(i) for", "Generator def async_readlines( self, ) -> Generator: while True: gen = self.line_chunks() yield", "self.edges[i].append(e) def add_edges( self, edges: List[Edge], ): for e in edges: self.add_edge(e) @property", "while True: gen = self.line_chunks() yield gen def line_chunks( self, ) -> Generator:", "maze[i] == '#': return edges for d in self.moves: j = i +", "sys.stdin.buffer self.lines = ( self.async_readlines() ) self.chunks: Generator def async_readlines( self, ) ->", "edges = [ [] for _ in range(n) ] self.nodes = nodes self.edges", "c = reader.int() sy = reader.int() - 1 sx 
= reader.int() - 1", "lv = [None] * self.g.size self.level = lv class Problem( Solver, ): def", "-> str: b = self() return b.decode() def int( self, ) -> int:", "i in range(r): maze[i] = reader.str() maze = ''.join(maze) self.r = r self.c", "graph: Graph, ): self.g = graph self.inf = float('inf') def search( self, src:", "Node: id_: int = None @dataclass class Edge: id_: int = None from_", "= self.queue for e in g.edges[u]: v = e.to if lv[v] is not", "= self.c src = self.sy * c + self.sx dst = self.gy *", "- 1 gy = reader.int() - 1 gx = reader.int() - 1 maze", "... @abstractmethod def solve(self): ... import numpy as np from typing import (", "if maze[i] == '#': return edges for d in self.moves: j = i", "( deque, ) class GraphBFS: level: List[int] def __init__( self, graph: Graph, ):", "d if maze[j] == '#': continue e = Edge( from_ = i, to", "chunk def __call__( self, ) -> bytes: try: chunk = next(self.chunks) except: self.chunks", "t = StdReader().int() for _ in range(t): Problem()() if __name__ == '__main__': main()", "is not None: continue lv[v] = lv[u] + 1 que.append(v) def set_queue(self): que", "self.c = c self.sy = sy self.sx = sx self.gy = gy self.gx", "= deque() self.queue = que def init_level(self): lv = [None] * self.g.size self.level", "Edge, ): i = e.from_ self.edges[i].append(e) def add_edges( self, edges: List[Edge], ): for", "def make_graph( self, ): r, c = self.r, self.c n = r *", "self.gx bfs = GraphBFS(graph=g) bfs.search(src) dist = bfs.level[dst] return dist def make_graph( self,", "as np from typing import ( List, ) from dataclasses import ( dataclass,", "= e.to if lv[v] is not None: continue lv[v] = lv[u] + 1", "self.moves: j = i + d if maze[j] == '#': continue e =", "( dataclass, ) @dataclass class Node: id_: int = None @dataclass class Edge:", "* c + self.gx bfs = GraphBFS(graph=g) bfs.search(src) dist = bfs.level[dst] return dist", "r, c = self.r, self.c n = r * c g = Graph(n)", "1 capacity: int = 0 @dataclass class Graph: nodes: 
List[Node] edges: List[List[Edge]] def", "# t = StdReader().int() for _ in range(t): Problem()() if __name__ == '__main__':", "self.lines, ) chunk = self() return chunk def str( self, ) -> str:", "edges = [] maze = self.maze if maze[i] == '#': return edges for", "= reader.int() sy = reader.int() - 1 sx = reader.int() - 1 gy", "= 1 # t = StdReader().int() for _ in range(t): Problem()() if __name__", "def calc_dist(self) -> int: g = self.g c = self.c src = self.sy", "@abstractmethod def prepare(self): ... @abstractmethod def solve(self): ... import numpy as np from", "def __init__( self, n: int, ): nodes = [ Node(i) for i in", "i + d if maze[j] == '#': continue e = Edge( from_ =", "int, ): nodes = [ Node(i) for i in range(n) ] edges =", "for d in self.moves: j = i + d if maze[j] == '#':", "self.gx = gx self.maze = maze def solve(self): c = self.c self.moves =", "= reader.int() - 1 gy = reader.int() - 1 gx = reader.int() -", "def init_level(self): lv = [None] * self.g.size self.level = lv class Problem( Solver,", "1 maze = [None] * r for i in range(r): maze[i] = reader.str()", "def gen_edges( self, i: int, ): edges = [] maze = self.maze if", "t = 1 # t = StdReader().int() for _ in range(t): Problem()() if", "... to: int = ... weight: int = 1 capacity: int = 0", "self, ) -> NoReturn: import sys self.buf = sys.stdin.buffer self.lines = ( self.async_readlines()", "def set_queue(self): que = deque() self.queue = que def init_level(self): lv = [None]", "maze[j] == '#': continue e = Edge( from_ = i, to = j,", "level: List[int] def __init__( self, graph: Graph, ): self.g = graph self.inf =", "... weight: int = 1 capacity: int = 0 @dataclass class Graph: nodes:", "self.queue for e in g.edges[u]: v = e.to if lv[v] is not None:", ": int = ... to: int = ... 
weight: int = 1 capacity:", "+ 1 que.append(v) def set_queue(self): que = deque() self.queue = que def init_level(self):", "= GraphBFS(graph=g) bfs.search(src) dist = bfs.level[dst] return dist def make_graph( self, ): r,", "i = e.from_ self.edges[i].append(e) def add_edges( self, edges: List[Edge], ): for e in", "reader.int() - 1 gx = reader.int() - 1 maze = [None] * r", "@dataclass class Graph: nodes: List[Node] edges: List[List[Edge]] def __init__( self, n: int, ):", ") edges.append(e) return edges def main(): t = 1 # t = StdReader().int()", "self() return chunk def str( self, ) -> str: b = self() return", "): self.init_level() self.level[src] = 0 self.set_queue() que = self.queue que.append(src) while que: x", "c + self.sx dst = self.gy * c + self.gx bfs = GraphBFS(graph=g)", "@abstractmethod def solve(self): ... import numpy as np from typing import ( List,", "= self() return chunk def str( self, ) -> str: b = self()", "self.add_edge(e) @property def size(self): return len(self.nodes) from collections import ( deque, ) class", "= reader.int() c = reader.int() sy = reader.int() - 1 sx = reader.int()", "c self.sy = sy self.sx = sx self.gy = gy self.gx = gx", "'#': continue e = Edge( from_ = i, to = j, ) edges.append(e)", "return int(self.str()) from abc import ( ABC, abstractmethod, ) class Solver(ABC): def __init__(self):", "= nodes self.edges = edges def add_edge( self, e: Edge, ): i =", "= maze def solve(self): c = self.c self.moves = (-c, -1, 1, c)", "n = r * c g = Graph(n) for i in range(n): edges", ") -> Generator: while True: gen = self.line_chunks() yield gen def line_chunks( self,", "self, e: Edge, ): i = e.from_ self.edges[i].append(e) def add_edges( self, edges: List[Edge],", "__call__( self, ) -> bytes: try: chunk = next(self.chunks) except: self.chunks = next(", "c = self.c self.moves = (-c, -1, 1, c) self.make_graph() print(self.calc_dist()) def calc_dist(self)", "] self.nodes = nodes self.edges = edges def add_edge( self, e: Edge, ):", "List[int] 
def __init__( self, graph: Graph, ): self.g = graph self.inf = float('inf')", "dist = bfs.level[dst] return dist def make_graph( self, ): r, c = self.r,", "set_queue(self): que = deque() self.queue = que def init_level(self): lv = [None] *", "class Problem( Solver, ): def prepare(self): reader = self.reader r = reader.int() c", "reader.int() sy = reader.int() - 1 sx = reader.int() - 1 gy =", "gy self.gx = gx self.maze = maze def solve(self): c = self.c self.moves", "int(self.str()) from abc import ( ABC, abstractmethod, ) class Solver(ABC): def __init__(self): self.reader", "self.gen_edges(i) g.add_edges(edges) self.g = g def gen_edges( self, i: int, ): edges =", ") -> NoReturn: import sys self.buf = sys.stdin.buffer self.lines = ( self.async_readlines() )", "): nodes = [ Node(i) for i in range(n) ] edges = [", "str: b = self() return b.decode() def int( self, ) -> int: return", "explore( self, u: int, ): g = self.g lv = self.level que =", "range(n) ] edges = [ [] for _ in range(n) ] self.nodes =", "= r self.c = c self.sy = sy self.sx = sx self.gy =", "= self.g c = self.c src = self.sy * c + self.sx dst", "Edge: id_: int = None from_ : int = ... to: int =", "self.gy * c + self.gx bfs = GraphBFS(graph=g) bfs.search(src) dist = bfs.level[dst] return", "+ d if maze[j] == '#': continue e = Edge( from_ = i,", "reader = self.reader r = reader.int() c = reader.int() sy = reader.int() -", "to = j, ) edges.append(e) return edges def main(): t = 1 #", "import ( List, ) from dataclasses import ( dataclass, ) @dataclass class Node:", "= self.level que = self.queue for e in g.edges[u]: v = e.to if", "c + self.gx bfs = GraphBFS(graph=g) bfs.search(src) dist = bfs.level[dst] return dist def", "self.nodes = nodes self.edges = edges def add_edge( self, e: Edge, ): i", "id_: int = None from_ : int = ... 
to: int = ...", "self.queue que.append(src) while que: x = que.popleft() self.explore(x) def explore( self, u: int,", "self.queue = que def init_level(self): lv = [None] * self.g.size self.level = lv", "return b.decode() def int( self, ) -> int: return int(self.str()) from abc import", "self.reader r = reader.int() c = reader.int() sy = reader.int() - 1 sx", "= self.sy * c + self.sx dst = self.gy * c + self.gx", "deque() self.queue = que def init_level(self): lv = [None] * self.g.size self.level =", "-> int: g = self.g c = self.c src = self.sy * c", "* c g = Graph(n) for i in range(n): edges = self.gen_edges(i) g.add_edges(edges)", "= ... to: int = ... weight: int = 1 capacity: int =", "self, ) -> Generator: while True: gen = self.line_chunks() yield gen def line_chunks(", "= edges def add_edge( self, e: Edge, ): i = e.from_ self.edges[i].append(e) def", "= self.r, self.c n = r * c g = Graph(n) for i", "self.make_graph() print(self.calc_dist()) def calc_dist(self) -> int: g = self.g c = self.c src", "que = self.queue que.append(src) while que: x = que.popleft() self.explore(x) def explore( self,", "self.gy = gy self.gx = gx self.maze = maze def solve(self): c =", "typing import ( List, ) from dataclasses import ( dataclass, ) @dataclass class", "collections import ( deque, ) class GraphBFS: level: List[int] def __init__( self, graph:", "= self() return b.decode() def int( self, ) -> int: return int(self.str()) from", "self.solve() @abstractmethod def prepare(self): ... @abstractmethod def solve(self): ... import numpy as np", "* c + self.sx dst = self.gy * c + self.gx bfs =", "for _ in range(n) ] self.nodes = nodes self.edges = edges def add_edge(", "= i + d if maze[j] == '#': continue e = Edge( from_" ]
[ "* diff + strides[i] # Chech each dimension for compatibility. A dimension length", "len(unique) > 2: # There must be at least two non-1 lengths for", "diff + strides[i] # Chech each dimension for compatibility. A dimension length of", "for compatibility. A dimension length of 1 is # accepted as compatible with", "import afnumpy import numpy def broadcast_arrays(*args, **kwargs): subok = kwargs.pop('subok', False) if kwargs:", "The common shape will take # this value. unique.remove(1) new_length = unique.pop() common_shape.append(new_length)", "dimension for compatibility. A dimension length of 1 is # accepted as compatible", "s in shapes] unique = set(lengths + [1]) if len(unique) > 2: #", "i in range(len(args)): diff = biggest - nds[i] if diff > 0: shapes[i]", "= [s[axis] for s in shapes] unique = set(lengths + [1]) if len(unique)", "Go through each array and prepend dimensions of length 1 to each of", "afnumpy import numpy def broadcast_arrays(*args, **kwargs): subok = kwargs.pop('subok', False) if kwargs: raise", "in args] nds = [len(s) for s in shapes] biggest = max(nds) #", "There must be at least two non-1 lengths for this axis. raise ValueError(\"shape", "range(biggest): lengths = [s[axis] for s in shapes] unique = set(lengths + [1])", "nothing is broadcasted. common_shape.append(1) # Construct the new arrays. broadcasted = [] for", "s in shapes] strides = [list(x.strides) for x in args] nds = [len(s)", "alone as nothing is broadcasted. common_shape.append(1) # Construct the new arrays. broadcasted =", "broadcasted. return args shapes = [list(s) for s in shapes] strides = [list(x.strides)", "> 0: shapes[i] = [1] * diff + shapes[i] strides[i] = [0] *", "kwargs: raise TypeError('broadcast_arrays() got an unexpected keyword ' 'argument {}'.format(kwargs.pop())) args = [afnumpy.array(_m,", "that it repeats its # data. 
for i in range(len(args)): if shapes[i][axis] ==", "[x.shape for x in args] if len(set(shapes)) == 1: # Common case where", "' 'argument {}'.format(kwargs.pop())) args = [afnumpy.array(_m, copy=False, subok=subok) for _m in args] shapes", "- nds[i] if diff > 0: shapes[i] = [1] * diff + shapes[i]", "as nothing is broadcasted. common_shape.append(1) # Construct the new arrays. broadcasted = []", "make the number of dimensions equal. for i in range(len(args)): diff = biggest", "= [list(x.strides) for x in args] nds = [len(s) for s in shapes]", "repeats its # data. for i in range(len(args)): if shapes[i][axis] == 1: shapes[i][axis]", "+ (1,)*(len(sh)-x.ndim) reps = numpy.array(sh)//numpy.array(x_sh) if(numpy.prod(reps) > 1): broadcasted.append(afnumpy.tile(x, reps)) else: if(x.shape !=", "common_shape = [] for axis in range(biggest): lengths = [s[axis] for s in", "in order to make the number of dimensions equal. for i in range(len(args)):", "TypeError('broadcast_arrays() got an unexpected keyword ' 'argument {}'.format(kwargs.pop())) args = [afnumpy.array(_m, copy=False, subok=subok)", "# this value. unique.remove(1) new_length = unique.pop() common_shape.append(new_length) # For each array, if", "is broadcasted. common_shape.append(1) # Construct the new arrays. broadcasted = [] for (x,", "of length 1 to each of # the shapes in order to make", "broadcasted. common_shape.append(1) # Construct the new arrays. broadcasted = [] for (x, sh)", "There is exactly one non-1 length. The common shape will take # this", "= [] for axis in range(biggest): lengths = [s[axis] for s in shapes]", "x in args] if len(set(shapes)) == 1: # Common case where nothing needs", "dimensions equal. for i in range(len(args)): diff = biggest - nds[i] if diff", "diff = biggest - nds[i] if diff > 0: shapes[i] = [1] *", "1: # Common case where nothing needs to be broadcasted. return args shapes", "set(lengths + [1]) if len(unique) > 2: # There must be at least", "compatibility. 
A dimension length of 1 is # accepted as compatible with any", "be at least two non-1 lengths for this axis. raise ValueError(\"shape mismatch: two", "in range(len(args)): diff = biggest - nds[i] if diff > 0: shapes[i] =", "= max(nds) # Go through each array and prepend dimensions of length 1", "for (x, sh) in zip(args, shapes): x_sh = x.shape + (1,)*(len(sh)-x.ndim) reps =", "this axis. Strides can be # left alone as nothing is broadcasted. common_shape.append(1)", "Every array has a length of 1 on this axis. Strides can be", "subok = kwargs.pop('subok', False) if kwargs: raise TypeError('broadcast_arrays() got an unexpected keyword '", "= numpy.array(sh)//numpy.array(x_sh) if(numpy.prod(reps) > 1): broadcasted.append(afnumpy.tile(x, reps)) else: if(x.shape != tuple(sh)): x =", "= biggest - nds[i] if diff > 0: shapes[i] = [1] * diff", "is exactly one non-1 length. The common shape will take # this value.", "i in range(len(args)): if shapes[i][axis] == 1: shapes[i][axis] = new_length strides[i][axis] = 0", "# Chech each dimension for compatibility. A dimension length of 1 is #", "# Common case where nothing needs to be broadcasted. return args shapes =", "broadcast_arrays(*args, **kwargs): subok = kwargs.pop('subok', False) if kwargs: raise TypeError('broadcast_arrays() got an unexpected", "shapes[i][axis] == 1: shapes[i][axis] = new_length strides[i][axis] = 0 else: # Every array", "left alone as nothing is broadcasted. common_shape.append(1) # Construct the new arrays. broadcasted", "else: # Every array has a length of 1 on this axis. Strides", "= [0] * diff + strides[i] # Chech each dimension for compatibility. 
A", "args] shapes = [x.shape for x in args] if len(set(shapes)) == 1: #", "x_sh = x.shape + (1,)*(len(sh)-x.ndim) reps = numpy.array(sh)//numpy.array(x_sh) if(numpy.prod(reps) > 1): broadcasted.append(afnumpy.tile(x, reps))", "unique = set(lengths + [1]) if len(unique) > 2: # There must be", "kwargs.pop('subok', False) if kwargs: raise TypeError('broadcast_arrays() got an unexpected keyword ' 'argument {}'.format(kwargs.pop()))", "for x in args] if len(set(shapes)) == 1: # Common case where nothing", "its # data. for i in range(len(args)): if shapes[i][axis] == 1: shapes[i][axis] =", "<reponame>FilipeMaia/afnumpy import afnumpy import numpy def broadcast_arrays(*args, **kwargs): subok = kwargs.pop('subok', False) if", "array and prepend dimensions of length 1 to each of # the shapes", "2: # There must be at least two non-1 lengths for this axis.", "length of 1, then set its stride to 0 so that it repeats", "compatible with any other length. common_shape = [] for axis in range(biggest): lengths", "if len(unique) > 2: # There must be at least two non-1 lengths", "args] nds = [len(s) for s in shapes] biggest = max(nds) # Go", "lengths for this axis. raise ValueError(\"shape mismatch: two or more arrays have \"", "non-1 length. The common shape will take # this value. unique.remove(1) new_length =", "common_shape.append(new_length) # For each array, if this axis is being broadcasted from a", "set its stride to 0 so that it repeats its # data. 
for", "shapes = [list(s) for s in shapes] strides = [list(x.strides) for x in", "in args] if len(set(shapes)) == 1: # Common case where nothing needs to", "is being broadcasted from a # length of 1, then set its stride", "> 1): broadcasted.append(afnumpy.tile(x, reps)) else: if(x.shape != tuple(sh)): x = x.reshape(sh) broadcasted.append(x) return", "1 to each of # the shapes in order to make the number", "args = [afnumpy.array(_m, copy=False, subok=subok) for _m in args] shapes = [x.shape for", "0: shapes[i] = [1] * diff + shapes[i] strides[i] = [0] * diff", "== 1: # Common case where nothing needs to be broadcasted. return args", "Chech each dimension for compatibility. A dimension length of 1 is # accepted", "strides[i][axis] = 0 else: # Every array has a length of 1 on", "= [1] * diff + shapes[i] strides[i] = [0] * diff + strides[i]", "+ shapes[i] strides[i] = [0] * diff + strides[i] # Chech each dimension", "least two non-1 lengths for this axis. raise ValueError(\"shape mismatch: two or more", "# left alone as nothing is broadcasted. common_shape.append(1) # Construct the new arrays.", "being broadcasted from a # length of 1, then set its stride to", "unique.remove(1) new_length = unique.pop() common_shape.append(new_length) # For each array, if this axis is", "+ strides[i] # Chech each dimension for compatibility. A dimension length of 1", "Construct the new arrays. broadcasted = [] for (x, sh) in zip(args, shapes):", "each array and prepend dimensions of length 1 to each of # the", "= [x.shape for x in args] if len(set(shapes)) == 1: # Common case", "# accepted as compatible with any other length. common_shape = [] for axis", "other length. common_shape = [] for axis in range(biggest): lengths = [s[axis] for", "(axis,)) elif len(unique) == 2: # There is exactly one non-1 length. 
The", "this axis is being broadcasted from a # length of 1, then set", "or more arrays have \" \"incompatible dimensions on axis %r.\" % (axis,)) elif", "# There must be at least two non-1 lengths for this axis. raise", "keyword ' 'argument {}'.format(kwargs.pop())) args = [afnumpy.array(_m, copy=False, subok=subok) for _m in args]", "the shapes in order to make the number of dimensions equal. for i", "Strides can be # left alone as nothing is broadcasted. common_shape.append(1) # Construct", "got an unexpected keyword ' 'argument {}'.format(kwargs.pop())) args = [afnumpy.array(_m, copy=False, subok=subok) for", "be broadcasted. return args shapes = [list(s) for s in shapes] strides =", "to be broadcasted. return args shapes = [list(s) for s in shapes] strides", "s in shapes] biggest = max(nds) # Go through each array and prepend", "its stride to 0 so that it repeats its # data. for i", "% (axis,)) elif len(unique) == 2: # There is exactly one non-1 length.", "at least two non-1 lengths for this axis. raise ValueError(\"shape mismatch: two or", "= [len(s) for s in shapes] biggest = max(nds) # Go through each", "each array, if this axis is being broadcasted from a # length of", "strides = [list(x.strides) for x in args] nds = [len(s) for s in", "= unique.pop() common_shape.append(new_length) # For each array, if this axis is being broadcasted", "new_length strides[i][axis] = 0 else: # Every array has a length of 1", "numpy.array(sh)//numpy.array(x_sh) if(numpy.prod(reps) > 1): broadcasted.append(afnumpy.tile(x, reps)) else: if(x.shape != tuple(sh)): x = x.reshape(sh)", "(x, sh) in zip(args, shapes): x_sh = x.shape + (1,)*(len(sh)-x.ndim) reps = numpy.array(sh)//numpy.array(x_sh)", "broadcasted from a # length of 1, then set its stride to 0", "have \" \"incompatible dimensions on axis %r.\" % (axis,)) elif len(unique) == 2:", "return args shapes = [list(s) for s in shapes] strides = [list(x.strides) for", "case where nothing needs to be broadcasted. 
return args shapes = [list(s) for", "to 0 so that it repeats its # data. for i in range(len(args)):", "will take # this value. unique.remove(1) new_length = unique.pop() common_shape.append(new_length) # For each", "1 on this axis. Strides can be # left alone as nothing is", "it repeats its # data. for i in range(len(args)): if shapes[i][axis] == 1:", "x in args] nds = [len(s) for s in shapes] biggest = max(nds)", "diff + shapes[i] strides[i] = [0] * diff + strides[i] # Chech each", "subok=subok) for _m in args] shapes = [x.shape for x in args] if", "if kwargs: raise TypeError('broadcast_arrays() got an unexpected keyword ' 'argument {}'.format(kwargs.pop())) args =", "dimension length of 1 is # accepted as compatible with any other length.", "broadcasted = [] for (x, sh) in zip(args, shapes): x_sh = x.shape +", "False) if kwargs: raise TypeError('broadcast_arrays() got an unexpected keyword ' 'argument {}'.format(kwargs.pop())) args", "length of 1 is # accepted as compatible with any other length. common_shape", "ValueError(\"shape mismatch: two or more arrays have \" \"incompatible dimensions on axis %r.\"", "== 1: shapes[i][axis] = new_length strides[i][axis] = 0 else: # Every array has", "1: shapes[i][axis] = new_length strides[i][axis] = 0 else: # Every array has a", "shapes[i][axis] = new_length strides[i][axis] = 0 else: # Every array has a length", "for s in shapes] biggest = max(nds) # Go through each array and", "= [list(s) for s in shapes] strides = [list(x.strides) for x in args]", "strides[i] = [0] * diff + strides[i] # Chech each dimension for compatibility.", "axis. Strides can be # left alone as nothing is broadcasted. common_shape.append(1) #", "= 0 else: # Every array has a length of 1 on this", "(1,)*(len(sh)-x.ndim) reps = numpy.array(sh)//numpy.array(x_sh) if(numpy.prod(reps) > 1): broadcasted.append(afnumpy.tile(x, reps)) else: if(x.shape != tuple(sh)):", "has a length of 1 on this axis. 
Strides can be # left", "shapes): x_sh = x.shape + (1,)*(len(sh)-x.ndim) reps = numpy.array(sh)//numpy.array(x_sh) if(numpy.prod(reps) > 1): broadcasted.append(afnumpy.tile(x,", "shapes[i] strides[i] = [0] * diff + strides[i] # Chech each dimension for", "where nothing needs to be broadcasted. return args shapes = [list(s) for s", "any other length. common_shape = [] for axis in range(biggest): lengths = [s[axis]", "elif len(unique) == 2: # There is exactly one non-1 length. The common", "max(nds) # Go through each array and prepend dimensions of length 1 to", "shapes] biggest = max(nds) # Go through each array and prepend dimensions of", "a length of 1 on this axis. Strides can be # left alone", "[s[axis] for s in shapes] unique = set(lengths + [1]) if len(unique) >", "lengths = [s[axis] for s in shapes] unique = set(lengths + [1]) if", "be # left alone as nothing is broadcasted. common_shape.append(1) # Construct the new", "an unexpected keyword ' 'argument {}'.format(kwargs.pop())) args = [afnumpy.array(_m, copy=False, subok=subok) for _m", "in shapes] biggest = max(nds) # Go through each array and prepend dimensions", "# data. for i in range(len(args)): if shapes[i][axis] == 1: shapes[i][axis] = new_length", "sh) in zip(args, shapes): x_sh = x.shape + (1,)*(len(sh)-x.ndim) reps = numpy.array(sh)//numpy.array(x_sh) if(numpy.prod(reps)", "more arrays have \" \"incompatible dimensions on axis %r.\" % (axis,)) elif len(unique)", "[len(s) for s in shapes] biggest = max(nds) # Go through each array", "= [] for (x, sh) in zip(args, shapes): x_sh = x.shape + (1,)*(len(sh)-x.ndim)", "[list(x.strides) for x in args] nds = [len(s) for s in shapes] biggest", "for i in range(len(args)): diff = biggest - nds[i] if diff > 0:", "needs to be broadcasted. return args shapes = [list(s) for s in shapes]", "two non-1 lengths for this axis. raise ValueError(\"shape mismatch: two or more arrays", "length of 1 on this axis. 
Strides can be # left alone as", "each of # the shapes in order to make the number of dimensions", "array, if this axis is being broadcasted from a # length of 1,", "%r.\" % (axis,)) elif len(unique) == 2: # There is exactly one non-1", "for i in range(len(args)): if shapes[i][axis] == 1: shapes[i][axis] = new_length strides[i][axis] =", "unexpected keyword ' 'argument {}'.format(kwargs.pop())) args = [afnumpy.array(_m, copy=False, subok=subok) for _m in", "[afnumpy.array(_m, copy=False, subok=subok) for _m in args] shapes = [x.shape for x in", "range(len(args)): diff = biggest - nds[i] if diff > 0: shapes[i] = [1]", "from a # length of 1, then set its stride to 0 so", "[] for (x, sh) in zip(args, shapes): x_sh = x.shape + (1,)*(len(sh)-x.ndim) reps", "diff > 0: shapes[i] = [1] * diff + shapes[i] strides[i] = [0]", "dimensions of length 1 to each of # the shapes in order to", "length. The common shape will take # this value. unique.remove(1) new_length = unique.pop()", "For each array, if this axis is being broadcasted from a # length", "is # accepted as compatible with any other length. common_shape = [] for", "nds[i] if diff > 0: shapes[i] = [1] * diff + shapes[i] strides[i]", "then set its stride to 0 so that it repeats its # data.", "\"incompatible dimensions on axis %r.\" % (axis,)) elif len(unique) == 2: # There", "len(unique) == 2: # There is exactly one non-1 length. The common shape", "== 2: # There is exactly one non-1 length. The common shape will", "unique.pop() common_shape.append(new_length) # For each array, if this axis is being broadcasted from", "array has a length of 1 on this axis. 
Strides can be #", "for axis in range(biggest): lengths = [s[axis] for s in shapes] unique =", "mismatch: two or more arrays have \" \"incompatible dimensions on axis %r.\" %", "+ [1]) if len(unique) > 2: # There must be at least two", "raise TypeError('broadcast_arrays() got an unexpected keyword ' 'argument {}'.format(kwargs.pop())) args = [afnumpy.array(_m, copy=False,", "2: # There is exactly one non-1 length. The common shape will take", "new_length = unique.pop() common_shape.append(new_length) # For each array, if this axis is being", "this axis. raise ValueError(\"shape mismatch: two or more arrays have \" \"incompatible dimensions", "value. unique.remove(1) new_length = unique.pop() common_shape.append(new_length) # For each array, if this axis", "order to make the number of dimensions equal. for i in range(len(args)): diff", "for s in shapes] unique = set(lengths + [1]) if len(unique) > 2:", "equal. for i in range(len(args)): diff = biggest - nds[i] if diff >", "of 1, then set its stride to 0 so that it repeats its", "accepted as compatible with any other length. common_shape = [] for axis in", "[1]) if len(unique) > 2: # There must be at least two non-1", "must be at least two non-1 lengths for this axis. raise ValueError(\"shape mismatch:", "zip(args, shapes): x_sh = x.shape + (1,)*(len(sh)-x.ndim) reps = numpy.array(sh)//numpy.array(x_sh) if(numpy.prod(reps) > 1):", "axis in range(biggest): lengths = [s[axis] for s in shapes] unique = set(lengths", "shapes] strides = [list(x.strides) for x in args] nds = [len(s) for s", "# Go through each array and prepend dimensions of length 1 to each", "with any other length. common_shape = [] for axis in range(biggest): lengths =", "> 2: # There must be at least two non-1 lengths for this", "A dimension length of 1 is # accepted as compatible with any other", "non-1 lengths for this axis. raise ValueError(\"shape mismatch: two or more arrays have", "shapes in order to make the number of dimensions equal. 
for i in", "numpy def broadcast_arrays(*args, **kwargs): subok = kwargs.pop('subok', False) if kwargs: raise TypeError('broadcast_arrays() got", "nothing needs to be broadcasted. return args shapes = [list(s) for s in", "as compatible with any other length. common_shape = [] for axis in range(biggest):", "for x in args] nds = [len(s) for s in shapes] biggest =", "[list(s) for s in shapes] strides = [list(x.strides) for x in args] nds", "of 1 is # accepted as compatible with any other length. common_shape =", "= set(lengths + [1]) if len(unique) > 2: # There must be at", "shapes] unique = set(lengths + [1]) if len(unique) > 2: # There must", "raise ValueError(\"shape mismatch: two or more arrays have \" \"incompatible dimensions on axis", "arrays have \" \"incompatible dimensions on axis %r.\" % (axis,)) elif len(unique) ==", "two or more arrays have \" \"incompatible dimensions on axis %r.\" % (axis,))", "len(set(shapes)) == 1: # Common case where nothing needs to be broadcasted. return", "of # the shapes in order to make the number of dimensions equal.", "if len(set(shapes)) == 1: # Common case where nothing needs to be broadcasted.", "**kwargs): subok = kwargs.pop('subok', False) if kwargs: raise TypeError('broadcast_arrays() got an unexpected keyword", "1, then set its stride to 0 so that it repeats its #", "args] if len(set(shapes)) == 1: # Common case where nothing needs to be", "of 1 on this axis. Strides can be # left alone as nothing", "biggest = max(nds) # Go through each array and prepend dimensions of length", "exactly one non-1 length. The common shape will take # this value. unique.remove(1)", "shape will take # this value. unique.remove(1) new_length = unique.pop() common_shape.append(new_length) # For", "0 else: # Every array has a length of 1 on this axis.", "arrays. broadcasted = [] for (x, sh) in zip(args, shapes): x_sh = x.shape", "[] for axis in range(biggest): lengths = [s[axis] for s in shapes] unique", "common shape will take # this value. 
unique.remove(1) new_length = unique.pop() common_shape.append(new_length) #", "stride to 0 so that it repeats its # data. for i in", "a # length of 1, then set its stride to 0 so that", "length. common_shape = [] for axis in range(biggest): lengths = [s[axis] for s", "\" \"incompatible dimensions on axis %r.\" % (axis,)) elif len(unique) == 2: #", "in shapes] strides = [list(x.strides) for x in args] nds = [len(s) for", "if this axis is being broadcasted from a # length of 1, then", "if shapes[i][axis] == 1: shapes[i][axis] = new_length strides[i][axis] = 0 else: # Every", "to each of # the shapes in order to make the number of", "for _m in args] shapes = [x.shape for x in args] if len(set(shapes))", "biggest - nds[i] if diff > 0: shapes[i] = [1] * diff +", "on this axis. Strides can be # left alone as nothing is broadcasted.", "so that it repeats its # data. for i in range(len(args)): if shapes[i][axis]", "'argument {}'.format(kwargs.pop())) args = [afnumpy.array(_m, copy=False, subok=subok) for _m in args] shapes =", "# the shapes in order to make the number of dimensions equal. for", "can be # left alone as nothing is broadcasted. common_shape.append(1) # Construct the", "common_shape.append(1) # Construct the new arrays. broadcasted = [] for (x, sh) in", "def broadcast_arrays(*args, **kwargs): subok = kwargs.pop('subok', False) if kwargs: raise TypeError('broadcast_arrays() got an", "if diff > 0: shapes[i] = [1] * diff + shapes[i] strides[i] =", "# For each array, if this axis is being broadcasted from a #", "= x.shape + (1,)*(len(sh)-x.ndim) reps = numpy.array(sh)//numpy.array(x_sh) if(numpy.prod(reps) > 1): broadcasted.append(afnumpy.tile(x, reps)) else:", "of dimensions equal. for i in range(len(args)): diff = biggest - nds[i] if", "in shapes] unique = set(lengths + [1]) if len(unique) > 2: # There", "# Every array has a length of 1 on this axis. 
Strides can", "= kwargs.pop('subok', False) if kwargs: raise TypeError('broadcast_arrays() got an unexpected keyword ' 'argument", "axis %r.\" % (axis,)) elif len(unique) == 2: # There is exactly one", "number of dimensions equal. for i in range(len(args)): diff = biggest - nds[i]", "if(numpy.prod(reps) > 1): broadcasted.append(afnumpy.tile(x, reps)) else: if(x.shape != tuple(sh)): x = x.reshape(sh) broadcasted.append(x)", "in zip(args, shapes): x_sh = x.shape + (1,)*(len(sh)-x.ndim) reps = numpy.array(sh)//numpy.array(x_sh) if(numpy.prod(reps) >", "# Construct the new arrays. broadcasted = [] for (x, sh) in zip(args,", "shapes = [x.shape for x in args] if len(set(shapes)) == 1: # Common", "length 1 to each of # the shapes in order to make the", "one non-1 length. The common shape will take # this value. unique.remove(1) new_length", "{}'.format(kwargs.pop())) args = [afnumpy.array(_m, copy=False, subok=subok) for _m in args] shapes = [x.shape", "_m in args] shapes = [x.shape for x in args] if len(set(shapes)) ==", "args shapes = [list(s) for s in shapes] strides = [list(x.strides) for x", "for s in shapes] strides = [list(x.strides) for x in args] nds =", "dimensions on axis %r.\" % (axis,)) elif len(unique) == 2: # There is", "the number of dimensions equal. for i in range(len(args)): diff = biggest -", "# length of 1, then set its stride to 0 so that it", "copy=False, subok=subok) for _m in args] shapes = [x.shape for x in args]", "x.shape + (1,)*(len(sh)-x.ndim) reps = numpy.array(sh)//numpy.array(x_sh) if(numpy.prod(reps) > 1): broadcasted.append(afnumpy.tile(x, reps)) else: if(x.shape", "shapes[i] = [1] * diff + shapes[i] strides[i] = [0] * diff +", "1): broadcasted.append(afnumpy.tile(x, reps)) else: if(x.shape != tuple(sh)): x = x.reshape(sh) broadcasted.append(x) return broadcasted", "take # this value. 
unique.remove(1) new_length = unique.pop() common_shape.append(new_length) # For each array,", "* diff + shapes[i] strides[i] = [0] * diff + strides[i] # Chech", "nds = [len(s) for s in shapes] biggest = max(nds) # Go through", "for this axis. raise ValueError(\"shape mismatch: two or more arrays have \" \"incompatible", "each dimension for compatibility. A dimension length of 1 is # accepted as", "0 so that it repeats its # data. for i in range(len(args)): if", "# There is exactly one non-1 length. The common shape will take #", "strides[i] # Chech each dimension for compatibility. A dimension length of 1 is", "to make the number of dimensions equal. for i in range(len(args)): diff =", "[1] * diff + shapes[i] strides[i] = [0] * diff + strides[i] #", "reps = numpy.array(sh)//numpy.array(x_sh) if(numpy.prod(reps) > 1): broadcasted.append(afnumpy.tile(x, reps)) else: if(x.shape != tuple(sh)): x", "axis is being broadcasted from a # length of 1, then set its", "axis. raise ValueError(\"shape mismatch: two or more arrays have \" \"incompatible dimensions on", "in args] shapes = [x.shape for x in args] if len(set(shapes)) == 1:", "= new_length strides[i][axis] = 0 else: # Every array has a length of", "prepend dimensions of length 1 to each of # the shapes in order", "= [afnumpy.array(_m, copy=False, subok=subok) for _m in args] shapes = [x.shape for x", "data. for i in range(len(args)): if shapes[i][axis] == 1: shapes[i][axis] = new_length strides[i][axis]", "[0] * diff + strides[i] # Chech each dimension for compatibility. A dimension", "the new arrays. broadcasted = [] for (x, sh) in zip(args, shapes): x_sh", "in range(biggest): lengths = [s[axis] for s in shapes] unique = set(lengths +", "Common case where nothing needs to be broadcasted. return args shapes = [list(s)", "on axis %r.\" % (axis,)) elif len(unique) == 2: # There is exactly", "through each array and prepend dimensions of length 1 to each of #", "this value. 
unique.remove(1) new_length = unique.pop() common_shape.append(new_length) # For each array, if this", "in range(len(args)): if shapes[i][axis] == 1: shapes[i][axis] = new_length strides[i][axis] = 0 else:", "new arrays. broadcasted = [] for (x, sh) in zip(args, shapes): x_sh =", "range(len(args)): if shapes[i][axis] == 1: shapes[i][axis] = new_length strides[i][axis] = 0 else: #", "import numpy def broadcast_arrays(*args, **kwargs): subok = kwargs.pop('subok', False) if kwargs: raise TypeError('broadcast_arrays()", "1 is # accepted as compatible with any other length. common_shape = []", "and prepend dimensions of length 1 to each of # the shapes in" ]
[ "set(alias_dict.items()) == set([('a', 'b'), ('c', 'd')]) assert 'alias_a' in alias_dict assert alias_dict['alias_a'] is", "are received from the method assert class_instance.property_1 == 1 assert class_instance.calls == 1", "'d'}) alias_dict.add_alias('alias_a', 'a') assert len(alias_dict) == 2 assert set(alias_dict.items()) == set([('a', 'b'), ('c',", "not in alias_dict assert 'foo' not in alias_dict @pytest.mark.parametrize( 'default_type_to_object, object_dict, expected_object_type', (", "RecursiveCallException from bravado_core.util import sanitize_name from bravado_core.util import strip_xscope def test_cached_property(): class Class(object):", "+ id(None) assert not mock_getcallargs.called assert decorated_function.cache == { (('a', id(1)), ('b', id(None))):", "bravado_core.util import lazy_class_attribute from bravado_core.util import memoize_by_id from bravado_core.util import ObjectType from bravado_core.util", "== 1 # If property is called twice no calls are received from", "# Calling the decorated method with known arguments will not call the inner", "inner method assert decorated_function(1) == id(1) + id(None) assert decorated_function.cache == { (('a',", "not in alias_dict @pytest.mark.parametrize( 'default_type_to_object, object_dict, expected_object_type', ( [True, 'anything that is not", "def test_no_op(): fragment = { 'MON': { '$ref': '#/definitions/DayHours', }, } expected =", "0 assert class_instance_1.prop == 1 assert class_instance_1.calls == 1 class_instance_2 = Class() assert", "'alias_a' in alias_dict assert alias_dict['alias_a'] is alias_dict['a'] assert alias_dict.get('alias_a') is alias_dict.get('a') assert alias_dict.get('f',", "id(None))): id(1) + id(None), } @pytest.mark.parametrize( ('input', 'expected'), [ ('pet.getBy Id', 'pet_getBy_Id'), #", "'#/definitions/DayHours', 'x-scope': [ 'file:///happyhour/api_docs/swagger.json', 'file:///happyhour/api_docs/swagger.json#/definitions/WeekHours', ], }, ] expected = [ { '$ref':", "} 
assert calls == [[1, None], [2, 3], [1, None]] @mock.patch('bravado_core.util.inspect.getcallargs', wraps=getcallargs) def", "('_getPetById_', 'getPetById'), # leading/trailing underscore ('get__Pet_By__Id', 'get_Pet_By_Id'), # double underscores ('^&#@!$foo%+++:;\"<>?/', 'foo'), #", "'response description', 'schema': {'type': 'object'}}, ObjectType.RESPONSE], [True, {'description': 'response description', 'parameters': {'param': {'type':", "we strip multiple underscores ('100percent', 'percent'), # make sure we remove all digits", "'MON': { '$ref': '#/definitions/DayHours', }, } assert expected == strip_xscope(fragment) def test_petstore_spec(petstore_spec): assert", "alias_dict assert alias_dict['alias_a'] is alias_dict['a'] assert alias_dict.get('alias_a') is alias_dict.get('a') assert alias_dict.get('f', 'not there')", "is deleted then the method is called again del class_instance.property_1 assert class_instance.property_1 ==", "1 return self.calls assert isinstance(Class.property_1, cached_property) class_instance = Class() assert class_instance.calls == 0", "calls == [[1, None]] assert decorated_function(2, 3) == id(2) + id(3) assert decorated_function.cache", "('c', 'd')]) assert 'alias_a' in alias_dict assert alias_dict['alias_a'] is alias_dict['a'] assert alias_dict.get('alias_a') is", "assert expected == strip_xscope(fragment) assert 'x-scope' in fragment['MON'] def test_contained_in_list(): fragment = [", "calls == [[1, None], [2, 3]] decorated_function.cache.clear() assert decorated_function(1) == id(1) + id(None)", "assert class_instance.property_1 == 1 assert class_instance.calls == 1 # If property is deleted", "def property_1(self): self.calls += 1 return self.calls assert isinstance(Class.property_1, cached_property) class_instance = Class()", "not in alias_dict def test_AliasKeyDict_copy(): alias_dict = AliasKeyDict([('foo', 'bar')]) alias_dict.add_alias('baz', 'foo') dict_copy =", "__init__(self): self.calls = 0 @cached_property def 
property_1(self): self.calls += 1 return self.calls assert", "'alias_a' not in alias_dict def test_AliasKeyDict_copy(): alias_dict = AliasKeyDict([('foo', 'bar')]) alias_dict.add_alias('baz', 'foo') dict_copy", "== { (('a', id(1)), ('b', id(None))): id(1) + id(None), } @pytest.mark.parametrize( ('input', 'expected'),", "decorated_function.cache.clear() assert decorated_function(1) == id(1) + id(None) assert decorated_function.cache == { (('a', id(1)),", "def test_memoize_by_id_decorator(): calls = [] def function(a, b=None): calls.append([a, b]) return id(a) +", "1 assert 'a' not in alias_dict assert 'alias_a' not in alias_dict def test_AliasKeyDict_copy():", "determine_object_type from bravado_core.util import lazy_class_attribute from bravado_core.util import memoize_by_id from bravado_core.util import ObjectType", "} assert calls == [[1, None], [2, 3]] decorated_function.cache.clear() assert decorated_function(1) == id(1)", "alias_dict['alias_a'] is alias_dict['a'] assert alias_dict.get('alias_a') is alias_dict.get('a') assert alias_dict.get('f', 'not there') == 'not", "[True, {'description': 'response description', 'schema': {'type': 'object'}}, ObjectType.RESPONSE], [True, {'description': 'response description', 'parameters':", "coding: utf-8 -*- from inspect import getcallargs import mock import pytest from bravado_core.util", "== set([('a', 'b'), ('c', 'd')]) assert 'alias_a' in alias_dict assert alias_dict['alias_a'] is alias_dict['a']", "AliasKeyDict([('foo', 'bar')]) alias_dict.add_alias('baz', 'foo') del alias_dict['baz'] assert len(alias_dict) == 0 assert 'baz' not", "'percent'), # make sure we remove all digits ('100.0', '_100_0'), # a name", "from bravado_core.util import strip_xscope def test_cached_property(): class Class(object): def __init__(self): self.calls = 0", "1 assert class_instance_1.calls == 1 class_instance_2 = Class() assert class_instance_2.calls == 1 assert", "id(None), } assert calls == [[1, None], [2, 3], [1, None]] 
@mock.patch('bravado_core.util.inspect.getcallargs', wraps=getcallargs)", "{ (('a', id(1)), ('b', id(None))): id(1) + id(None), } mock_getcallargs.reset_mock() assert decorated_function(a=1) ==", "id(None), } assert calls == [[1, None]] assert decorated_function(2, 3) == id(2) +", "make sure we remove all digits ('100.0', '_100_0'), # a name consisting mostly", "memoize_by_id(function) assert decorated_function(1) == id(1) + id(None) assert decorated_function.cache == { (('a', id(1)),", "alias_dict.pop('alias_a') == 'b' assert len(alias_dict) == 1 assert 'a' not in alias_dict assert", "# If property is deleted then the method is called again del class_instance.property_1", "'$ref': '#/definitions/DayHours', }, } expected = { 'MON': { '$ref': '#/definitions/DayHours', }, }", "], }, } expected = { 'MON': { '$ref': '#/definitions/DayHours', }, } assert", "3]] decorated_function.cache.clear() assert decorated_function(1) == id(1) + id(None) assert decorated_function.cache == { (('a',", ") def test_determine_object_type(default_type_to_object, object_dict, expected_object_type): assert determine_object_type(object_dict, default_type_to_object) == expected_object_type def test_empty(): assert", "calls == [[1, None], [2, 3]] # Calling the decorated method with known", "ObjectType.RESPONSE], [True, {'description': 'response description', 'parameters': {'param': {'type': 'object'}}}, ObjectType.SCHEMA], [False, {'description': 'response", "class_instance.property_1 assert class_instance.property_1 == 2 assert class_instance.calls == 2 def test_class_cached_property(): class Class(object):", "3) == id(2) + id(3) assert decorated_function.cache == { (('a', id(1)), ('b', id(None))):", "+ id(None), (('a', id(2)), ('b', id(3))): id(2) + id(3), } assert calls ==", "'#/definitions/DayHours', }, ] assert expected == strip_xscope(fragment) assert 'x-scope' in fragment[0] def test_no_op():", "is alias_dict['a'] assert alias_dict.get('alias_a') is alias_dict.get('a') assert 
alias_dict.get('f', 'not there') == 'not there'", "{'responses': {'200': {'description': 'response description'}}}}, ObjectType.PATH_ITEM], [True, {'description': 'response description', 'schema': {'type': 'object'}},", "'schema': {'type': 'object'}}, ObjectType.PARAMETER], [True, {'get': {'responses': {'200': {'description': 'response description'}}}}, ObjectType.PATH_ITEM], [True,", "assert decorated_function(a=1) == id(1) + id(None) assert not mock_getcallargs.called assert decorated_function.cache == {", "assert 'x-scope' in fragment['MON'] def test_contained_in_list(): fragment = [ { '$ref': '#/definitions/DayHours', 'x-scope':", "def test_contained_in_list(): fragment = [ { '$ref': '#/definitions/DayHours', 'x-scope': [ 'file:///happyhour/api_docs/swagger.json', 'file:///happyhour/api_docs/swagger.json#/definitions/WeekHours', ],", "alias_dict def test_AliasKeyDict_copy(): alias_dict = AliasKeyDict([('foo', 'bar')]) alias_dict.add_alias('baz', 'foo') dict_copy = alias_dict.copy() assert", "test_no_op(): fragment = { 'MON': { '$ref': '#/definitions/DayHours', }, } expected = {", "def test_AliasKeyDict_copy(): alias_dict = AliasKeyDict([('foo', 'bar')]) alias_dict.add_alias('baz', 'foo') dict_copy = alias_dict.copy() assert set(dict_copy.items())", "strip_xscope def test_cached_property(): class Class(object): def __init__(self): self.calls = 0 @cached_property def property_1(self):", "in alias_dict assert alias_dict['alias_a'] is alias_dict['a'] assert alias_dict.get('alias_a') is alias_dict.get('a') assert alias_dict.get('f', 'not", "assert decorated_function(1) == id(1) + id(None) mock_getcallargs.assert_called_once_with(function, 1) assert calls == [[1, None]]", "bravado_core.util import AliasKeyDict from bravado_core.util import cached_property from bravado_core.util import determine_object_type from bravado_core.util", "('b', id(None))): id(1) + id(None), } assert calls == [[1, None]] assert decorated_function(2,", "{ (('a', id(1)), ('b', id(None))): 
id(1) + id(None), } @pytest.mark.parametrize( ('input', 'expected'), [", "from the method assert class_instance.property_1 == 1 assert class_instance.calls == 1 # If", "the inner method assert decorated_function(1) == id(1) + id(None) assert decorated_function.cache == {", "mock_getcallargs.called assert decorated_function.cache == { (('a', id(1)), ('b', id(None))): id(1) + id(None), }", "alias_dict['baz'] assert len(alias_dict) == 0 assert 'baz' not in alias_dict assert 'foo' not", "== [[1, None], [2, 3], [1, None]] @mock.patch('bravado_core.util.inspect.getcallargs', wraps=getcallargs) def test_memoize_by_id_do_not_use_inspect_if_only_kwargs_are_provided(mock_getcallargs): calls =", "test_memoize_by_id_decorator_recursive_call(): calls = [] @memoize_by_id def function(a): calls.append(a) return function(a) with pytest.raises(RecursiveCallException): function(mock.sentinel.A)", "strip_xscope({}) def test_contained_in_dict(): fragment = { 'MON': { '$ref': '#/definitions/DayHours', 'x-scope': [ 'file:///happyhour/api_docs/swagger.json',", "mock_getcallargs.reset_mock() assert decorated_function(a=1) == id(1) + id(None) assert not mock_getcallargs.called assert decorated_function.cache ==", "1 # If property is deleted then the method is called again del", "( [True, 'anything that is not a dictionary', ObjectType.UNKNOWN], [True, {'in': 'body', 'name':", "id(3))): id(2) + id(3), } assert calls == [[1, None], [2, 3]] decorated_function.cache.clear()", "in alias_dict assert 'foo' not in alias_dict @pytest.mark.parametrize( 'default_type_to_object, object_dict, expected_object_type', ( [True,", "'anything that is not a dictionary', ObjectType.UNKNOWN], [True, {'in': 'body', 'name': 'body', 'required':", "from bravado_core.util import lazy_class_attribute from bravado_core.util import memoize_by_id from bravado_core.util import ObjectType from", "== 0 assert class_instance_1.prop == 1 assert class_instance_1.calls == 1 class_instance_2 = Class()", "no calls are received 
from the method assert class_instance.property_1 == 1 assert class_instance.calls", "expected_object_type): assert determine_object_type(object_dict, default_type_to_object) == expected_object_type def test_empty(): assert {} == strip_xscope({}) def", "('b', id(None))): id(1) + id(None), } mock_getcallargs.reset_mock() assert decorated_function(a=1) == id(1) + id(None)", "== 'not there' assert alias_dict.pop('alias_a') == 'b' assert len(alias_dict) == 1 assert 'a'", "id(2)), ('b', id(3))): id(2) + id(3), } assert calls == [[1, None], [2,", "{ '$ref': '#/definitions/DayHours', }, } assert expected == strip_xscope(fragment) def test_petstore_spec(petstore_spec): assert petstore_spec.client_spec_dict", "# make sure we remove all digits ('100.0', '_100_0'), # a name consisting", "def test_cached_property(): class Class(object): def __init__(self): self.calls = 0 @cached_property def property_1(self): self.calls", "(('a', id(1)), ('b', id(None))): id(1) + id(None), } mock_getcallargs.reset_mock() assert decorated_function(a=1) == id(1)", "len(alias_dict) == 2 assert set(alias_dict.items()) == set([('a', 'b'), ('c', 'd')]) assert 'alias_a' in", "test_class_cached_property(): class Class(object): calls = 0 @lazy_class_attribute def prop(cls): cls.calls += 1 return", "'not there') == 'not there' assert alias_dict.pop('alias_a') == 'b' assert len(alias_dict) == 1", "of digits should keep them ], ) def test_sanitize_name(input, expected): assert sanitize_name(input) ==", "1 # If property is called twice no calls are received from the", "def test_empty(): assert {} == strip_xscope({}) def test_contained_in_dict(): fragment = { 'MON': {", "from bravado_core.util import AliasKeyDict from bravado_core.util import cached_property from bravado_core.util import determine_object_type from", "self.calls += 1 return self.calls assert isinstance(Class.property_1, cached_property) class_instance = Class() assert class_instance.calls", "-*- from inspect import getcallargs import mock 
import pytest from bravado_core.util import AliasKeyDict", "test_contained_in_list(): fragment = [ { '$ref': '#/definitions/DayHours', 'x-scope': [ 'file:///happyhour/api_docs/swagger.json', 'file:///happyhour/api_docs/swagger.json#/definitions/WeekHours', ], },", "class_instance_2.prop == 1 assert class_instance_2.calls == 1 def test_memoize_by_id_decorator_recursive_call(): calls = [] @memoize_by_id", "'$ref': '#/definitions/DayHours', 'x-scope': [ 'file:///happyhour/api_docs/swagger.json', 'file:///happyhour/api_docs/swagger.json#/definitions/WeekHours', ], }, } expected = { 'MON':", "== [mock.sentinel.A] def test_memoize_by_id_decorator(): calls = [] def function(a, b=None): calls.append([a, b]) return", "consisting mostly of digits should keep them ], ) def test_sanitize_name(input, expected): assert", "+ id(3) assert decorated_function.cache == { (('a', id(1)), ('b', id(None))): id(1) + id(None),", "calls are received from the method assert class_instance.property_1 == 1 assert class_instance.calls ==", "{ 'MON': { '$ref': '#/definitions/DayHours', }, } assert expected == strip_xscope(fragment) assert 'x-scope'", "None], [2, 3], [1, None]] @mock.patch('bravado_core.util.inspect.getcallargs', wraps=getcallargs) def test_memoize_by_id_do_not_use_inspect_if_only_kwargs_are_provided(mock_getcallargs): calls = [] def", "wraps=getcallargs) def test_memoize_by_id_do_not_use_inspect_if_only_kwargs_are_provided(mock_getcallargs): calls = [] def function(a, b=None): calls.append([a, b]) return id(a)", "not mock_getcallargs.called assert decorated_function.cache == { (('a', id(1)), ('b', id(None))): id(1) + id(None),", "sure we strip multiple underscores ('100percent', 'percent'), # make sure we remove all", "== id(2) + id(3) assert decorated_function.cache == { (('a', id(1)), ('b', id(None))): id(1)", "= { 'MON': { '$ref': '#/definitions/DayHours', }, } expected = { 'MON': {", "test_AliasKeyDict_copy(): alias_dict = AliasKeyDict([('foo', 'bar')]) 
alias_dict.add_alias('baz', 'foo') dict_copy = alias_dict.copy() assert set(dict_copy.items()) ==", "method assert class_instance.property_1 == 1 assert class_instance.calls == 1 # If property is", "+ id(None), } assert calls == [[1, None], [2, 3], [1, None]] @mock.patch('bravado_core.util.inspect.getcallargs',", "b=None): calls.append([a, b]) return id(a) + id(b) decorated_function = memoize_by_id(function) assert decorated_function(1) ==", "1 assert class_instance.calls == 1 # If property is deleted then the method", "= { 'MON': { '$ref': '#/definitions/DayHours', 'x-scope': [ 'file:///happyhour/api_docs/swagger.json', 'file:///happyhour/api_docs/swagger.json#/definitions/WeekHours', ], }, }", "id(1) + id(None), } @pytest.mark.parametrize( ('input', 'expected'), [ ('pet.getBy Id', 'pet_getBy_Id'), # simple", "leading/trailing underscore ('get__Pet_By__Id', 'get_Pet_By_Id'), # double underscores ('^&#@!$foo%+++:;\"<>?/', 'foo'), # bunch of illegal", "# bunch of illegal chars ('__foo__', 'foo'), # make sure we strip multiple", "'x-scope': [ 'file:///happyhour/api_docs/swagger.json', 'file:///happyhour/api_docs/swagger.json#/definitions/WeekHours', ], }, } expected = { 'MON': { '$ref':", "AliasKeyDict([('foo', 'bar')]) alias_dict.add_alias('baz', 'foo') dict_copy = alias_dict.copy() assert set(dict_copy.items()) == set(alias_dict.items()) assert dict_copy.alias_to_key", "class Class(object): def __init__(self): self.calls = 0 @cached_property def property_1(self): self.calls += 1", "received from the method assert class_instance.property_1 == 1 assert class_instance.calls == 1 #", "@cached_property def property_1(self): self.calls += 1 return self.calls assert isinstance(Class.property_1, cached_property) class_instance =", "'_100_0'), # a name consisting mostly of digits should keep them ], )", "[1, None]] @mock.patch('bravado_core.util.inspect.getcallargs', wraps=getcallargs) def test_memoize_by_id_do_not_use_inspect_if_only_kwargs_are_provided(mock_getcallargs): 
calls = [] def function(a, b=None): calls.append([a,", "assert class_instance_1.prop == 1 assert class_instance_1.calls == 1 class_instance_2 = Class() assert class_instance_2.calls", "set(dict_copy.items()) == set(alias_dict.items()) assert dict_copy.alias_to_key == alias_dict.alias_to_key def test_AliasKeyDict_del(): alias_dict = AliasKeyDict([('foo', 'bar')])", "'$ref': '#/definitions/DayHours', 'x-scope': [ 'file:///happyhour/api_docs/swagger.json', 'file:///happyhour/api_docs/swagger.json#/definitions/WeekHours', ], }, ] expected = [ {", "{ '$ref': '#/definitions/DayHours', 'x-scope': [ 'file:///happyhour/api_docs/swagger.json', 'file:///happyhour/api_docs/swagger.json#/definitions/WeekHours', ], }, } expected = {", "not call the inner method assert decorated_function(1) == id(1) + id(None) assert decorated_function.cache", "pytest from bravado_core.util import AliasKeyDict from bravado_core.util import cached_property from bravado_core.util import determine_object_type", "known arguments will not call the inner method assert decorated_function(1) == id(1) +", "class_instance_1.prop == 1 assert class_instance_1.calls == 1 class_instance_2 = Class() assert class_instance_2.calls ==", "} mock_getcallargs.reset_mock() assert decorated_function(a=1) == id(1) + id(None) assert not mock_getcallargs.called assert decorated_function.cache", "description', 'parameters': {'param': {'type': 'object'}}}, ObjectType.UNKNOWN], # noqa ), ) def test_determine_object_type(default_type_to_object, object_dict,", "('b', id(None))): id(1) + id(None), } assert calls == [[1, None], [2, 3],", "== 0 assert 'baz' not in alias_dict assert 'foo' not in alias_dict @pytest.mark.parametrize(", "import getcallargs import mock import pytest from bravado_core.util import AliasKeyDict from bravado_core.util import", "0 @cached_property def property_1(self): self.calls += 1 return self.calls assert isinstance(Class.property_1, cached_property) class_instance", "[True, {'get': {'responses': 
{'200': {'description': 'response description'}}}}, ObjectType.PATH_ITEM], [True, {'description': 'response description', 'schema':", "{ 'MON': { '$ref': '#/definitions/DayHours', }, } assert expected == strip_xscope(fragment) def test_petstore_spec(petstore_spec):", "+ id(None) mock_getcallargs.assert_called_once_with(function, 1) assert calls == [[1, None]] assert decorated_function.cache == {", "2 assert set(alias_dict.items()) == set([('a', 'b'), ('c', 'd')]) assert 'alias_a' in alias_dict assert", "'#/definitions/DayHours', }, } assert expected == strip_xscope(fragment) def test_petstore_spec(petstore_spec): assert petstore_spec.client_spec_dict == strip_xscope(petstore_spec.spec_dict)", "@pytest.mark.parametrize( 'default_type_to_object, object_dict, expected_object_type', ( [True, 'anything that is not a dictionary', ObjectType.UNKNOWN],", "== 1 assert class_instance.calls == 1 # If property is deleted then the", "== id(1) + id(None) mock_getcallargs.assert_called_once_with(function, 1) assert calls == [[1, None]] assert decorated_function.cache", "assert class_instance_2.calls == 1 assert class_instance_2.prop == 1 assert class_instance_2.calls == 1 def", "# a name consisting mostly of digits should keep them ], ) def", "assert calls == [[1, None], [2, 3], [1, None]] @mock.patch('bravado_core.util.inspect.getcallargs', wraps=getcallargs) def test_memoize_by_id_do_not_use_inspect_if_only_kwargs_are_provided(mock_getcallargs):", "'MON': { '$ref': '#/definitions/DayHours', }, } assert expected == strip_xscope(fragment) assert 'x-scope' in", "from bravado_core.util import cached_property from bravado_core.util import determine_object_type from bravado_core.util import lazy_class_attribute from", "None]] @mock.patch('bravado_core.util.inspect.getcallargs', wraps=getcallargs) def test_memoize_by_id_do_not_use_inspect_if_only_kwargs_are_provided(mock_getcallargs): calls = [] def function(a, b=None): calls.append([a, b])", "'response description', 'parameters': 
{'param': {'type': 'object'}}}, ObjectType.UNKNOWN], # noqa ), ) def test_determine_object_type(default_type_to_object,", "'x-scope': [ 'file:///happyhour/api_docs/swagger.json', 'file:///happyhour/api_docs/swagger.json#/definitions/WeekHours', ], }, ] expected = [ { '$ref': '#/definitions/DayHours',", "2 def test_class_cached_property(): class Class(object): calls = 0 @lazy_class_attribute def prop(cls): cls.calls +=", "case ('_getPetById_', 'getPetById'), # leading/trailing underscore ('get__Pet_By__Id', 'get_Pet_By_Id'), # double underscores ('^&#@!$foo%+++:;\"<>?/', 'foo'),", "def test_determine_object_type(default_type_to_object, object_dict, expected_object_type): assert determine_object_type(object_dict, default_type_to_object) == expected_object_type def test_empty(): assert {}", "fragment = { 'MON': { '$ref': '#/definitions/DayHours', 'x-scope': [ 'file:///happyhour/api_docs/swagger.json', 'file:///happyhour/api_docs/swagger.json#/definitions/WeekHours', ], },", "property is called twice no calls are received from the method assert class_instance.property_1", "+= 1 return cls.calls class_instance_1 = Class() assert class_instance_1.calls == 0 assert class_instance_1.prop", "assert dict_copy.alias_to_key == alias_dict.alias_to_key def test_AliasKeyDict_del(): alias_dict = AliasKeyDict([('foo', 'bar')]) alias_dict.add_alias('baz', 'foo') del", "fragment = { 'MON': { '$ref': '#/definitions/DayHours', }, } expected = { 'MON':", "underscore ('get__Pet_By__Id', 'get_Pet_By_Id'), # double underscores ('^&#@!$foo%+++:;\"<>?/', 'foo'), # bunch of illegal chars", "'foo') dict_copy = alias_dict.copy() assert set(dict_copy.items()) == set(alias_dict.items()) assert dict_copy.alias_to_key == alias_dict.alias_to_key def", "from bravado_core.util import memoize_by_id from bravado_core.util import ObjectType from bravado_core.util import RecursiveCallException from", "assert calls == [[1, None], [2, 3]] # Calling the decorated method with", "id(3) assert 
decorated_function.cache == { (('a', id(1)), ('b', id(None))): id(1) + id(None), (('a',", "id(a) + id(b) decorated_function = memoize_by_id(function) assert decorated_function(1) == id(1) + id(None) assert", "isinstance(Class.property_1, cached_property) class_instance = Class() assert class_instance.calls == 0 assert class_instance.property_1 == 1", "id(None))): id(1) + id(None), } assert calls == [[1, None]] assert decorated_function(2, 3)", "illegal chars ('__foo__', 'foo'), # make sure we strip multiple underscores ('100percent', 'percent'),", "method assert decorated_function(1) == id(1) + id(None) assert decorated_function.cache == { (('a', id(1)),", "cached_property from bravado_core.util import determine_object_type from bravado_core.util import lazy_class_attribute from bravado_core.util import memoize_by_id", "dictionary', ObjectType.UNKNOWN], [True, {'in': 'body', 'name': 'body', 'required': True, 'schema': {'type': 'object'}}, ObjectType.PARAMETER],", "assert 'a' not in alias_dict assert 'alias_a' not in alias_dict def test_AliasKeyDict_copy(): alias_dict", "return id(a) + id(b) decorated_function = memoize_by_id(function) assert decorated_function(1) == id(1) + id(None)", "== [[1, None]] assert decorated_function(2, 3) == id(2) + id(3) assert decorated_function.cache ==", "{'param': {'type': 'object'}}}, ObjectType.SCHEMA], [False, {'description': 'response description', 'parameters': {'param': {'type': 'object'}}}, ObjectType.UNKNOWN],", "}, ] expected = [ { '$ref': '#/definitions/DayHours', }, ] assert expected ==", "calls = [] @memoize_by_id def function(a): calls.append(a) return function(a) with pytest.raises(RecursiveCallException): function(mock.sentinel.A) assert", "function(a) with pytest.raises(RecursiveCallException): function(mock.sentinel.A) assert calls == [mock.sentinel.A] def test_memoize_by_id_decorator(): calls = []", "alias_dict.get('f', 'not there') == 'not there' assert alias_dict.pop('alias_a') == 'b' assert len(alias_dict) ==", 
"assert set(dict_copy.items()) == set(alias_dict.items()) assert dict_copy.alias_to_key == alias_dict.alias_to_key def test_AliasKeyDict_del(): alias_dict = AliasKeyDict([('foo',", "None]] assert decorated_function.cache == { (('a', id(1)), ('b', id(None))): id(1) + id(None), }", "{'description': 'response description', 'schema': {'type': 'object'}}, ObjectType.RESPONSE], [True, {'description': 'response description', 'parameters': {'param':", "= 0 @lazy_class_attribute def prop(cls): cls.calls += 1 return cls.calls class_instance_1 = Class()", "= [ { '$ref': '#/definitions/DayHours', 'x-scope': [ 'file:///happyhour/api_docs/swagger.json', 'file:///happyhour/api_docs/swagger.json#/definitions/WeekHours', ], }, ] expected", "AliasKeyDict({'a': 'b', 'c': 'd'}) alias_dict.add_alias('alias_a', 'a') assert len(alias_dict) == 2 assert set(alias_dict.items()) ==", "'baz' not in alias_dict assert 'foo' not in alias_dict @pytest.mark.parametrize( 'default_type_to_object, object_dict, expected_object_type',", "id(2) + id(3) assert decorated_function.cache == { (('a', id(1)), ('b', id(None))): id(1) +", "[True, 'anything that is not a dictionary', ObjectType.UNKNOWN], [True, {'in': 'body', 'name': 'body',", "again del class_instance.property_1 assert class_instance.property_1 == 2 assert class_instance.calls == 2 def test_class_cached_property():", "= memoize_by_id(function) assert decorated_function(1) == id(1) + id(None) assert decorated_function.cache == { (('a',", "id(None) assert not mock_getcallargs.called assert decorated_function.cache == { (('a', id(1)), ('b', id(None))): id(1)", "name consisting mostly of digits should keep them ], ) def test_sanitize_name(input, expected):", "def test_AliasKeyDict(): alias_dict = AliasKeyDict({'a': 'b', 'c': 'd'}) alias_dict.add_alias('alias_a', 'a') assert len(alias_dict) ==", "Id', 'pet_getBy_Id'), # simple case ('_getPetById_', 'getPetById'), # leading/trailing underscore ('get__Pet_By__Id', 'get_Pet_By_Id'), #", "== 
strip_xscope(fragment) assert 'x-scope' in fragment['MON'] def test_contained_in_list(): fragment = [ { '$ref':", "return cls.calls class_instance_1 = Class() assert class_instance_1.calls == 0 assert class_instance_1.prop == 1", "'default_type_to_object, object_dict, expected_object_type', ( [True, 'anything that is not a dictionary', ObjectType.UNKNOWN], [True,", "id(2) + id(3), } assert calls == [[1, None], [2, 3]] decorated_function.cache.clear() assert", "dict_copy = alias_dict.copy() assert set(dict_copy.items()) == set(alias_dict.items()) assert dict_copy.alias_to_key == alias_dict.alias_to_key def test_AliasKeyDict_del():", "1) assert calls == [[1, None]] assert decorated_function.cache == { (('a', id(1)), ('b',", "id(1)), ('b', id(None))): id(1) + id(None), (('a', id(2)), ('b', id(3))): id(2) + id(3),", "{'param': {'type': 'object'}}}, ObjectType.UNKNOWN], # noqa ), ) def test_determine_object_type(default_type_to_object, object_dict, expected_object_type): assert", "== { (('a', id(1)), ('b', id(None))): id(1) + id(None), } assert calls ==", "assert {} == strip_xscope({}) def test_contained_in_dict(): fragment = { 'MON': { '$ref': '#/definitions/DayHours',", "[2, 3]] decorated_function.cache.clear() assert decorated_function(1) == id(1) + id(None) assert decorated_function.cache == {", "calls = [] def function(a, b=None): calls.append([a, b]) return id(a) + id(b) decorated_function", "('__foo__', 'foo'), # make sure we strip multiple underscores ('100percent', 'percent'), # make", "id(None), (('a', id(2)), ('b', id(3))): id(2) + id(3), } assert calls == [[1,", "cls.calls += 1 return cls.calls class_instance_1 = Class() assert class_instance_1.calls == 0 assert", "id(1) + id(None), } assert calls == [[1, None]] assert decorated_function(2, 3) ==", "assert 'alias_a' not in alias_dict def test_AliasKeyDict_copy(): alias_dict = AliasKeyDict([('foo', 'bar')]) alias_dict.add_alias('baz', 'foo')", "mock_getcallargs.assert_called_once_with(function, 1) assert 
calls == [[1, None]] assert decorated_function.cache == { (('a', id(1)),", "class Class(object): calls = 0 @lazy_class_attribute def prop(cls): cls.calls += 1 return cls.calls", "test_AliasKeyDict(): alias_dict = AliasKeyDict({'a': 'b', 'c': 'd'}) alias_dict.add_alias('alias_a', 'a') assert len(alias_dict) == 2", "test_cached_property(): class Class(object): def __init__(self): self.calls = 0 @cached_property def property_1(self): self.calls +=", "in alias_dict assert 'alias_a' not in alias_dict def test_AliasKeyDict_copy(): alias_dict = AliasKeyDict([('foo', 'bar')])", "import AliasKeyDict from bravado_core.util import cached_property from bravado_core.util import determine_object_type from bravado_core.util import", "[[1, None]] assert decorated_function.cache == { (('a', id(1)), ('b', id(None))): id(1) + id(None),", "import RecursiveCallException from bravado_core.util import sanitize_name from bravado_core.util import strip_xscope def test_cached_property(): class", "class_instance.calls == 1 # If property is called twice no calls are received", "decorated method with known arguments will not call the inner method assert decorated_function(1)", "alias_dict.add_alias('alias_a', 'a') assert len(alias_dict) == 2 assert set(alias_dict.items()) == set([('a', 'b'), ('c', 'd')])", "(('a', id(1)), ('b', id(None))): id(1) + id(None), (('a', id(2)), ('b', id(3))): id(2) +", "assert 'foo' not in alias_dict @pytest.mark.parametrize( 'default_type_to_object, object_dict, expected_object_type', ( [True, 'anything that", "== 1 assert class_instance_1.calls == 1 class_instance_2 = Class() assert class_instance_2.calls == 1", "= { 'MON': { '$ref': '#/definitions/DayHours', }, } assert expected == strip_xscope(fragment) def", "'a' not in alias_dict assert 'alias_a' not in alias_dict def test_AliasKeyDict_copy(): alias_dict =", ") def test_sanitize_name(input, expected): assert sanitize_name(input) == expected def test_AliasKeyDict(): alias_dict = AliasKeyDict({'a':", "== { 
(('a', id(1)), ('b', id(None))): id(1) + id(None), } mock_getcallargs.reset_mock() assert decorated_function(a=1)", "assert decorated_function.cache == { (('a', id(1)), ('b', id(None))): id(1) + id(None), (('a', id(2)),", "test_empty(): assert {} == strip_xscope({}) def test_contained_in_dict(): fragment = { 'MON': { '$ref':", "ObjectType.SCHEMA], [False, {'description': 'response description', 'parameters': {'param': {'type': 'object'}}}, ObjectType.UNKNOWN], # noqa ),", "bravado_core.util import memoize_by_id from bravado_core.util import ObjectType from bravado_core.util import RecursiveCallException from bravado_core.util", "expected_object_type', ( [True, 'anything that is not a dictionary', ObjectType.UNKNOWN], [True, {'in': 'body',", "in fragment['MON'] def test_contained_in_list(): fragment = [ { '$ref': '#/definitions/DayHours', 'x-scope': [ 'file:///happyhour/api_docs/swagger.json',", "alias_dict['a'] assert alias_dict.get('alias_a') is alias_dict.get('a') assert alias_dict.get('f', 'not there') == 'not there' assert", "ObjectType.PARAMETER], [True, {'get': {'responses': {'200': {'description': 'response description'}}}}, ObjectType.PATH_ITEM], [True, {'description': 'response description',", "== strip_xscope({}) def test_contained_in_dict(): fragment = { 'MON': { '$ref': '#/definitions/DayHours', 'x-scope': [", "= alias_dict.copy() assert set(dict_copy.items()) == set(alias_dict.items()) assert dict_copy.alias_to_key == alias_dict.alias_to_key def test_AliasKeyDict_del(): alias_dict", "assert isinstance(Class.property_1, cached_property) class_instance = Class() assert class_instance.calls == 0 assert class_instance.property_1 ==", "def test_class_cached_property(): class Class(object): calls = 0 @lazy_class_attribute def prop(cls): cls.calls += 1", "assert class_instance.calls == 1 # If property is deleted then the method is", "{'description': 'response description', 'parameters': {'param': {'type': 'object'}}}, ObjectType.SCHEMA], [False, 
{'description': 'response description', 'parameters':", "class_instance_1 = Class() assert class_instance_1.calls == 0 assert class_instance_1.prop == 1 assert class_instance_1.calls", "bravado_core.util import cached_property from bravado_core.util import determine_object_type from bravado_core.util import lazy_class_attribute from bravado_core.util", "{ '$ref': '#/definitions/DayHours', }, } assert expected == strip_xscope(fragment) assert 'x-scope' in fragment['MON']", "[ { '$ref': '#/definitions/DayHours', }, ] assert expected == strip_xscope(fragment) assert 'x-scope' in", "= [ { '$ref': '#/definitions/DayHours', }, ] assert expected == strip_xscope(fragment) assert 'x-scope'", "underscores ('^&#@!$foo%+++:;\"<>?/', 'foo'), # bunch of illegal chars ('__foo__', 'foo'), # make sure", "assert class_instance.calls == 0 assert class_instance.property_1 == 1 assert class_instance.calls == 1 #", "sanitize_name(input) == expected def test_AliasKeyDict(): alias_dict = AliasKeyDict({'a': 'b', 'c': 'd'}) alias_dict.add_alias('alias_a', 'a')", "class_instance = Class() assert class_instance.calls == 0 assert class_instance.property_1 == 1 assert class_instance.calls", "id(1) + id(None), (('a', id(2)), ('b', id(3))): id(2) + id(3), } assert calls", "is called twice no calls are received from the method assert class_instance.property_1 ==", "in alias_dict @pytest.mark.parametrize( 'default_type_to_object, object_dict, expected_object_type', ( [True, 'anything that is not a", "'expected'), [ ('pet.getBy Id', 'pet_getBy_Id'), # simple case ('_getPetById_', 'getPetById'), # leading/trailing underscore", "utf-8 -*- from inspect import getcallargs import mock import pytest from bravado_core.util import", "all digits ('100.0', '_100_0'), # a name consisting mostly of digits should keep", "self.calls assert isinstance(Class.property_1, cached_property) class_instance = Class() assert class_instance.calls == 0 assert class_instance.property_1", "description'}}}}, 
ObjectType.PATH_ITEM], [True, {'description': 'response description', 'schema': {'type': 'object'}}, ObjectType.RESPONSE], [True, {'description': 'response", "expected == strip_xscope(fragment) assert 'x-scope' in fragment[0] def test_no_op(): fragment = { 'MON':", "class_instance_1.calls == 0 assert class_instance_1.prop == 1 assert class_instance_1.calls == 1 class_instance_2 =", "fragment['MON'] def test_contained_in_list(): fragment = [ { '$ref': '#/definitions/DayHours', 'x-scope': [ 'file:///happyhour/api_docs/swagger.json', 'file:///happyhour/api_docs/swagger.json#/definitions/WeekHours',", "class_instance_1.calls == 1 class_instance_2 = Class() assert class_instance_2.calls == 1 assert class_instance_2.prop ==", "chars ('__foo__', 'foo'), # make sure we strip multiple underscores ('100percent', 'percent'), #", "from bravado_core.util import determine_object_type from bravado_core.util import lazy_class_attribute from bravado_core.util import memoize_by_id from", "we remove all digits ('100.0', '_100_0'), # a name consisting mostly of digits", "assert calls == [[1, None]] assert decorated_function(2, 3) == id(2) + id(3) assert", "getcallargs import mock import pytest from bravado_core.util import AliasKeyDict from bravado_core.util import cached_property", "'x-scope' in fragment['MON'] def test_contained_in_list(): fragment = [ { '$ref': '#/definitions/DayHours', 'x-scope': [", "'a') assert len(alias_dict) == 2 assert set(alias_dict.items()) == set([('a', 'b'), ('c', 'd')]) assert", "'#/definitions/DayHours', }, } assert expected == strip_xscope(fragment) assert 'x-scope' in fragment['MON'] def test_contained_in_list():", "id(3), } assert calls == [[1, None], [2, 3]] # Calling the decorated", "noqa ), ) def test_determine_object_type(default_type_to_object, object_dict, expected_object_type): assert determine_object_type(object_dict, default_type_to_object) == expected_object_type def", "'foo' not in alias_dict @pytest.mark.parametrize( 
'default_type_to_object, object_dict, expected_object_type', ( [True, 'anything that is", "import determine_object_type from bravado_core.util import lazy_class_attribute from bravado_core.util import memoize_by_id from bravado_core.util import", "= [] @memoize_by_id def function(a): calls.append(a) return function(a) with pytest.raises(RecursiveCallException): function(mock.sentinel.A) assert calls", "'object'}}, ObjectType.PARAMETER], [True, {'get': {'responses': {'200': {'description': 'response description'}}}}, ObjectType.PATH_ITEM], [True, {'description': 'response", "'bar')]) alias_dict.add_alias('baz', 'foo') del alias_dict['baz'] assert len(alias_dict) == 0 assert 'baz' not in", "2 assert class_instance.calls == 2 def test_class_cached_property(): class Class(object): calls = 0 @lazy_class_attribute", "calls == [[1, None]] assert decorated_function.cache == { (('a', id(1)), ('b', id(None))): id(1)", "b]) return id(a) + id(b) decorated_function = memoize_by_id(function) assert decorated_function(1) == id(1) +", "If property is deleted then the method is called again del class_instance.property_1 assert", "] assert expected == strip_xscope(fragment) assert 'x-scope' in fragment[0] def test_no_op(): fragment =", "sanitize_name from bravado_core.util import strip_xscope def test_cached_property(): class Class(object): def __init__(self): self.calls =", "(('a', id(1)), ('b', id(None))): id(1) + id(None), } @pytest.mark.parametrize( ('input', 'expected'), [ ('pet.getBy", "assert alias_dict.get('alias_a') is alias_dict.get('a') assert alias_dict.get('f', 'not there') == 'not there' assert alias_dict.pop('alias_a')", "expected = { 'MON': { '$ref': '#/definitions/DayHours', }, } assert expected == strip_xscope(fragment)", "def test_memoize_by_id_decorator_recursive_call(): calls = [] @memoize_by_id def function(a): calls.append(a) return function(a) with pytest.raises(RecursiveCallException):", "id(None))): id(1) + id(None), (('a', id(2)), ('b', id(3))): id(2) + 
id(3), } assert", "there' assert alias_dict.pop('alias_a') == 'b' assert len(alias_dict) == 1 assert 'a' not in", "id(1)), ('b', id(None))): id(1) + id(None), } @pytest.mark.parametrize( ('input', 'expected'), [ ('pet.getBy Id',", "call the inner method assert decorated_function(1) == id(1) + id(None) assert decorated_function.cache ==", "test_sanitize_name(input, expected): assert sanitize_name(input) == expected def test_AliasKeyDict(): alias_dict = AliasKeyDict({'a': 'b', 'c':", "@memoize_by_id def function(a): calls.append(a) return function(a) with pytest.raises(RecursiveCallException): function(mock.sentinel.A) assert calls == [mock.sentinel.A]", "+ id(None), } @pytest.mark.parametrize( ('input', 'expected'), [ ('pet.getBy Id', 'pet_getBy_Id'), # simple case", "} expected = { 'MON': { '$ref': '#/definitions/DayHours', }, } assert expected ==", "== [[1, None], [2, 3]] # Calling the decorated method with known arguments", "assert decorated_function(2, 3) == id(2) + id(3) assert decorated_function.cache == { (('a', id(1)),", "digits should keep them ], ) def test_sanitize_name(input, expected): assert sanitize_name(input) == expected", "== 2 def test_class_cached_property(): class Class(object): calls = 0 @lazy_class_attribute def prop(cls): cls.calls", "'parameters': {'param': {'type': 'object'}}}, ObjectType.UNKNOWN], # noqa ), ) def test_determine_object_type(default_type_to_object, object_dict, expected_object_type):", "= Class() assert class_instance_1.calls == 0 assert class_instance_1.prop == 1 assert class_instance_1.calls ==", "assert calls == [[1, None], [2, 3]] decorated_function.cache.clear() assert decorated_function(1) == id(1) +", "'$ref': '#/definitions/DayHours', }, } assert expected == strip_xscope(fragment) assert 'x-scope' in fragment['MON'] def", "id(3), } assert calls == [[1, None], [2, 3]] decorated_function.cache.clear() assert decorated_function(1) ==", "True, 'schema': {'type': 'object'}}, ObjectType.PARAMETER], [True, {'get': 
{'responses': {'200': {'description': 'response description'}}}}, ObjectType.PATH_ITEM],", "[True, {'in': 'body', 'name': 'body', 'required': True, 'schema': {'type': 'object'}}, ObjectType.PARAMETER], [True, {'get':", "is called again del class_instance.property_1 assert class_instance.property_1 == 2 assert class_instance.calls == 2", "arguments will not call the inner method assert decorated_function(1) == id(1) + id(None)", "== 1 assert class_instance_2.prop == 1 assert class_instance_2.calls == 1 def test_memoize_by_id_decorator_recursive_call(): calls", "None], [2, 3]] # Calling the decorated method with known arguments will not", "import lazy_class_attribute from bravado_core.util import memoize_by_id from bravado_core.util import ObjectType from bravado_core.util import", "of illegal chars ('__foo__', 'foo'), # make sure we strip multiple underscores ('100percent',", "{'description': 'response description'}}}}, ObjectType.PATH_ITEM], [True, {'description': 'response description', 'schema': {'type': 'object'}}, ObjectType.RESPONSE], [True,", "assert 'x-scope' in fragment[0] def test_no_op(): fragment = { 'MON': { '$ref': '#/definitions/DayHours',", "def __init__(self): self.calls = 0 @cached_property def property_1(self): self.calls += 1 return self.calls", "assert alias_dict['alias_a'] is alias_dict['a'] assert alias_dict.get('alias_a') is alias_dict.get('a') assert alias_dict.get('f', 'not there') ==", "id(a) + id(b) decorated_function = memoize_by_id(function) assert decorated_function(1) == id(1) + id(None) mock_getcallargs.assert_called_once_with(function,", "dict_copy.alias_to_key == alias_dict.alias_to_key def test_AliasKeyDict_del(): alias_dict = AliasKeyDict([('foo', 'bar')]) alias_dict.add_alias('baz', 'foo') del alias_dict['baz']", "default_type_to_object) == expected_object_type def test_empty(): assert {} == strip_xscope({}) def test_contained_in_dict(): fragment =", "bravado_core.util import RecursiveCallException from bravado_core.util import 
sanitize_name from bravado_core.util import strip_xscope def test_cached_property():", "{ '$ref': '#/definitions/DayHours', }, ] assert expected == strip_xscope(fragment) assert 'x-scope' in fragment[0]", "is not a dictionary', ObjectType.UNKNOWN], [True, {'in': 'body', 'name': 'body', 'required': True, 'schema':", "not in alias_dict assert 'alias_a' not in alias_dict def test_AliasKeyDict_copy(): alias_dict = AliasKeyDict([('foo',", "== 2 assert set(alias_dict.items()) == set([('a', 'b'), ('c', 'd')]) assert 'alias_a' in alias_dict", "{'type': 'object'}}, ObjectType.PARAMETER], [True, {'get': {'responses': {'200': {'description': 'response description'}}}}, ObjectType.PATH_ITEM], [True, {'description':", "def test_memoize_by_id_do_not_use_inspect_if_only_kwargs_are_provided(mock_getcallargs): calls = [] def function(a, b=None): calls.append([a, b]) return id(a) +", "expected == strip_xscope(fragment) assert 'x-scope' in fragment['MON'] def test_contained_in_list(): fragment = [ {", "function(a): calls.append(a) return function(a) with pytest.raises(RecursiveCallException): function(mock.sentinel.A) assert calls == [mock.sentinel.A] def test_memoize_by_id_decorator():", "== 1 class_instance_2 = Class() assert class_instance_2.calls == 1 assert class_instance_2.prop == 1", "strip multiple underscores ('100percent', 'percent'), # make sure we remove all digits ('100.0',", "a name consisting mostly of digits should keep them ], ) def test_sanitize_name(input,", "0 assert 'baz' not in alias_dict assert 'foo' not in alias_dict @pytest.mark.parametrize( 'default_type_to_object,", "'$ref': '#/definitions/DayHours', }, } assert expected == strip_xscope(fragment) def test_petstore_spec(petstore_spec): assert petstore_spec.client_spec_dict ==", "test_memoize_by_id_decorator(): calls = [] def function(a, b=None): calls.append([a, b]) return id(a) + id(b)", "from bravado_core.util import ObjectType from bravado_core.util import RecursiveCallException from bravado_core.util 
import sanitize_name from", "'name': 'body', 'required': True, 'schema': {'type': 'object'}}, ObjectType.PARAMETER], [True, {'get': {'responses': {'200': {'description':", "'getPetById'), # leading/trailing underscore ('get__Pet_By__Id', 'get_Pet_By_Id'), # double underscores ('^&#@!$foo%+++:;\"<>?/', 'foo'), # bunch", "assert decorated_function.cache == { (('a', id(1)), ('b', id(None))): id(1) + id(None), } assert", "id(1)), ('b', id(None))): id(1) + id(None), } assert calls == [[1, None]] assert", "function(a, b=None): calls.append([a, b]) return id(a) + id(b) decorated_function = memoize_by_id(function) assert decorated_function(1)", "== { (('a', id(1)), ('b', id(None))): id(1) + id(None), (('a', id(2)), ('b', id(3))):", "3]] # Calling the decorated method with known arguments will not call the", "[2, 3]] # Calling the decorated method with known arguments will not call", "decorated_function.cache == { (('a', id(1)), ('b', id(None))): id(1) + id(None), (('a', id(2)), ('b',", "alias_dict @pytest.mark.parametrize( 'default_type_to_object, object_dict, expected_object_type', ( [True, 'anything that is not a dictionary',", "in fragment[0] def test_no_op(): fragment = { 'MON': { '$ref': '#/definitions/DayHours', }, }", "digits ('100.0', '_100_0'), # a name consisting mostly of digits should keep them", "bravado_core.util import sanitize_name from bravado_core.util import strip_xscope def test_cached_property(): class Class(object): def __init__(self):", "+ id(b) decorated_function = memoize_by_id(function) assert decorated_function(1) == id(1) + id(None) assert decorated_function.cache", "{ 'MON': { '$ref': '#/definitions/DayHours', 'x-scope': [ 'file:///happyhour/api_docs/swagger.json', 'file:///happyhour/api_docs/swagger.json#/definitions/WeekHours', ], }, } expected", "with pytest.raises(RecursiveCallException): function(mock.sentinel.A) assert calls == [mock.sentinel.A] def test_memoize_by_id_decorator(): calls = [] def", "decorated_function(a=1) == id(1) + 
id(None) assert not mock_getcallargs.called assert decorated_function.cache == { (('a',", "'d')]) assert 'alias_a' in alias_dict assert alias_dict['alias_a'] is alias_dict['a'] assert alias_dict.get('alias_a') is alias_dict.get('a')", "} assert calls == [[1, None], [2, 3]] # Calling the decorated method", "class_instance_2.calls == 1 def test_memoize_by_id_decorator_recursive_call(): calls = [] @memoize_by_id def function(a): calls.append(a) return", "'schema': {'type': 'object'}}, ObjectType.RESPONSE], [True, {'description': 'response description', 'parameters': {'param': {'type': 'object'}}}, ObjectType.SCHEMA],", "{ (('a', id(1)), ('b', id(None))): id(1) + id(None), } assert calls == [[1,", "there') == 'not there' assert alias_dict.pop('alias_a') == 'b' assert len(alias_dict) == 1 assert", "== 1 assert 'a' not in alias_dict assert 'alias_a' not in alias_dict def", "decorated_function.cache == { (('a', id(1)), ('b', id(None))): id(1) + id(None), } mock_getcallargs.reset_mock() assert", "assert class_instance_1.calls == 0 assert class_instance_1.prop == 1 assert class_instance_1.calls == 1 class_instance_2", "class_instance_2.calls == 1 assert class_instance_2.prop == 1 assert class_instance_2.calls == 1 def test_memoize_by_id_decorator_recursive_call():", "# make sure we strip multiple underscores ('100percent', 'percent'), # make sure we", "alias_dict.alias_to_key def test_AliasKeyDict_del(): alias_dict = AliasKeyDict([('foo', 'bar')]) alias_dict.add_alias('baz', 'foo') del alias_dict['baz'] assert len(alias_dict)", "from bravado_core.util import RecursiveCallException from bravado_core.util import sanitize_name from bravado_core.util import strip_xscope def", "[[1, None], [2, 3]] decorated_function.cache.clear() assert decorated_function(1) == id(1) + id(None) assert decorated_function.cache", "{'type': 'object'}}}, ObjectType.SCHEMA], [False, {'description': 'response description', 'parameters': {'param': {'type': 'object'}}}, ObjectType.UNKNOWN], #", 
"cls.calls class_instance_1 = Class() assert class_instance_1.calls == 0 assert class_instance_1.prop == 1 assert", "('^&#@!$foo%+++:;\"<>?/', 'foo'), # bunch of illegal chars ('__foo__', 'foo'), # make sure we", "id(1)), ('b', id(None))): id(1) + id(None), } mock_getcallargs.reset_mock() assert decorated_function(a=1) == id(1) +", "{} == strip_xscope({}) def test_contained_in_dict(): fragment = { 'MON': { '$ref': '#/definitions/DayHours', 'x-scope':", "'foo') del alias_dict['baz'] assert len(alias_dict) == 0 assert 'baz' not in alias_dict assert", "id(1) + id(None) assert not mock_getcallargs.called assert decorated_function.cache == { (('a', id(1)), ('b',", "'response description'}}}}, ObjectType.PATH_ITEM], [True, {'description': 'response description', 'schema': {'type': 'object'}}, ObjectType.RESPONSE], [True, {'description':", "id(b) decorated_function = memoize_by_id(function) assert decorated_function(1) == id(1) + id(None) mock_getcallargs.assert_called_once_with(function, 1) assert", "decorated_function.cache == { (('a', id(1)), ('b', id(None))): id(1) + id(None), } assert calls", "'object'}}}, ObjectType.SCHEMA], [False, {'description': 'response description', 'parameters': {'param': {'type': 'object'}}}, ObjectType.UNKNOWN], # noqa", "[mock.sentinel.A] def test_memoize_by_id_decorator(): calls = [] def function(a, b=None): calls.append([a, b]) return id(a)", "import pytest from bravado_core.util import AliasKeyDict from bravado_core.util import cached_property from bravado_core.util import", "} assert calls == [[1, None]] assert decorated_function(2, 3) == id(2) + id(3)", "alias_dict assert 'alias_a' not in alias_dict def test_AliasKeyDict_copy(): alias_dict = AliasKeyDict([('foo', 'bar')]) alias_dict.add_alias('baz',", "test_contained_in_dict(): fragment = { 'MON': { '$ref': '#/definitions/DayHours', 'x-scope': [ 'file:///happyhour/api_docs/swagger.json', 'file:///happyhour/api_docs/swagger.json#/definitions/WeekHours', ],", "== set(alias_dict.items()) 
assert dict_copy.alias_to_key == alias_dict.alias_to_key def test_AliasKeyDict_del(): alias_dict = AliasKeyDict([('foo', 'bar')]) alias_dict.add_alias('baz',", "1 class_instance_2 = Class() assert class_instance_2.calls == 1 assert class_instance_2.prop == 1 assert", "description', 'parameters': {'param': {'type': 'object'}}}, ObjectType.SCHEMA], [False, {'description': 'response description', 'parameters': {'param': {'type':", "], }, ] expected = [ { '$ref': '#/definitions/DayHours', }, ] assert expected", "object_dict, expected_object_type', ( [True, 'anything that is not a dictionary', ObjectType.UNKNOWN], [True, {'in':", "[] @memoize_by_id def function(a): calls.append(a) return function(a) with pytest.raises(RecursiveCallException): function(mock.sentinel.A) assert calls ==", "not a dictionary', ObjectType.UNKNOWN], [True, {'in': 'body', 'name': 'body', 'required': True, 'schema': {'type':", "bravado_core.util import strip_xscope def test_cached_property(): class Class(object): def __init__(self): self.calls = 0 @cached_property", "return function(a) with pytest.raises(RecursiveCallException): function(mock.sentinel.A) assert calls == [mock.sentinel.A] def test_memoize_by_id_decorator(): calls =", "1 assert class_instance.calls == 1 # If property is called twice no calls", "@mock.patch('bravado_core.util.inspect.getcallargs', wraps=getcallargs) def test_memoize_by_id_do_not_use_inspect_if_only_kwargs_are_provided(mock_getcallargs): calls = [] def function(a, b=None): calls.append([a, b]) return", "'MON': { '$ref': '#/definitions/DayHours', 'x-scope': [ 'file:///happyhour/api_docs/swagger.json', 'file:///happyhour/api_docs/swagger.json#/definitions/WeekHours', ], }, } expected =", "self.calls = 0 @cached_property def property_1(self): self.calls += 1 return self.calls assert isinstance(Class.property_1,", "assert class_instance_1.calls == 1 class_instance_2 = Class() assert class_instance_2.calls == 1 assert class_instance_2.prop", "1 assert 
class_instance_2.calls == 1 def test_memoize_by_id_decorator_recursive_call(): calls = [] @memoize_by_id def function(a):", "underscores ('100percent', 'percent'), # make sure we remove all digits ('100.0', '_100_0'), #", "method with known arguments will not call the inner method assert decorated_function(1) ==", "== 1 def test_memoize_by_id_decorator_recursive_call(): calls = [] @memoize_by_id def function(a): calls.append(a) return function(a)", "== expected def test_AliasKeyDict(): alias_dict = AliasKeyDict({'a': 'b', 'c': 'd'}) alias_dict.add_alias('alias_a', 'a') assert", "alias_dict assert 'foo' not in alias_dict @pytest.mark.parametrize( 'default_type_to_object, object_dict, expected_object_type', ( [True, 'anything", "bravado_core.util import determine_object_type from bravado_core.util import lazy_class_attribute from bravado_core.util import memoize_by_id from bravado_core.util", "# If property is called twice no calls are received from the method", "test_AliasKeyDict_del(): alias_dict = AliasKeyDict([('foo', 'bar')]) alias_dict.add_alias('baz', 'foo') del alias_dict['baz'] assert len(alias_dict) == 0", "called twice no calls are received from the method assert class_instance.property_1 == 1", "then the method is called again del class_instance.property_1 assert class_instance.property_1 == 2 assert", "[False, {'description': 'response description', 'parameters': {'param': {'type': 'object'}}}, ObjectType.UNKNOWN], # noqa ), )", "determine_object_type(object_dict, default_type_to_object) == expected_object_type def test_empty(): assert {} == strip_xscope({}) def test_contained_in_dict(): fragment", "AliasKeyDict from bravado_core.util import cached_property from bravado_core.util import determine_object_type from bravado_core.util import lazy_class_attribute", "ObjectType.UNKNOWN], # noqa ), ) def test_determine_object_type(default_type_to_object, object_dict, expected_object_type): assert determine_object_type(object_dict, default_type_to_object) ==", 
"assert class_instance_2.calls == 1 def test_memoize_by_id_decorator_recursive_call(): calls = [] @memoize_by_id def function(a): calls.append(a)", "Class() assert class_instance_1.calls == 0 assert class_instance_1.prop == 1 assert class_instance_1.calls == 1", "== 1 # If property is deleted then the method is called again", "id(2) + id(3), } assert calls == [[1, None], [2, 3]] # Calling", "('b', id(None))): id(1) + id(None), } @pytest.mark.parametrize( ('input', 'expected'), [ ('pet.getBy Id', 'pet_getBy_Id'),", "[True, {'description': 'response description', 'parameters': {'param': {'type': 'object'}}}, ObjectType.SCHEMA], [False, {'description': 'response description',", "[2, 3], [1, None]] @mock.patch('bravado_core.util.inspect.getcallargs', wraps=getcallargs) def test_memoize_by_id_do_not_use_inspect_if_only_kwargs_are_provided(mock_getcallargs): calls = [] def function(a,", "== id(1) + id(None) assert not mock_getcallargs.called assert decorated_function.cache == { (('a', id(1)),", "('100percent', 'percent'), # make sure we remove all digits ('100.0', '_100_0'), # a", "the method is called again del class_instance.property_1 assert class_instance.property_1 == 2 assert class_instance.calls", "import cached_property from bravado_core.util import determine_object_type from bravado_core.util import lazy_class_attribute from bravado_core.util import", "assert sanitize_name(input) == expected def test_AliasKeyDict(): alias_dict = AliasKeyDict({'a': 'b', 'c': 'd'}) alias_dict.add_alias('alias_a',", "class_instance.property_1 == 1 assert class_instance.calls == 1 # If property is deleted then", "id(None))): id(1) + id(None), } mock_getcallargs.reset_mock() assert decorated_function(a=1) == id(1) + id(None) assert", "'file:///happyhour/api_docs/swagger.json#/definitions/WeekHours', ], }, } expected = { 'MON': { '$ref': '#/definitions/DayHours', }, }", "'$ref': '#/definitions/DayHours', }, ] assert expected == strip_xscope(fragment) assert 'x-scope' in fragment[0] 
def", "set([('a', 'b'), ('c', 'd')]) assert 'alias_a' in alias_dict assert alias_dict['alias_a'] is alias_dict['a'] assert", "calls == [mock.sentinel.A] def test_memoize_by_id_decorator(): calls = [] def function(a, b=None): calls.append([a, b])", "the method assert class_instance.property_1 == 1 assert class_instance.calls == 1 # If property", "'object'}}}, ObjectType.UNKNOWN], # noqa ), ) def test_determine_object_type(default_type_to_object, object_dict, expected_object_type): assert determine_object_type(object_dict, default_type_to_object)", "del alias_dict['baz'] assert len(alias_dict) == 0 assert 'baz' not in alias_dict assert 'foo'", "def test_AliasKeyDict_del(): alias_dict = AliasKeyDict([('foo', 'bar')]) alias_dict.add_alias('baz', 'foo') del alias_dict['baz'] assert len(alias_dict) ==", "ObjectType.PATH_ITEM], [True, {'description': 'response description', 'schema': {'type': 'object'}}, ObjectType.RESPONSE], [True, {'description': 'response description',", "{'get': {'responses': {'200': {'description': 'response description'}}}}, ObjectType.PATH_ITEM], [True, {'description': 'response description', 'schema': {'type':", "'#/definitions/DayHours', 'x-scope': [ 'file:///happyhour/api_docs/swagger.json', 'file:///happyhour/api_docs/swagger.json#/definitions/WeekHours', ], }, } expected = { 'MON': {", "class_instance.calls == 0 assert class_instance.property_1 == 1 assert class_instance.calls == 1 # If", "'b'), ('c', 'd')]) assert 'alias_a' in alias_dict assert alias_dict['alias_a'] is alias_dict['a'] assert alias_dict.get('alias_a')", "set(alias_dict.items()) assert dict_copy.alias_to_key == alias_dict.alias_to_key def test_AliasKeyDict_del(): alias_dict = AliasKeyDict([('foo', 'bar')]) alias_dict.add_alias('baz', 'foo')", "assert class_instance.calls == 2 def test_class_cached_property(): class Class(object): calls = 0 @lazy_class_attribute def", "class_instance.calls == 1 # If property is deleted then the method is called", "'#/definitions/DayHours', }, } 
expected = { 'MON': { '$ref': '#/definitions/DayHours', }, } assert", "a dictionary', ObjectType.UNKNOWN], [True, {'in': 'body', 'name': 'body', 'required': True, 'schema': {'type': 'object'}},", "assert class_instance.property_1 == 2 assert class_instance.calls == 2 def test_class_cached_property(): class Class(object): calls", "{'in': 'body', 'name': 'body', 'required': True, 'schema': {'type': 'object'}}, ObjectType.PARAMETER], [True, {'get': {'responses':", "double underscores ('^&#@!$foo%+++:;\"<>?/', 'foo'), # bunch of illegal chars ('__foo__', 'foo'), # make", "strip_xscope(fragment) assert 'x-scope' in fragment[0] def test_no_op(): fragment = { 'MON': { '$ref':", "+ id(3), } assert calls == [[1, None], [2, 3]] decorated_function.cache.clear() assert decorated_function(1)", "decorated_function = memoize_by_id(function) assert decorated_function(1) == id(1) + id(None) mock_getcallargs.assert_called_once_with(function, 1) assert calls", "id(None))): id(1) + id(None), } assert calls == [[1, None], [2, 3], [1,", "(('a', id(2)), ('b', id(3))): id(2) + id(3), } assert calls == [[1, None],", "assert determine_object_type(object_dict, default_type_to_object) == expected_object_type def test_empty(): assert {} == strip_xscope({}) def test_contained_in_dict():", "assert decorated_function.cache == { (('a', id(1)), ('b', id(None))): id(1) + id(None), } mock_getcallargs.reset_mock()", "id(1) + id(None) mock_getcallargs.assert_called_once_with(function, 1) assert calls == [[1, None]] assert decorated_function.cache ==", "len(alias_dict) == 1 assert 'a' not in alias_dict assert 'alias_a' not in alias_dict", "= { 'MON': { '$ref': '#/definitions/DayHours', }, } assert expected == strip_xscope(fragment) assert", "mock import pytest from bravado_core.util import AliasKeyDict from bravado_core.util import cached_property from bravado_core.util", "= 0 @cached_property def property_1(self): self.calls += 1 return self.calls assert isinstance(Class.property_1, cached_property)", 
"== 2 assert class_instance.calls == 2 def test_class_cached_property(): class Class(object): calls = 0", "assert len(alias_dict) == 2 assert set(alias_dict.items()) == set([('a', 'b'), ('c', 'd')]) assert 'alias_a'", "class_instance.property_1 == 1 assert class_instance.calls == 1 # If property is called twice", "fragment[0] def test_no_op(): fragment = { 'MON': { '$ref': '#/definitions/DayHours', }, } expected", "0 assert class_instance.property_1 == 1 assert class_instance.calls == 1 # If property is", "def function(a, b=None): calls.append([a, b]) return id(a) + id(b) decorated_function = memoize_by_id(function) assert", "fragment = [ { '$ref': '#/definitions/DayHours', 'x-scope': [ 'file:///happyhour/api_docs/swagger.json', 'file:///happyhour/api_docs/swagger.json#/definitions/WeekHours', ], }, ]", "expected): assert sanitize_name(input) == expected def test_AliasKeyDict(): alias_dict = AliasKeyDict({'a': 'b', 'c': 'd'})", "# -*- coding: utf-8 -*- from inspect import getcallargs import mock import pytest", "# simple case ('_getPetById_', 'getPetById'), # leading/trailing underscore ('get__Pet_By__Id', 'get_Pet_By_Id'), # double underscores", "assert not mock_getcallargs.called assert decorated_function.cache == { (('a', id(1)), ('b', id(None))): id(1) +", "id(None) assert decorated_function.cache == { (('a', id(1)), ('b', id(None))): id(1) + id(None), (('a',", "(('a', id(1)), ('b', id(None))): id(1) + id(None), } assert calls == [[1, None]]", "'body', 'name': 'body', 'required': True, 'schema': {'type': 'object'}}, ObjectType.PARAMETER], [True, {'get': {'responses': {'200':", "alias_dict.add_alias('baz', 'foo') del alias_dict['baz'] assert len(alias_dict) == 0 assert 'baz' not in alias_dict", "'file:///happyhour/api_docs/swagger.json', 'file:///happyhour/api_docs/swagger.json#/definitions/WeekHours', ], }, } expected = { 'MON': { '$ref': '#/definitions/DayHours', },", "function(mock.sentinel.A) assert calls == [mock.sentinel.A] def 
test_memoize_by_id_decorator(): calls = [] def function(a, b=None):", "If property is called twice no calls are received from the method assert", "simple case ('_getPetById_', 'getPetById'), # leading/trailing underscore ('get__Pet_By__Id', 'get_Pet_By_Id'), # double underscores ('^&#@!$foo%+++:;\"<>?/',", "== strip_xscope(fragment) assert 'x-scope' in fragment[0] def test_no_op(): fragment = { 'MON': {", "= [] def function(a, b=None): calls.append([a, b]) return id(a) + id(b) decorated_function =", "assert class_instance.calls == 1 # If property is called twice no calls are", "1 assert class_instance_2.prop == 1 assert class_instance_2.calls == 1 def test_memoize_by_id_decorator_recursive_call(): calls =", "id(b) decorated_function = memoize_by_id(function) assert decorated_function(1) == id(1) + id(None) assert decorated_function.cache ==", "= AliasKeyDict([('foo', 'bar')]) alias_dict.add_alias('baz', 'foo') dict_copy = alias_dict.copy() assert set(dict_copy.items()) == set(alias_dict.items()) assert", "id(None), } @pytest.mark.parametrize( ('input', 'expected'), [ ('pet.getBy Id', 'pet_getBy_Id'), # simple case ('_getPetById_',", "alias_dict = AliasKeyDict({'a': 'b', 'c': 'd'}) alias_dict.add_alias('alias_a', 'a') assert len(alias_dict) == 2 assert", "'bar')]) alias_dict.add_alias('baz', 'foo') dict_copy = alias_dict.copy() assert set(dict_copy.items()) == set(alias_dict.items()) assert dict_copy.alias_to_key ==", "'parameters': {'param': {'type': 'object'}}}, ObjectType.SCHEMA], [False, {'description': 'response description', 'parameters': {'param': {'type': 'object'}}},", "cached_property) class_instance = Class() assert class_instance.calls == 0 assert class_instance.property_1 == 1 assert", "'pet_getBy_Id'), # simple case ('_getPetById_', 'getPetById'), # leading/trailing underscore ('get__Pet_By__Id', 'get_Pet_By_Id'), # double", "('b', id(3))): id(2) + id(3), } assert calls == [[1, None], [2, 3]]", "id(1) + id(None) assert decorated_function.cache == { 
(('a', id(1)), ('b', id(None))): id(1) +", "keep them ], ) def test_sanitize_name(input, expected): assert sanitize_name(input) == expected def test_AliasKeyDict():", "import ObjectType from bravado_core.util import RecursiveCallException from bravado_core.util import sanitize_name from bravado_core.util import", "calls.append([a, b]) return id(a) + id(b) decorated_function = memoize_by_id(function) assert decorated_function(1) == id(1)", "alias_dict = AliasKeyDict([('foo', 'bar')]) alias_dict.add_alias('baz', 'foo') del alias_dict['baz'] assert len(alias_dict) == 0 assert", "memoize_by_id(function) assert decorated_function(1) == id(1) + id(None) mock_getcallargs.assert_called_once_with(function, 1) assert calls == [[1,", "len(alias_dict) == 0 assert 'baz' not in alias_dict assert 'foo' not in alias_dict", "calls.append(a) return function(a) with pytest.raises(RecursiveCallException): function(mock.sentinel.A) assert calls == [mock.sentinel.A] def test_memoize_by_id_decorator(): calls", "== [[1, None]] assert decorated_function.cache == { (('a', id(1)), ('b', id(None))): id(1) +", "-*- coding: utf-8 -*- from inspect import getcallargs import mock import pytest from", "twice no calls are received from the method assert class_instance.property_1 == 1 assert", "'b' assert len(alias_dict) == 1 assert 'a' not in alias_dict assert 'alias_a' not", "assert len(alias_dict) == 0 assert 'baz' not in alias_dict assert 'foo' not in", "1 return cls.calls class_instance_1 = Class() assert class_instance_1.calls == 0 assert class_instance_1.prop ==", "[ ('pet.getBy Id', 'pet_getBy_Id'), # simple case ('_getPetById_', 'getPetById'), # leading/trailing underscore ('get__Pet_By__Id',", "id(None) assert decorated_function.cache == { (('a', id(1)), ('b', id(None))): id(1) + id(None), }", "('input', 'expected'), [ ('pet.getBy Id', 'pet_getBy_Id'), # simple case ('_getPetById_', 'getPetById'), # leading/trailing", "== 'b' assert len(alias_dict) == 1 assert 'a' not in alias_dict assert 
'alias_a'", "= AliasKeyDict([('foo', 'bar')]) alias_dict.add_alias('baz', 'foo') del alias_dict['baz'] assert len(alias_dict) == 0 assert 'baz'", "'c': 'd'}) alias_dict.add_alias('alias_a', 'a') assert len(alias_dict) == 2 assert set(alias_dict.items()) == set([('a', 'b'),", "assert class_instance_2.prop == 1 assert class_instance_2.calls == 1 def test_memoize_by_id_decorator_recursive_call(): calls = []", "+ id(3), } assert calls == [[1, None], [2, 3]] # Calling the", "'not there' assert alias_dict.pop('alias_a') == 'b' assert len(alias_dict) == 1 assert 'a' not", "expected = [ { '$ref': '#/definitions/DayHours', }, ] assert expected == strip_xscope(fragment) assert", "}, ] assert expected == strip_xscope(fragment) assert 'x-scope' in fragment[0] def test_no_op(): fragment", "description', 'schema': {'type': 'object'}}, ObjectType.RESPONSE], [True, {'description': 'response description', 'parameters': {'param': {'type': 'object'}}},", "import strip_xscope def test_cached_property(): class Class(object): def __init__(self): self.calls = 0 @cached_property def", "that is not a dictionary', ObjectType.UNKNOWN], [True, {'in': 'body', 'name': 'body', 'required': True,", "decorated_function = memoize_by_id(function) assert decorated_function(1) == id(1) + id(None) assert decorated_function.cache == {", "[] def function(a, b=None): calls.append([a, b]) return id(a) + id(b) decorated_function = memoize_by_id(function)", "test_determine_object_type(default_type_to_object, object_dict, expected_object_type): assert determine_object_type(object_dict, default_type_to_object) == expected_object_type def test_empty(): assert {} ==", "def test_contained_in_dict(): fragment = { 'MON': { '$ref': '#/definitions/DayHours', 'x-scope': [ 'file:///happyhour/api_docs/swagger.json', 'file:///happyhour/api_docs/swagger.json#/definitions/WeekHours',", "== id(1) + id(None) assert decorated_function.cache == { (('a', id(1)), ('b', id(None))): id(1)", "assert decorated_function(1) == id(1) + 
id(None) assert decorated_function.cache == { (('a', id(1)), ('b',", "import memoize_by_id from bravado_core.util import ObjectType from bravado_core.util import RecursiveCallException from bravado_core.util import", "assert class_instance.property_1 == 1 assert class_instance.calls == 1 # If property is called", "class_instance.property_1 == 2 assert class_instance.calls == 2 def test_class_cached_property(): class Class(object): calls =", "[[1, None], [2, 3]] # Calling the decorated method with known arguments will", "# double underscores ('^&#@!$foo%+++:;\"<>?/', 'foo'), # bunch of illegal chars ('__foo__', 'foo'), #", "'response description', 'parameters': {'param': {'type': 'object'}}}, ObjectType.SCHEMA], [False, {'description': 'response description', 'parameters': {'param':", "test_memoize_by_id_do_not_use_inspect_if_only_kwargs_are_provided(mock_getcallargs): calls = [] def function(a, b=None): calls.append([a, b]) return id(a) + id(b)", "decorated_function(2, 3) == id(2) + id(3) assert decorated_function.cache == { (('a', id(1)), ('b',", "strip_xscope(fragment) assert 'x-scope' in fragment['MON'] def test_contained_in_list(): fragment = [ { '$ref': '#/definitions/DayHours',", "id(3))): id(2) + id(3), } assert calls == [[1, None], [2, 3]] #", "id(None) mock_getcallargs.assert_called_once_with(function, 1) assert calls == [[1, None]] assert decorated_function.cache == { (('a',", "# noqa ), ) def test_determine_object_type(default_type_to_object, object_dict, expected_object_type): assert determine_object_type(object_dict, default_type_to_object) == expected_object_type", "assert set(alias_dict.items()) == set([('a', 'b'), ('c', 'd')]) assert 'alias_a' in alias_dict assert alias_dict['alias_a']", "deleted then the method is called again del class_instance.property_1 assert class_instance.property_1 == 2", "bravado_core.util import ObjectType from bravado_core.util import RecursiveCallException from bravado_core.util import sanitize_name from 
bravado_core.util", "from bravado_core.util import sanitize_name from bravado_core.util import strip_xscope def test_cached_property(): class Class(object): def", "], ) def test_sanitize_name(input, expected): assert sanitize_name(input) == expected def test_AliasKeyDict(): alias_dict =", "calls = 0 @lazy_class_attribute def prop(cls): cls.calls += 1 return cls.calls class_instance_1 =", "decorated_function(1) == id(1) + id(None) mock_getcallargs.assert_called_once_with(function, 1) assert calls == [[1, None]] assert", "} assert expected == strip_xscope(fragment) assert 'x-scope' in fragment['MON'] def test_contained_in_list(): fragment =", "{'type': 'object'}}}, ObjectType.UNKNOWN], # noqa ), ) def test_determine_object_type(default_type_to_object, object_dict, expected_object_type): assert determine_object_type(object_dict,", "@pytest.mark.parametrize( ('input', 'expected'), [ ('pet.getBy Id', 'pet_getBy_Id'), # simple case ('_getPetById_', 'getPetById'), #", "import sanitize_name from bravado_core.util import strip_xscope def test_cached_property(): class Class(object): def __init__(self): self.calls", "assert alias_dict.get('f', 'not there') == 'not there' assert alias_dict.pop('alias_a') == 'b' assert len(alias_dict)", "[ 'file:///happyhour/api_docs/swagger.json', 'file:///happyhour/api_docs/swagger.json#/definitions/WeekHours', ], }, ] expected = [ { '$ref': '#/definitions/DayHours', },", "expected def test_AliasKeyDict(): alias_dict = AliasKeyDict({'a': 'b', 'c': 'd'}) alias_dict.add_alias('alias_a', 'a') assert len(alias_dict)", "them ], ) def test_sanitize_name(input, expected): assert sanitize_name(input) == expected def test_AliasKeyDict(): alias_dict", "class_instance.calls == 2 def test_class_cached_property(): class Class(object): calls = 0 @lazy_class_attribute def prop(cls):", "multiple underscores ('100percent', 'percent'), # make sure we remove all digits ('100.0', '_100_0'),", "'object'}}, ObjectType.RESPONSE], [True, {'description': 'response 
description', 'parameters': {'param': {'type': 'object'}}}, ObjectType.SCHEMA], [False, {'description':", "[[1, None], [2, 3], [1, None]] @mock.patch('bravado_core.util.inspect.getcallargs', wraps=getcallargs) def test_memoize_by_id_do_not_use_inspect_if_only_kwargs_are_provided(mock_getcallargs): calls = []", "# leading/trailing underscore ('get__Pet_By__Id', 'get_Pet_By_Id'), # double underscores ('^&#@!$foo%+++:;\"<>?/', 'foo'), # bunch of", "Class() assert class_instance_2.calls == 1 assert class_instance_2.prop == 1 assert class_instance_2.calls == 1", "('b', id(None))): id(1) + id(None), (('a', id(2)), ('b', id(3))): id(2) + id(3), }", "ObjectType from bravado_core.util import RecursiveCallException from bravado_core.util import sanitize_name from bravado_core.util import strip_xscope", "id(1) + id(None), } mock_getcallargs.reset_mock() assert decorated_function(a=1) == id(1) + id(None) assert not", "== expected_object_type def test_empty(): assert {} == strip_xscope({}) def test_contained_in_dict(): fragment = {", "Class() assert class_instance.calls == 0 assert class_instance.property_1 == 1 assert class_instance.calls == 1", "1 def test_memoize_by_id_decorator_recursive_call(): calls = [] @memoize_by_id def function(a): calls.append(a) return function(a) with", "calls == [[1, None], [2, 3], [1, None]] @mock.patch('bravado_core.util.inspect.getcallargs', wraps=getcallargs) def test_memoize_by_id_do_not_use_inspect_if_only_kwargs_are_provided(mock_getcallargs): calls", "inspect import getcallargs import mock import pytest from bravado_core.util import AliasKeyDict from bravado_core.util", "= memoize_by_id(function) assert decorated_function(1) == id(1) + id(None) mock_getcallargs.assert_called_once_with(function, 1) assert calls ==", "('pet.getBy Id', 'pet_getBy_Id'), # simple case ('_getPetById_', 'getPetById'), # leading/trailing underscore ('get__Pet_By__Id', 'get_Pet_By_Id'),", "+ id(None), } mock_getcallargs.reset_mock() assert decorated_function(a=1) 
== id(1) + id(None) assert not mock_getcallargs.called", "is alias_dict.get('a') assert alias_dict.get('f', 'not there') == 'not there' assert alias_dict.pop('alias_a') == 'b'", "should keep them ], ) def test_sanitize_name(input, expected): assert sanitize_name(input) == expected def", "alias_dict.add_alias('baz', 'foo') dict_copy = alias_dict.copy() assert set(dict_copy.items()) == set(alias_dict.items()) assert dict_copy.alias_to_key == alias_dict.alias_to_key", "{'description': 'response description', 'parameters': {'param': {'type': 'object'}}}, ObjectType.UNKNOWN], # noqa ), ) def", "} @pytest.mark.parametrize( ('input', 'expected'), [ ('pet.getBy Id', 'pet_getBy_Id'), # simple case ('_getPetById_', 'getPetById'),", "<reponame>nickgaya/bravado-core # -*- coding: utf-8 -*- from inspect import getcallargs import mock import", "+ id(None) assert decorated_function.cache == { (('a', id(1)), ('b', id(None))): id(1) + id(None),", "'b', 'c': 'd'}) alias_dict.add_alias('alias_a', 'a') assert len(alias_dict) == 2 assert set(alias_dict.items()) == set([('a',", "id(1)), ('b', id(None))): id(1) + id(None), } assert calls == [[1, None], [2,", "Class(object): def __init__(self): self.calls = 0 @cached_property def property_1(self): self.calls += 1 return", "{'200': {'description': 'response description'}}}}, ObjectType.PATH_ITEM], [True, {'description': 'response description', 'schema': {'type': 'object'}}, ObjectType.RESPONSE],", "{'type': 'object'}}, ObjectType.RESPONSE], [True, {'description': 'response description', 'parameters': {'param': {'type': 'object'}}}, ObjectType.SCHEMA], [False,", "] expected = [ { '$ref': '#/definitions/DayHours', }, ] assert expected == strip_xscope(fragment)", "'x-scope' in fragment[0] def test_no_op(): fragment = { 'MON': { '$ref': '#/definitions/DayHours', },", "the decorated method with known arguments will not call the inner method assert", "decorated_function.cache == { (('a', id(1)), ('b', id(None))): id(1) + id(None), } 
@pytest.mark.parametrize( ('input',", "'get_Pet_By_Id'), # double underscores ('^&#@!$foo%+++:;\"<>?/', 'foo'), # bunch of illegal chars ('__foo__', 'foo'),", "def test_sanitize_name(input, expected): assert sanitize_name(input) == expected def test_AliasKeyDict(): alias_dict = AliasKeyDict({'a': 'b',", "lazy_class_attribute from bravado_core.util import memoize_by_id from bravado_core.util import ObjectType from bravado_core.util import RecursiveCallException", "def prop(cls): cls.calls += 1 return cls.calls class_instance_1 = Class() assert class_instance_1.calls ==", "class_instance_2 = Class() assert class_instance_2.calls == 1 assert class_instance_2.prop == 1 assert class_instance_2.calls", "bunch of illegal chars ('__foo__', 'foo'), # make sure we strip multiple underscores", "assert calls == [[1, None]] assert decorated_function.cache == { (('a', id(1)), ('b', id(None))):", "{ (('a', id(1)), ('b', id(None))): id(1) + id(None), (('a', id(2)), ('b', id(3))): id(2)", "('get__Pet_By__Id', 'get_Pet_By_Id'), # double underscores ('^&#@!$foo%+++:;\"<>?/', 'foo'), # bunch of illegal chars ('__foo__',", "(('a', id(1)), ('b', id(None))): id(1) + id(None), } assert calls == [[1, None],", "0 @lazy_class_attribute def prop(cls): cls.calls += 1 return cls.calls class_instance_1 = Class() assert", "== 1 assert class_instance_2.calls == 1 def test_memoize_by_id_decorator_recursive_call(): calls = [] @memoize_by_id def", "decorated_function(1) == id(1) + id(None) assert decorated_function.cache == { (('a', id(1)), ('b', id(None))):", "will not call the inner method assert decorated_function(1) == id(1) + id(None) assert", "with known arguments will not call the inner method assert decorated_function(1) == id(1)", "+= 1 return self.calls assert isinstance(Class.property_1, cached_property) class_instance = Class() assert class_instance.calls ==", "def function(a): calls.append(a) return function(a) with pytest.raises(RecursiveCallException): function(mock.sentinel.A) assert 
calls == [mock.sentinel.A] def", "assert 'baz' not in alias_dict assert 'foo' not in alias_dict @pytest.mark.parametrize( 'default_type_to_object, object_dict,", "{ 'MON': { '$ref': '#/definitions/DayHours', }, } expected = { 'MON': { '$ref':", "called again del class_instance.property_1 assert class_instance.property_1 == 2 assert class_instance.calls == 2 def", "assert expected == strip_xscope(fragment) assert 'x-scope' in fragment[0] def test_no_op(): fragment = {", "}, } assert expected == strip_xscope(fragment) assert 'x-scope' in fragment['MON'] def test_contained_in_list(): fragment", "{ '$ref': '#/definitions/DayHours', 'x-scope': [ 'file:///happyhour/api_docs/swagger.json', 'file:///happyhour/api_docs/swagger.json#/definitions/WeekHours', ], }, ] expected = [", "from inspect import getcallargs import mock import pytest from bravado_core.util import AliasKeyDict from", "sure we remove all digits ('100.0', '_100_0'), # a name consisting mostly of", "remove all digits ('100.0', '_100_0'), # a name consisting mostly of digits should", "), ) def test_determine_object_type(default_type_to_object, object_dict, expected_object_type): assert determine_object_type(object_dict, default_type_to_object) == expected_object_type def test_empty():", "method is called again del class_instance.property_1 assert class_instance.property_1 == 2 assert class_instance.calls ==", "id(1) + id(None), } assert calls == [[1, None], [2, 3], [1, None]]", "assert 'alias_a' in alias_dict assert alias_dict['alias_a'] is alias_dict['a'] assert alias_dict.get('alias_a') is alias_dict.get('a') assert", "== 1 assert class_instance.calls == 1 # If property is called twice no", "[[1, None]] assert decorated_function(2, 3) == id(2) + id(3) assert decorated_function.cache == {", "alias_dict = AliasKeyDict([('foo', 'bar')]) alias_dict.add_alias('baz', 'foo') dict_copy = alias_dict.copy() assert set(dict_copy.items()) == set(alias_dict.items())", "= Class() assert class_instance.calls == 0 assert 
class_instance.property_1 == 1 assert class_instance.calls ==", "3], [1, None]] @mock.patch('bravado_core.util.inspect.getcallargs', wraps=getcallargs) def test_memoize_by_id_do_not_use_inspect_if_only_kwargs_are_provided(mock_getcallargs): calls = [] def function(a, b=None):", "alias_dict.copy() assert set(dict_copy.items()) == set(alias_dict.items()) assert dict_copy.alias_to_key == alias_dict.alias_to_key def test_AliasKeyDict_del(): alias_dict =", "'required': True, 'schema': {'type': 'object'}}, ObjectType.PARAMETER], [True, {'get': {'responses': {'200': {'description': 'response description'}}}},", "pytest.raises(RecursiveCallException): function(mock.sentinel.A) assert calls == [mock.sentinel.A] def test_memoize_by_id_decorator(): calls = [] def function(a,", "None]] assert decorated_function(2, 3) == id(2) + id(3) assert decorated_function.cache == { (('a',", "assert len(alias_dict) == 1 assert 'a' not in alias_dict assert 'alias_a' not in", "assert calls == [mock.sentinel.A] def test_memoize_by_id_decorator(): calls = [] def function(a, b=None): calls.append([a,", "property is deleted then the method is called again del class_instance.property_1 assert class_instance.property_1", "prop(cls): cls.calls += 1 return cls.calls class_instance_1 = Class() assert class_instance_1.calls == 0", "'body', 'required': True, 'schema': {'type': 'object'}}, ObjectType.PARAMETER], [True, {'get': {'responses': {'200': {'description': 'response", "'file:///happyhour/api_docs/swagger.json#/definitions/WeekHours', ], }, ] expected = [ { '$ref': '#/definitions/DayHours', }, ] assert", "return self.calls assert isinstance(Class.property_1, cached_property) class_instance = Class() assert class_instance.calls == 0 assert", "del class_instance.property_1 assert class_instance.property_1 == 2 assert class_instance.calls == 2 def test_class_cached_property(): class", "[ { '$ref': '#/definitions/DayHours', 'x-scope': [ 'file:///happyhour/api_docs/swagger.json', 
'file:///happyhour/api_docs/swagger.json#/definitions/WeekHours', ], }, ] expected =", "alias_dict.get('alias_a') is alias_dict.get('a') assert alias_dict.get('f', 'not there') == 'not there' assert alias_dict.pop('alias_a') ==", "{ '$ref': '#/definitions/DayHours', }, } expected = { 'MON': { '$ref': '#/definitions/DayHours', },", "Calling the decorated method with known arguments will not call the inner method", "alias_dict.get('a') assert alias_dict.get('f', 'not there') == 'not there' assert alias_dict.pop('alias_a') == 'b' assert", "assert decorated_function.cache == { (('a', id(1)), ('b', id(None))): id(1) + id(None), } @pytest.mark.parametrize(", "+ id(b) decorated_function = memoize_by_id(function) assert decorated_function(1) == id(1) + id(None) mock_getcallargs.assert_called_once_with(function, 1)", "== 0 assert class_instance.property_1 == 1 assert class_instance.calls == 1 # If property", "make sure we strip multiple underscores ('100percent', 'percent'), # make sure we remove", "Class(object): calls = 0 @lazy_class_attribute def prop(cls): cls.calls += 1 return cls.calls class_instance_1", "}, } expected = { 'MON': { '$ref': '#/definitions/DayHours', }, } assert expected", "memoize_by_id from bravado_core.util import ObjectType from bravado_core.util import RecursiveCallException from bravado_core.util import sanitize_name", "id(None), } mock_getcallargs.reset_mock() assert decorated_function(a=1) == id(1) + id(None) assert not mock_getcallargs.called assert", "@lazy_class_attribute def prop(cls): cls.calls += 1 return cls.calls class_instance_1 = Class() assert class_instance_1.calls", "[ 'file:///happyhour/api_docs/swagger.json', 'file:///happyhour/api_docs/swagger.json#/definitions/WeekHours', ], }, } expected = { 'MON': { '$ref': '#/definitions/DayHours',", "'file:///happyhour/api_docs/swagger.json', 'file:///happyhour/api_docs/swagger.json#/definitions/WeekHours', ], }, ] expected = [ { '$ref': '#/definitions/DayHours', }, ]", "property_1(self): 
self.calls += 1 return self.calls assert isinstance(Class.property_1, cached_property) class_instance = Class() assert", "None], [2, 3]] decorated_function.cache.clear() assert decorated_function(1) == id(1) + id(None) assert decorated_function.cache ==", "== alias_dict.alias_to_key def test_AliasKeyDict_del(): alias_dict = AliasKeyDict([('foo', 'bar')]) alias_dict.add_alias('baz', 'foo') del alias_dict['baz'] assert", "object_dict, expected_object_type): assert determine_object_type(object_dict, default_type_to_object) == expected_object_type def test_empty(): assert {} == strip_xscope({})", "import mock import pytest from bravado_core.util import AliasKeyDict from bravado_core.util import cached_property from", "mostly of digits should keep them ], ) def test_sanitize_name(input, expected): assert sanitize_name(input)", "'MON': { '$ref': '#/definitions/DayHours', }, } expected = { 'MON': { '$ref': '#/definitions/DayHours',", "= AliasKeyDict({'a': 'b', 'c': 'd'}) alias_dict.add_alias('alias_a', 'a') assert len(alias_dict) == 2 assert set(alias_dict.items())", "'foo'), # make sure we strip multiple underscores ('100percent', 'percent'), # make sure", "'foo'), # bunch of illegal chars ('__foo__', 'foo'), # make sure we strip", "assert alias_dict.pop('alias_a') == 'b' assert len(alias_dict) == 1 assert 'a' not in alias_dict", "in alias_dict def test_AliasKeyDict_copy(): alias_dict = AliasKeyDict([('foo', 'bar')]) alias_dict.add_alias('baz', 'foo') dict_copy = alias_dict.copy()", "= Class() assert class_instance_2.calls == 1 assert class_instance_2.prop == 1 assert class_instance_2.calls ==", "ObjectType.UNKNOWN], [True, {'in': 'body', 'name': 'body', 'required': True, 'schema': {'type': 'object'}}, ObjectType.PARAMETER], [True,", "expected_object_type def test_empty(): assert {} == strip_xscope({}) def test_contained_in_dict(): fragment = { 'MON':", "('100.0', '_100_0'), # a name consisting mostly of digits should keep them ],", "== [[1, None], [2, 3]] 
decorated_function.cache.clear() assert decorated_function(1) == id(1) + id(None) assert", "+ id(None), } assert calls == [[1, None]] assert decorated_function(2, 3) == id(2)" ]
[ "discord class Item: def to_discord(self) -> Any: pass def check(self, func: Callable[[discord.Interaction], bool])", "Any, Callable import discord class Item: def to_discord(self) -> Any: pass def check(self,", "typing import Any, Callable import discord class Item: def to_discord(self) -> Any: pass", "import Any, Callable import discord class Item: def to_discord(self) -> Any: pass def", "Callable import discord class Item: def to_discord(self) -> Any: pass def check(self, func:", "Item: def to_discord(self) -> Any: pass def check(self, func: Callable[[discord.Interaction], bool]) -> 'Item':", "class Item: def to_discord(self) -> Any: pass def check(self, func: Callable[[discord.Interaction], bool]) ->", "def to_discord(self) -> Any: pass def check(self, func: Callable[[discord.Interaction], bool]) -> 'Item': pass", "import discord class Item: def to_discord(self) -> Any: pass def check(self, func: Callable[[discord.Interaction],", "from typing import Any, Callable import discord class Item: def to_discord(self) -> Any:" ]
[ "name=Bgg.get_primary_name(itme.get(\"name\")), yearpublished=Bgg.parse_int(itme.get(\"yearpublished\", {}).get(\"@value\")), ) return _item class Search(Bgg): items: List[Item] @classmethod def create(cls,", "OrderedDict: items = [items] _items = [Item.create(x) for x in items] return Search(items=_items)", "items is None: return None if type(items) == OrderedDict: items = [items] _items", "create(cls, items: Union[OrderedDict, List[OrderedDict]]): if items is None: return None if type(items) ==", "import List, Optional, Union from .bgg import Bgg class Item(Bgg): id: int type:", "Bgg class Item(Bgg): id: int type: str name: Optional[str] yearpublished: Optional[int] @classmethod def", "Union[OrderedDict, List[OrderedDict]]): if items is None: return None if type(items) == OrderedDict: items", "def create(cls, itme: OrderedDict): _item = Item( id=Bgg.parse_int(itme.get(\"@id\")), type=itme.get(\"@type\"), name=Bgg.get_primary_name(itme.get(\"name\")), yearpublished=Bgg.parse_int(itme.get(\"yearpublished\", {}).get(\"@value\")), )", "_item = Item( id=Bgg.parse_int(itme.get(\"@id\")), type=itme.get(\"@type\"), name=Bgg.get_primary_name(itme.get(\"name\")), yearpublished=Bgg.parse_int(itme.get(\"yearpublished\", {}).get(\"@value\")), ) return _item class Search(Bgg):", "items: Union[OrderedDict, List[OrderedDict]]): if items is None: return None if type(items) == OrderedDict:", "if type(items) == OrderedDict: items = [items] _items = [Item.create(x) for x in", "OrderedDict): _item = Item( id=Bgg.parse_int(itme.get(\"@id\")), type=itme.get(\"@type\"), name=Bgg.get_primary_name(itme.get(\"name\")), yearpublished=Bgg.parse_int(itme.get(\"yearpublished\", {}).get(\"@value\")), ) return _item class", "class Item(Bgg): id: int type: str name: Optional[str] yearpublished: Optional[int] @classmethod def create(cls,", "yearpublished=Bgg.parse_int(itme.get(\"yearpublished\", {}).get(\"@value\")), ) return _item class Search(Bgg): items: List[Item] @classmethod def 
create(cls, items:", "_item class Search(Bgg): items: List[Item] @classmethod def create(cls, items: Union[OrderedDict, List[OrderedDict]]): if items", "List[OrderedDict]]): if items is None: return None if type(items) == OrderedDict: items =", "Item( id=Bgg.parse_int(itme.get(\"@id\")), type=itme.get(\"@type\"), name=Bgg.get_primary_name(itme.get(\"name\")), yearpublished=Bgg.parse_int(itme.get(\"yearpublished\", {}).get(\"@value\")), ) return _item class Search(Bgg): items: List[Item]", "create(cls, itme: OrderedDict): _item = Item( id=Bgg.parse_int(itme.get(\"@id\")), type=itme.get(\"@type\"), name=Bgg.get_primary_name(itme.get(\"name\")), yearpublished=Bgg.parse_int(itme.get(\"yearpublished\", {}).get(\"@value\")), ) return", "Search(Bgg): items: List[Item] @classmethod def create(cls, items: Union[OrderedDict, List[OrderedDict]]): if items is None:", "return None if type(items) == OrderedDict: items = [items] _items = [Item.create(x) for", "type=itme.get(\"@type\"), name=Bgg.get_primary_name(itme.get(\"name\")), yearpublished=Bgg.parse_int(itme.get(\"yearpublished\", {}).get(\"@value\")), ) return _item class Search(Bgg): items: List[Item] @classmethod def", "List, Optional, Union from .bgg import Bgg class Item(Bgg): id: int type: str", "class Search(Bgg): items: List[Item] @classmethod def create(cls, items: Union[OrderedDict, List[OrderedDict]]): if items is", "id: int type: str name: Optional[str] yearpublished: Optional[int] @classmethod def create(cls, itme: OrderedDict):", "None: return None if type(items) == OrderedDict: items = [items] _items = [Item.create(x)", "Optional[str] yearpublished: Optional[int] @classmethod def create(cls, itme: OrderedDict): _item = Item( id=Bgg.parse_int(itme.get(\"@id\")), type=itme.get(\"@type\"),", "type(items) == OrderedDict: items = [items] _items = [Item.create(x) for x in items]", "if items is None: return None if type(items) == OrderedDict: items = [items]", "itme: OrderedDict): _item = Item( 
id=Bgg.parse_int(itme.get(\"@id\")), type=itme.get(\"@type\"), name=Bgg.get_primary_name(itme.get(\"name\")), yearpublished=Bgg.parse_int(itme.get(\"yearpublished\", {}).get(\"@value\")), ) return _item", "{}).get(\"@value\")), ) return _item class Search(Bgg): items: List[Item] @classmethod def create(cls, items: Union[OrderedDict,", "yearpublished: Optional[int] @classmethod def create(cls, itme: OrderedDict): _item = Item( id=Bgg.parse_int(itme.get(\"@id\")), type=itme.get(\"@type\"), name=Bgg.get_primary_name(itme.get(\"name\")),", "OrderedDict from typing import List, Optional, Union from .bgg import Bgg class Item(Bgg):", "name: Optional[str] yearpublished: Optional[int] @classmethod def create(cls, itme: OrderedDict): _item = Item( id=Bgg.parse_int(itme.get(\"@id\")),", "str name: Optional[str] yearpublished: Optional[int] @classmethod def create(cls, itme: OrderedDict): _item = Item(", "Optional, Union from .bgg import Bgg class Item(Bgg): id: int type: str name:", "@classmethod def create(cls, itme: OrderedDict): _item = Item( id=Bgg.parse_int(itme.get(\"@id\")), type=itme.get(\"@type\"), name=Bgg.get_primary_name(itme.get(\"name\")), yearpublished=Bgg.parse_int(itme.get(\"yearpublished\", {}).get(\"@value\")),", "import Bgg class Item(Bgg): id: int type: str name: Optional[str] yearpublished: Optional[int] @classmethod", "from collections import OrderedDict from typing import List, Optional, Union from .bgg import", "id=Bgg.parse_int(itme.get(\"@id\")), type=itme.get(\"@type\"), name=Bgg.get_primary_name(itme.get(\"name\")), yearpublished=Bgg.parse_int(itme.get(\"yearpublished\", {}).get(\"@value\")), ) return _item class Search(Bgg): items: List[Item] @classmethod", ") return _item class Search(Bgg): items: List[Item] @classmethod def create(cls, items: Union[OrderedDict, List[OrderedDict]]):", "def create(cls, items: Union[OrderedDict, List[OrderedDict]]): if items is None: return None if type(items)", "collections import OrderedDict from typing import 
List, Optional, Union from .bgg import Bgg", "== OrderedDict: items = [items] _items = [Item.create(x) for x in items] return", "import OrderedDict from typing import List, Optional, Union from .bgg import Bgg class", "from typing import List, Optional, Union from .bgg import Bgg class Item(Bgg): id:", "Union from .bgg import Bgg class Item(Bgg): id: int type: str name: Optional[str]", "Item(Bgg): id: int type: str name: Optional[str] yearpublished: Optional[int] @classmethod def create(cls, itme:", "Optional[int] @classmethod def create(cls, itme: OrderedDict): _item = Item( id=Bgg.parse_int(itme.get(\"@id\")), type=itme.get(\"@type\"), name=Bgg.get_primary_name(itme.get(\"name\")), yearpublished=Bgg.parse_int(itme.get(\"yearpublished\",", "typing import List, Optional, Union from .bgg import Bgg class Item(Bgg): id: int", ".bgg import Bgg class Item(Bgg): id: int type: str name: Optional[str] yearpublished: Optional[int]", "return _item class Search(Bgg): items: List[Item] @classmethod def create(cls, items: Union[OrderedDict, List[OrderedDict]]): if", "= Item( id=Bgg.parse_int(itme.get(\"@id\")), type=itme.get(\"@type\"), name=Bgg.get_primary_name(itme.get(\"name\")), yearpublished=Bgg.parse_int(itme.get(\"yearpublished\", {}).get(\"@value\")), ) return _item class Search(Bgg): items:", "from .bgg import Bgg class Item(Bgg): id: int type: str name: Optional[str] yearpublished:", "type: str name: Optional[str] yearpublished: Optional[int] @classmethod def create(cls, itme: OrderedDict): _item =", "@classmethod def create(cls, items: Union[OrderedDict, List[OrderedDict]]): if items is None: return None if", "List[Item] @classmethod def create(cls, items: Union[OrderedDict, List[OrderedDict]]): if items is None: return None", "items: List[Item] @classmethod def create(cls, items: Union[OrderedDict, List[OrderedDict]]): if items is None: return", "None if type(items) == OrderedDict: items = [items] _items = [Item.create(x) for x", "int type: str name: Optional[str] 
yearpublished: Optional[int] @classmethod def create(cls, itme: OrderedDict): _item", "is None: return None if type(items) == OrderedDict: items = [items] _items =" ]
[ "U = np.eye(2 ** self.n_qubit) for lay in range(layer): U1 = np.kron( np.kron(", "def getGradient1(self, theta, num_para, init): # 返回PSR的结果 left = deepcopy(theta) right = deepcopy(theta)", "/ 2)) row = np.hstack((a, b)) col = np.hstack((b, a)) data = (np.ones_like(row)", "+ 7])) U = U @ U5 @ U4 @ U3 @ U2", "np.kron(np.eye(4), QG.C1nU(n=0, U=self.Ry(theta=theta[lay * 8 + 6]))) U5 = QG.UnC1(n=2, U=self.Ry(theta=theta[lay * 8", "#随机采样,默认值10 def randomSample(self,psi,label): ind = random.sample(list(range(0,len(psi))),self.nums) return psi[ind],label[ind] # 归一化 def normalize(self,psi): h,l", "get_accuracy(self, expect, label): # expect的shape为:[num, n_class] acc = 0 for j in range(expect.shape[0]):", "= np.zeros(epochs) for epoch in range(epochs): delta = np.zeros_like(theta) # 每个epoch更新一次梯度 for i", "self.getGradient1(theta, t, psi[i].reshape(len(psi[i]), 1)) soft_e = self.Softmax(deepcopy(expect[epoch, i])) delta[t] += lr * (soft_e", "2)) row = np.hstack((a, b)) col = np.hstack((b, a)) data = (np.ones_like(row) -", "test_acc def getGradient1(self, theta, num_para, init): # 返回PSR的结果 left = deepcopy(theta) right =", "@ U5 @ U4 @ U3 @ U2 @ U1 return U def", "len(theta) // 8 U = np.eye(2 ** self.n_qubit) for lay in range(layer): U1", "QG.C1nU(n=0, U=self.Ry(theta=theta[lay * 8 + 5]))), QG.I) U4 = np.kron(np.eye(4), QG.C1nU(n=0, U=self.Ry(theta=theta[lay *", "numpy as np import scipy.sparse as sp from numpy import pi, sin, cos,", "return x def partial_NLL(self, x, y): return sum(x - y) def Ry(self, theta):", "np.zeros(epochs) for epoch in range(epochs): delta = np.zeros_like(theta) # 每个epoch更新一次梯度 for i in", "psi[i].reshape(len(psi[i]), 1)) soft_e = self.Softmax(deepcopy(expect[epoch, i])) delta[t] += lr * (soft_e - y[i]).reshape((1,", "/ 2)) b = np.random.randint(low=0, high=2708, size=int(13264 * prop / 2)) row =", "2 * np.random.randint(low=0, high=2, size=len(row))) * prop residuals = sp.coo_matrix((data, (row, col)), shape=(2708,", "res[self.map[i][0], 0] ** 2 + 
res[self.map[i][1], 0] ** 2 return expect def getBlock1(self,", "+ 2])), self.Ry(theta[lay * 8 + 3])) U2 = np.kron(QG.C1nU(n=0, U=self.Ry(theta=theta[lay * 8", "- y) def Ry(self, theta): return np.array([[cos(theta), -sin(theta)], [sin(theta), cos(theta)]]) def test(self, theta,", "epochs, lr): # 输入为随机采样后的量子态集合 expect = np.zeros((epochs, self.nums, self.n_class)) acc = np.zeros(epochs) test_acc", "for lay in range(layer): U1 = np.kron( np.kron( np.kron(self.Ry(theta[lay * 8 + 0]),", "expect_r = np.zeros(self.n_class) for i in range(self.n_class): expect_r[i] = out_r[self.map[i][0], 0] ** 2", "j in range(nums): for i in range(2**self.n_qubit): psi[j,i] = (1+np.random.normal(0,sigma,1)) * init[i] return", "size=int(13264 * prop / 2)) b = np.random.randint(low=0, high=2708, size=int(13264 * prop /", "range(self.n_class): expect_r[i] = out_r[self.map[i][0], 0] ** 2 + out_r[self.map[i][1], 0] ** 2 return", "y): return sum(x - y) def Ry(self, theta): return np.array([[cos(theta), -sin(theta)], [sin(theta), cos(theta)]])", "def test(self, theta, init): test_expect = np.zeros((len(self.label) - self.nums, self.n_class)) for n in", "delta = np.zeros_like(theta) # 每个epoch更新一次梯度 for i in range(self.nums): # 先根据现有的参数计算一次期望概率 expect[epoch, i]", "= QG.UnC1(n=2, U=self.Ry(theta=theta[lay * 8 + 7])) U = U @ U5 @", "A return x def partial_NLL(self, x, y): return sum(x - y) def Ry(self,", "loss[epoch] = tmp acc[epoch] = self.get_accuracy(expect[epoch], label) test_acc[epoch] = self.test(theta=theta, init=init) print('第', epoch,", "for k in range(len(x)): x[k] = np.exp(x[k]) / A return x def partial_NLL(self,", "x[k] = np.exp(x[k]) / A return x def partial_NLL(self, x, y): return sum(x", "self.n_class)) @ grad_e.reshape((self.n_class, 1)) theta -= delta / self.nums # 更新参数 tmp =", "= np.zeros_like(theta) # 每个epoch更新一次梯度 for i in range(self.nums): # 先根据现有的参数计算一次期望概率 expect[epoch, i] =", "test_acc[epoch] = self.test(theta=theta, init=init) print('第', epoch, '次迭代,', 'loss:', loss[epoch], 
'train_acc:', acc[epoch], 'test_acc', test_acc[epoch])", "range(2**self.n_qubit): psi[j,i] = (1+np.random.normal(0,sigma,1)) * init[i] return psi #随机采样,默认值10 def randomSample(self,psi,label): ind =", "= out_r[self.map[i][0], 0] ** 2 + out_r[self.map[i][1], 0] ** 2 return expect_r -", "len(self.label)): test_expect[n - self.nums] = self.getExpectation1(theta=theta, init=init[n].reshape(len(init[n]), 1)) test_acc = self.get_accuracy(test_expect, self.label[self.nums:]) return", "self.n_qubit) for lay in range(layer): U1 = np.kron( np.kron( np.kron(self.Ry(theta[lay * 8 +", "delta[t] += lr * (soft_e - y[i]).reshape((1, self.n_class)) @ grad_e.reshape((self.n_class, 1)) theta -=", "pi / 4 right[num_para] = right[num_para] + pi / 4 # 左边 out_l", "sin, cos, sqrt, exp from copy import deepcopy class Networks: def __init__(self, n_qubit,", "* 8 + 0]), self.Ry(theta[lay * 8 + 1])), self.Ry(theta[lay * 8 +", "= np.random.randint(low=0, high=2708, size=int(13264 * prop / 2)) b = np.random.randint(low=0, high=2708, size=int(13264", "in range(self.nums): tmp -= np.log(expect[epoch, i, label[i]]) # 计算损失函数 loss[epoch] = tmp acc[epoch]", "= np.kron(np.eye(4), QG.C1nU(n=0, U=self.Ry(theta=theta[lay * 8 + 6]))) U5 = QG.UnC1(n=2, U=self.Ry(theta=theta[lay *", "QG.UnC1(n=2, U=self.Ry(theta=theta[lay * 8 + 7])) U = U @ U5 @ U4", "= nums self.label = label self.n_class = n_class self.n_qubit = n_qubit self.map =", "y = np.zeros((self.nums, self.n_class)) for k in range(self.nums): y[k, label[k]] = 1 loss", "nums, label): self.nums = nums self.label = label self.n_class = n_class self.n_qubit =", "= {0:[0,1], 1:[2,3], 2:[4,5], 3:[6,7], 4:[8,9], 5:[10,11], 6:[12,13]} def uniformstate(self,sigma,nums,init): psi = np.zeros((nums,2**self.n_qubit))", "** 2 + out_r[self.map[i][1], 0] ** 2 return expect_r - expect_l def getExpectation1(self,", "for i in range(2**self.n_qubit): psi[j,i] = (1+np.random.normal(0,sigma,1)) * init[i] return psi #随机采样,默认值10 def", "self.n_class)) acc = np.zeros(epochs) 
test_acc = np.zeros(epochs) psi = init[0:self.nums] # 训练集数据 #", "输入为随机采样后的量子态集合 expect = np.zeros((epochs, self.nums, self.n_class)) acc = np.zeros(epochs) test_acc = np.zeros(epochs) psi", "8 + 7])) U = U @ U5 @ U4 @ U3 @", "return psi def train1(self, theta, init, label, epochs, lr): # 输入为随机采样后的量子态集合 expect =", "block QG = QuantumGate() layer = len(theta) // 8 U = np.eye(2 **", "out_r[self.map[i][1], 0] ** 2 return expect_r - expect_l def getExpectation1(self, theta, init): res", "random from QuantumGate import QuantumGate import numpy as np import scipy.sparse as sp", "return psi[ind],label[ind] # 归一化 def normalize(self,psi): h,l = psi.shape # h是psi的个数,l是每个psi的维度 for j", "U1 = np.kron( np.kron( np.kron(self.Ry(theta[lay * 8 + 0]), self.Ry(theta[lay * 8 +", "np.eye(2 ** self.n_qubit) for lay in range(layer): U1 = np.kron( np.kron( np.kron(self.Ry(theta[lay *", "U5 = QG.UnC1(n=2, U=self.Ry(theta=theta[lay * 8 + 7])) U = U @ U5", "self.n_class)) for n in range(self.nums, len(self.label)): test_expect[n - self.nums] = self.getExpectation1(theta=theta, init=init[n].reshape(len(init[n]), 1))", "2:[4,5], 3:[6,7], 4:[8,9], 5:[10,11], 6:[12,13]} def uniformstate(self,sigma,nums,init): psi = np.zeros((nums,2**self.n_qubit)) for j in", "psi[j,i] = psi[j,i] / s return psi def train1(self, theta, init, label, epochs,", "@ init expect = np.zeros(self.n_class) for i in range(self.n_class): expect[i] = res[self.map[i][0], 0]", "range(self.nums, len(self.label)): test_expect[n - self.nums] = self.getExpectation1(theta=theta, init=init[n].reshape(len(init[n]), 1)) test_acc = self.get_accuracy(test_expect, self.label[self.nums:])", "for j in range(expect.shape[0]): arg = np.argmax(expect[j]) if arg == label[j]: acc +=", "epoch, '次迭代,', 'loss:', loss[epoch], 'train_acc:', acc[epoch], 'test_acc', test_acc[epoch]) return theta, loss, acc, test_acc", "1)) test_acc = self.get_accuracy(test_expect, self.label[self.nums:]) return test_acc def get_accuracy(self, expect, label): # 
expect的shape为:[num,", "self.get_accuracy(expect[epoch], label) test_acc[epoch] = self.test(theta=theta, init=init) print('第', epoch, '次迭代,', 'loss:', loss[epoch], 'train_acc:', acc[epoch],", "prop / 2)) row = np.hstack((a, b)) col = np.hstack((b, a)) data =", "2 + out_r[self.map[i][1], 0] ** 2 return expect_r - expect_l def getExpectation1(self, theta,", "self.label = label self.n_class = n_class self.n_qubit = n_qubit self.map = {0:[0,1], 1:[2,3],", "in range(l): psi[j,i] = psi[j,i] / s return psi def train1(self, theta, init,", "# 更新参数 tmp = 0 for i in range(self.nums): tmp -= np.log(expect[epoch, i,", "0 for j in range(expect.shape[0]): arg = np.argmax(expect[j]) if arg == label[j]: acc", "self.Ry(theta[lay * 8 + 3])) U2 = np.kron(QG.C1nU(n=0, U=self.Ry(theta=theta[lay * 8 + 4])),", "计算损失函数 loss[epoch] = tmp acc[epoch] = self.get_accuracy(expect[epoch], label) test_acc[epoch] = self.test(theta=theta, init=init) print('第',", "return U def Softmax(self, x): A = sum(np.exp(x)) for k in range(len(x)): x[k]", "acc[epoch] = self.get_accuracy(expect[epoch], label) test_acc[epoch] = self.test(theta=theta, init=init) print('第', epoch, '次迭代,', 'loss:', loss[epoch],", "high=2708, size=int(13264 * prop / 2)) b = np.random.randint(low=0, high=2708, size=int(13264 * prop", "# 我们假设残差项矩阵不为0的元素为13264*prop, # 同时应保证其范数为2708*prop a = np.random.randint(low=0, high=2708, size=int(13264 * prop / 2))", "expect的shape为:[num, n_class] acc = 0 for j in range(expect.shape[0]): arg = np.argmax(expect[j]) if", "{0:[0,1], 1:[2,3], 2:[4,5], 3:[6,7], 4:[8,9], 5:[10,11], 6:[12,13]} def uniformstate(self,sigma,nums,init): psi = np.zeros((nums,2**self.n_qubit)) for", "np.argmax(expect[j]) if arg == label[j]: acc += 1 return acc def get_residuals(self, prop):", "A = sum(np.exp(x)) for k in range(len(x)): x[k] = np.exp(x[k]) / A return", "np.kron(QG.C1nU(n=0, U=self.Ry(theta=theta[lay * 8 + 4])), np.eye(4)) U3 = np.kron(np.kron(QG.I, QG.C1nU(n=0, U=self.Ry(theta=theta[lay *", "np.kron( np.kron( 
np.kron(self.Ry(theta[lay * 8 + 0]), self.Ry(theta[lay * 8 + 1])), self.Ry(theta[lay", "i])) delta[t] += lr * (soft_e - y[i]).reshape((1, self.n_class)) @ grad_e.reshape((self.n_class, 1)) theta", "expect, label): # expect的shape为:[num, n_class] acc = 0 for j in range(expect.shape[0]): arg", "psi #随机采样,默认值10 def randomSample(self,psi,label): ind = random.sample(list(range(0,len(psi))),self.nums) return psi[ind],label[ind] # 归一化 def normalize(self,psi):", "for i in range(self.n_class): expect_l[i] = out_l[self.map[i][0], 0] ** 2 + out_l[self.map[i][1], 0]", "range(self.n_class): expect[i] = res[self.map[i][0], 0] ** 2 + res[self.map[i][1], 0] ** 2 return", "tmp acc[epoch] = self.get_accuracy(expect[epoch], label) test_acc[epoch] = self.test(theta=theta, init=init) print('第', epoch, '次迭代,', 'loss:',", "+ 6]))) U5 = QG.UnC1(n=2, U=self.Ry(theta=theta[lay * 8 + 7])) U = U", "expect = np.zeros((epochs, self.nums, self.n_class)) acc = np.zeros(epochs) test_acc = np.zeros(epochs) psi =", "def uniformstate(self,sigma,nums,init): psi = np.zeros((nums,2**self.n_qubit)) for j in range(nums): for i in range(2**self.n_qubit):", "1)) soft_e = self.Softmax(deepcopy(expect[epoch, i])) delta[t] += lr * (soft_e - y[i]).reshape((1, self.n_class))", "np.random.randint(low=0, high=2, size=len(row))) * prop residuals = sp.coo_matrix((data, (row, col)), shape=(2708, 2708)) return", "a = np.random.randint(low=0, high=2708, size=int(13264 * prop / 2)) b = np.random.randint(low=0, high=2708,", "8 U = np.eye(2 ** self.n_qubit) for lay in range(layer): U1 = np.kron(", "0] ** 2 + out_l[self.map[i][1], 0] ** 2 # 右边 out_r = self.getBlock1(theta=right)", "as sp from numpy import pi, sin, cos, sqrt, exp from copy import", "self.n_qubit = n_qubit self.map = {0:[0,1], 1:[2,3], 2:[4,5], 3:[6,7], 4:[8,9], 5:[10,11], 6:[12,13]} def", "4 # 左边 out_l = self.getBlock1(theta=left) @ init expect_l = np.zeros(self.n_class) for i", "layer = len(theta) // 8 U = np.eye(2 ** self.n_qubit) for lay in", "= 
self.get_accuracy(expect[epoch], label) test_acc[epoch] = self.test(theta=theta, init=init) print('第', epoch, '次迭代,', 'loss:', loss[epoch], 'train_acc:',", "4])), np.eye(4)) U3 = np.kron(np.kron(QG.I, QG.C1nU(n=0, U=self.Ry(theta=theta[lay * 8 + 5]))), QG.I) U4", "QG.C1nU(n=0, U=self.Ry(theta=theta[lay * 8 + 6]))) U5 = QG.UnC1(n=2, U=self.Ry(theta=theta[lay * 8 +", "self.map = {0:[0,1], 1:[2,3], 2:[4,5], 3:[6,7], 4:[8,9], 5:[10,11], 6:[12,13]} def uniformstate(self,sigma,nums,init): psi =", "def partial_NLL(self, x, y): return sum(x - y) def Ry(self, theta): return np.array([[cos(theta),", "from numpy import pi, sin, cos, sqrt, exp from copy import deepcopy class", "(soft_e - y[i]).reshape((1, self.n_class)) @ grad_e.reshape((self.n_class, 1)) theta -= delta / self.nums #", "2 + out_l[self.map[i][1], 0] ** 2 # 右边 out_r = self.getBlock1(theta=right) @ init", "from copy import deepcopy class Networks: def __init__(self, n_qubit, n_class, nums, label): self.nums", "loss[epoch], 'train_acc:', acc[epoch], 'test_acc', test_acc[epoch]) return theta, loss, acc, test_acc def getGradient1(self, theta,", "= out_l[self.map[i][0], 0] ** 2 + out_l[self.map[i][1], 0] ** 2 # 右边 out_r", "k in range(len(x)): x[k] = np.exp(x[k]) / A return x def partial_NLL(self, x,", "# 同时应保证其范数为2708*prop a = np.random.randint(low=0, high=2708, size=int(13264 * prop / 2)) b =", "QG.I) U4 = np.kron(np.eye(4), QG.C1nU(n=0, U=self.Ry(theta=theta[lay * 8 + 6]))) U5 = QG.UnC1(n=2,", "import scipy.sparse as sp from numpy import pi, sin, cos, sqrt, exp from", "num_para, init): # 返回PSR的结果 left = deepcopy(theta) right = deepcopy(theta) left[num_para] = left[num_para]", "for i in range(self.n_class): expect[i] = res[self.map[i][0], 0] ** 2 + res[self.map[i][1], 0]", "5]))), QG.I) U4 = np.kron(np.eye(4), QG.C1nU(n=0, U=self.Ry(theta=theta[lay * 8 + 6]))) U5 =", "test(self, theta, init): test_expect = np.zeros((len(self.label) - self.nums, self.n_class)) for n in range(self.nums,", "= right[num_para] + pi / 4 # 
左边 out_l = self.getBlock1(theta=left) @ init", "- self.nums, self.n_class)) for n in range(self.nums, len(self.label)): test_expect[n - self.nums] = self.getExpectation1(theta=theta,", "= random.sample(list(range(0,len(psi))),self.nums) return psi[ind],label[ind] # 归一化 def normalize(self,psi): h,l = psi.shape # h是psi的个数,l是每个psi的维度", "0] ** 2 # 右边 out_r = self.getBlock1(theta=right) @ init expect_r = np.zeros(self.n_class)", "8 + 1])), self.Ry(theta[lay * 8 + 2])), self.Ry(theta[lay * 8 + 3]))", "= sum(np.exp(x)) for k in range(len(x)): x[k] = np.exp(x[k]) / A return x", "label y = np.zeros((self.nums, self.n_class)) for k in range(self.nums): y[k, label[k]] = 1", "U=self.Ry(theta=theta[lay * 8 + 7])) U = U @ U5 @ U4 @", "# 计算损失函数 loss[epoch] = tmp acc[epoch] = self.get_accuracy(expect[epoch], label) test_acc[epoch] = self.test(theta=theta, init=init)", "expect_l[i] = out_l[self.map[i][0], 0] ** 2 + out_l[self.map[i][1], 0] ** 2 # 右边", "psi.shape # h是psi的个数,l是每个psi的维度 for j in range(h): s = sqrt(sum(psi[j]**2)) for i in", "U5 @ U4 @ U3 @ U2 @ U1 return U def Softmax(self,", "self.Ry(theta[lay * 8 + 1])), self.Ry(theta[lay * 8 + 2])), self.Ry(theta[lay * 8", "sqrt(sum(psi[j]**2)) for i in range(l): psi[j,i] = psi[j,i] / s return psi def", "1)) for t in range(len(theta)): grad_e = self.getGradient1(theta, t, psi[i].reshape(len(psi[i]), 1)) soft_e =", "+ 4])), np.eye(4)) U3 = np.kron(np.kron(QG.I, QG.C1nU(n=0, U=self.Ry(theta=theta[lay * 8 + 5]))), QG.I)", "n_class] acc = 0 for j in range(expect.shape[0]): arg = np.argmax(expect[j]) if arg", "(np.ones_like(row) - 2 * np.random.randint(low=0, high=2, size=len(row))) * prop residuals = sp.coo_matrix((data, (row,", "0] ** 2 + out_r[self.map[i][1], 0] ** 2 return expect_r - expect_l def", "out_l[self.map[i][0], 0] ** 2 + out_l[self.map[i][1], 0] ** 2 # 右边 out_r =", "psi def train1(self, theta, init, label, epochs, lr): # 输入为随机采样后的量子态集合 expect = np.zeros((epochs,", "acc def get_residuals(self, prop): # adj中共有13264条边, # 
我们假设残差项矩阵不为0的元素为13264*prop, # 同时应保证其范数为2708*prop a = np.random.randint(low=0,", "x, y): return sum(x - y) def Ry(self, theta): return np.array([[cos(theta), -sin(theta)], [sin(theta),", "np.eye(4)) U3 = np.kron(np.kron(QG.I, QG.C1nU(n=0, U=self.Ry(theta=theta[lay * 8 + 5]))), QG.I) U4 =", "expect[i] = res[self.map[i][0], 0] ** 2 + res[self.map[i][1], 0] ** 2 return expect", "import QuantumGate import numpy as np import scipy.sparse as sp from numpy import", "range(layer): U1 = np.kron( np.kron( np.kron(self.Ry(theta[lay * 8 + 0]), self.Ry(theta[lay * 8", "8 + 3])) U2 = np.kron(QG.C1nU(n=0, U=self.Ry(theta=theta[lay * 8 + 4])), np.eye(4)) U3", "# 每个epoch更新一次梯度 for i in range(self.nums): # 先根据现有的参数计算一次期望概率 expect[epoch, i] = self.getExpectation1(theta, psi[i].reshape(len(psi[i]),", "self.Softmax(deepcopy(expect[epoch, i])) delta[t] += lr * (soft_e - y[i]).reshape((1, self.n_class)) @ grad_e.reshape((self.n_class, 1))", "self.getBlock1(theta=right) @ init expect_r = np.zeros(self.n_class) for i in range(self.n_class): expect_r[i] = out_r[self.map[i][0],", "return sum(x - y) def Ry(self, theta): return np.array([[cos(theta), -sin(theta)], [sin(theta), cos(theta)]]) def", "def Ry(self, theta): return np.array([[cos(theta), -sin(theta)], [sin(theta), cos(theta)]]) def test(self, theta, init): test_expect", "右边 out_r = self.getBlock1(theta=right) @ init expect_r = np.zeros(self.n_class) for i in range(self.n_class):", "U def Softmax(self, x): A = sum(np.exp(x)) for k in range(len(x)): x[k] =", "2])), self.Ry(theta[lay * 8 + 3])) U2 = np.kron(QG.C1nU(n=0, U=self.Ry(theta=theta[lay * 8 +", "range(self.nums): # 先根据现有的参数计算一次期望概率 expect[epoch, i] = self.getExpectation1(theta, psi[i].reshape(len(psi[i]), 1)) for t in range(len(theta)):", "for i in range(self.nums): tmp -= np.log(expect[epoch, i, label[i]]) # 计算损失函数 loss[epoch] =", "init): # 返回PSR的结果 left = deepcopy(theta) right = deepcopy(theta) left[num_para] = left[num_para] -", "0] ** 2 return expect def getBlock1(self, 
theta): # 常见的PQC block QG =", "+= 1 return acc def get_residuals(self, prop): # adj中共有13264条边, # 我们假设残差项矩阵不为0的元素为13264*prop, # 同时应保证其范数为2708*prop", "getExpectation1(self, theta, init): res = self.getBlock1(theta=theta) @ init expect = np.zeros(self.n_class) for i", "+ 0]), self.Ry(theta[lay * 8 + 1])), self.Ry(theta[lay * 8 + 2])), self.Ry(theta[lay", "in range(self.n_class): expect_r[i] = out_r[self.map[i][0], 0] ** 2 + out_r[self.map[i][1], 0] ** 2", "每个epoch更新一次梯度 for i in range(self.nums): # 先根据现有的参数计算一次期望概率 expect[epoch, i] = self.getExpectation1(theta, psi[i].reshape(len(psi[i]), 1))", "1])), self.Ry(theta[lay * 8 + 2])), self.Ry(theta[lay * 8 + 3])) U2 =", "psi[i].reshape(len(psi[i]), 1)) for t in range(len(theta)): grad_e = self.getGradient1(theta, t, psi[i].reshape(len(psi[i]), 1)) soft_e", "Networks: def __init__(self, n_qubit, n_class, nums, label): self.nums = nums self.label = label", "def train1(self, theta, init, label, epochs, lr): # 输入为随机采样后的量子态集合 expect = np.zeros((epochs, self.nums,", "# h是psi的个数,l是每个psi的维度 for j in range(h): s = sqrt(sum(psi[j]**2)) for i in range(l):", "+ 1])), self.Ry(theta[lay * 8 + 2])), self.Ry(theta[lay * 8 + 3])) U2", "0] ** 2 + res[self.map[i][1], 0] ** 2 return expect def getBlock1(self, theta):", "# y = label y = np.zeros((self.nums, self.n_class)) for k in range(self.nums): y[k,", "= np.hstack((a, b)) col = np.hstack((b, a)) data = (np.ones_like(row) - 2 *", "y[k, label[k]] = 1 loss = np.zeros(epochs) for epoch in range(epochs): delta =", "@ grad_e.reshape((self.n_class, 1)) theta -= delta / self.nums # 更新参数 tmp = 0", "init=init) print('第', epoch, '次迭代,', 'loss:', loss[epoch], 'train_acc:', acc[epoch], 'test_acc', test_acc[epoch]) return theta, loss,", "- 2 * np.random.randint(low=0, high=2, size=len(row))) * prop residuals = sp.coo_matrix((data, (row, col)),", "我们假设残差项矩阵不为0的元素为13264*prop, # 同时应保证其范数为2708*prop a = np.random.randint(low=0, high=2708, size=int(13264 * prop / 2)) b", "i] = self.getExpectation1(theta, 
psi[i].reshape(len(psi[i]), 1)) for t in range(len(theta)): grad_e = self.getGradient1(theta, t,", "self.nums, self.n_class)) acc = np.zeros(epochs) test_acc = np.zeros(epochs) psi = init[0:self.nums] # 训练集数据", "= np.kron(np.kron(QG.I, QG.C1nU(n=0, U=self.Ry(theta=theta[lay * 8 + 5]))), QG.I) U4 = np.kron(np.eye(4), QG.C1nU(n=0,", "label self.n_class = n_class self.n_qubit = n_qubit self.map = {0:[0,1], 1:[2,3], 2:[4,5], 3:[6,7],", "= label y = np.zeros((self.nums, self.n_class)) for k in range(self.nums): y[k, label[k]] =", "1 return acc def get_residuals(self, prop): # adj中共有13264条边, # 我们假设残差项矩阵不为0的元素为13264*prop, # 同时应保证其范数为2708*prop a", "= self.Softmax(deepcopy(expect[epoch, i])) delta[t] += lr * (soft_e - y[i]).reshape((1, self.n_class)) @ grad_e.reshape((self.n_class,", "** 2 return expect def getBlock1(self, theta): # 常见的PQC block QG = QuantumGate()", "for j in range(nums): for i in range(2**self.n_qubit): psi[j,i] = (1+np.random.normal(0,sigma,1)) * init[i]", "/ 4 right[num_para] = right[num_para] + pi / 4 # 左边 out_l =", "+ out_l[self.map[i][1], 0] ** 2 # 右边 out_r = self.getBlock1(theta=right) @ init expect_r", "self.nums] = self.getExpectation1(theta=theta, init=init[n].reshape(len(init[n]), 1)) test_acc = self.get_accuracy(test_expect, self.label[self.nums:]) return test_acc def get_accuracy(self,", "- expect_l def getExpectation1(self, theta, init): res = self.getBlock1(theta=theta) @ init expect =", "+ 3])) U2 = np.kron(QG.C1nU(n=0, U=self.Ry(theta=theta[lay * 8 + 4])), np.eye(4)) U3 =", "size=int(13264 * prop / 2)) row = np.hstack((a, b)) col = np.hstack((b, a))", "U4 @ U3 @ U2 @ U1 return U def Softmax(self, x): A", "4 right[num_para] = right[num_para] + pi / 4 # 左边 out_l = self.getBlock1(theta=left)", "# adj中共有13264条边, # 我们假设残差项矩阵不为0的元素为13264*prop, # 同时应保证其范数为2708*prop a = np.random.randint(low=0, high=2708, size=int(13264 * prop", "in range(len(theta)): grad_e = self.getGradient1(theta, t, psi[i].reshape(len(psi[i]), 1)) soft_e = 
self.Softmax(deepcopy(expect[epoch, i])) delta[t]", "np import scipy.sparse as sp from numpy import pi, sin, cos, sqrt, exp", "= self.getBlock1(theta=left) @ init expect_l = np.zeros(self.n_class) for i in range(self.n_class): expect_l[i] =", "acc += 1 return acc def get_residuals(self, prop): # adj中共有13264条边, # 我们假设残差项矩阵不为0的元素为13264*prop, #", "np.zeros((epochs, self.nums, self.n_class)) acc = np.zeros(epochs) test_acc = np.zeros(epochs) psi = init[0:self.nums] #", "8 + 0]), self.Ry(theta[lay * 8 + 1])), self.Ry(theta[lay * 8 + 2])),", "range(epochs): delta = np.zeros_like(theta) # 每个epoch更新一次梯度 for i in range(self.nums): # 先根据现有的参数计算一次期望概率 expect[epoch,", "psi = np.zeros((nums,2**self.n_qubit)) for j in range(nums): for i in range(2**self.n_qubit): psi[j,i] =", "'test_acc', test_acc[epoch]) return theta, loss, acc, test_acc def getGradient1(self, theta, num_para, init): #", "归一化 def normalize(self,psi): h,l = psi.shape # h是psi的个数,l是每个psi的维度 for j in range(h): s", "+ 5]))), QG.I) U4 = np.kron(np.eye(4), QG.C1nU(n=0, U=self.Ry(theta=theta[lay * 8 + 6]))) U5", "= 0 for j in range(expect.shape[0]): arg = np.argmax(expect[j]) if arg == label[j]:", "i in range(self.n_class): expect_r[i] = out_r[self.map[i][0], 0] ** 2 + out_r[self.map[i][1], 0] **", "y[i]).reshape((1, self.n_class)) @ grad_e.reshape((self.n_class, 1)) theta -= delta / self.nums # 更新参数 tmp", "* 8 + 3])) U2 = np.kron(QG.C1nU(n=0, U=self.Ry(theta=theta[lay * 8 + 4])), np.eye(4))", "= U @ U5 @ U4 @ U3 @ U2 @ U1 return", "grad_e.reshape((self.n_class, 1)) theta -= delta / self.nums # 更新参数 tmp = 0 for", "self.nums = nums self.label = label self.n_class = n_class self.n_qubit = n_qubit self.map", "np.array([[cos(theta), -sin(theta)], [sin(theta), cos(theta)]]) def test(self, theta, init): test_expect = np.zeros((len(self.label) - self.nums,", "self.get_accuracy(test_expect, self.label[self.nums:]) return test_acc def get_accuracy(self, expect, label): # expect的shape为:[num, n_class] acc =", 
"(1+np.random.normal(0,sigma,1)) * init[i] return psi #随机采样,默认值10 def randomSample(self,psi,label): ind = random.sample(list(range(0,len(psi))),self.nums) return psi[ind],label[ind]", "* 8 + 5]))), QG.I) U4 = np.kron(np.eye(4), QG.C1nU(n=0, U=self.Ry(theta=theta[lay * 8 +", "= sqrt(sum(psi[j]**2)) for i in range(l): psi[j,i] = psi[j,i] / s return psi", "U=self.Ry(theta=theta[lay * 8 + 5]))), QG.I) U4 = np.kron(np.eye(4), QG.C1nU(n=0, U=self.Ry(theta=theta[lay * 8", "expect def getBlock1(self, theta): # 常见的PQC block QG = QuantumGate() layer = len(theta)", "U3 = np.kron(np.kron(QG.I, QG.C1nU(n=0, U=self.Ry(theta=theta[lay * 8 + 5]))), QG.I) U4 = np.kron(np.eye(4),", "def get_accuracy(self, expect, label): # expect的shape为:[num, n_class] acc = 0 for j in", "for t in range(len(theta)): grad_e = self.getGradient1(theta, t, psi[i].reshape(len(psi[i]), 1)) soft_e = self.Softmax(deepcopy(expect[epoch,", "1:[2,3], 2:[4,5], 3:[6,7], 4:[8,9], 5:[10,11], 6:[12,13]} def uniformstate(self,sigma,nums,init): psi = np.zeros((nums,2**self.n_qubit)) for j", "= tmp acc[epoch] = self.get_accuracy(expect[epoch], label) test_acc[epoch] = self.test(theta=theta, init=init) print('第', epoch, '次迭代,',", "= res[self.map[i][0], 0] ** 2 + res[self.map[i][1], 0] ** 2 return expect def", "in range(nums): for i in range(2**self.n_qubit): psi[j,i] = (1+np.random.normal(0,sigma,1)) * init[i] return psi", "= psi.shape # h是psi的个数,l是每个psi的维度 for j in range(h): s = sqrt(sum(psi[j]**2)) for i", "先根据现有的参数计算一次期望概率 expect[epoch, i] = self.getExpectation1(theta, psi[i].reshape(len(psi[i]), 1)) for t in range(len(theta)): grad_e =", "out_l[self.map[i][1], 0] ** 2 # 右边 out_r = self.getBlock1(theta=right) @ init expect_r =", "@ U4 @ U3 @ U2 @ U1 return U def Softmax(self, x):", "test_acc def get_accuracy(self, expect, label): # expect的shape为:[num, n_class] acc = 0 for j", "randomSample(self,psi,label): ind = random.sample(list(range(0,len(psi))),self.nums) return psi[ind],label[ind] # 归一化 def normalize(self,psi): 
h,l = psi.shape", "np.zeros(self.n_class) for i in range(self.n_class): expect_r[i] = out_r[self.map[i][0], 0] ** 2 + out_r[self.map[i][1],", "import random from QuantumGate import QuantumGate import numpy as np import scipy.sparse as", "# 右边 out_r = self.getBlock1(theta=right) @ init expect_r = np.zeros(self.n_class) for i in", "2)) b = np.random.randint(low=0, high=2708, size=int(13264 * prop / 2)) row = np.hstack((a,", "prop / 2)) b = np.random.randint(low=0, high=2708, size=int(13264 * prop / 2)) row", "random.sample(list(range(0,len(psi))),self.nums) return psi[ind],label[ind] # 归一化 def normalize(self,psi): h,l = psi.shape # h是psi的个数,l是每个psi的维度 for", "sum(np.exp(x)) for k in range(len(x)): x[k] = np.exp(x[k]) / A return x def", "__init__(self, n_qubit, n_class, nums, label): self.nums = nums self.label = label self.n_class =", "ind = random.sample(list(range(0,len(psi))),self.nums) return psi[ind],label[ind] # 归一化 def normalize(self,psi): h,l = psi.shape #", "if arg == label[j]: acc += 1 return acc def get_residuals(self, prop): #", "常见的PQC block QG = QuantumGate() layer = len(theta) // 8 U = np.eye(2", "= psi[j,i] / s return psi def train1(self, theta, init, label, epochs, lr):", "in range(self.nums): y[k, label[k]] = 1 loss = np.zeros(epochs) for epoch in range(epochs):", "self.getExpectation1(theta, psi[i].reshape(len(psi[i]), 1)) for t in range(len(theta)): grad_e = self.getGradient1(theta, t, psi[i].reshape(len(psi[i]), 1))", "= n_qubit self.map = {0:[0,1], 1:[2,3], 2:[4,5], 3:[6,7], 4:[8,9], 5:[10,11], 6:[12,13]} def uniformstate(self,sigma,nums,init):", "2 return expect_r - expect_l def getExpectation1(self, theta, init): res = self.getBlock1(theta=theta) @", "label[k]] = 1 loss = np.zeros(epochs) for epoch in range(epochs): delta = np.zeros_like(theta)", "np.kron(self.Ry(theta[lay * 8 + 0]), self.Ry(theta[lay * 8 + 1])), self.Ry(theta[lay * 8", "high=2708, size=int(13264 * prop / 2)) row = np.hstack((a, b)) col = np.hstack((b,", "for epoch in 
range(epochs): delta = np.zeros_like(theta) # 每个epoch更新一次梯度 for i in range(self.nums):", "self.label[self.nums:]) return test_acc def get_accuracy(self, expect, label): # expect的shape为:[num, n_class] acc = 0", "test_acc = np.zeros(epochs) psi = init[0:self.nums] # 训练集数据 # y = label y", "delta / self.nums # 更新参数 tmp = 0 for i in range(self.nums): tmp", "deepcopy(theta) left[num_para] = left[num_para] - pi / 4 right[num_para] = right[num_para] + pi", "out_r[self.map[i][0], 0] ** 2 + out_r[self.map[i][1], 0] ** 2 return expect_r - expect_l", "np.hstack((a, b)) col = np.hstack((b, a)) data = (np.ones_like(row) - 2 * np.random.randint(low=0,", "psi[ind],label[ind] # 归一化 def normalize(self,psi): h,l = psi.shape # h是psi的个数,l是每个psi的维度 for j in", "= (1+np.random.normal(0,sigma,1)) * init[i] return psi #随机采样,默认值10 def randomSample(self,psi,label): ind = random.sample(list(range(0,len(psi))),self.nums) return", "s return psi def train1(self, theta, init, label, epochs, lr): # 输入为随机采样后的量子态集合 expect", "Softmax(self, x): A = sum(np.exp(x)) for k in range(len(x)): x[k] = np.exp(x[k]) /", "left[num_para] = left[num_para] - pi / 4 right[num_para] = right[num_para] + pi /", "acc[epoch], 'test_acc', test_acc[epoch]) return theta, loss, acc, test_acc def getGradient1(self, theta, num_para, init):", "n_class self.n_qubit = n_qubit self.map = {0:[0,1], 1:[2,3], 2:[4,5], 3:[6,7], 4:[8,9], 5:[10,11], 6:[12,13]}", "res = self.getBlock1(theta=theta) @ init expect = np.zeros(self.n_class) for i in range(self.n_class): expect[i]", "theta -= delta / self.nums # 更新参数 tmp = 0 for i in", "# 左边 out_l = self.getBlock1(theta=left) @ init expect_l = np.zeros(self.n_class) for i in", "@ init expect_r = np.zeros(self.n_class) for i in range(self.n_class): expect_r[i] = out_r[self.map[i][0], 0]", "= np.eye(2 ** self.n_qubit) for lay in range(layer): U1 = np.kron( np.kron( np.kron(self.Ry(theta[lay", "2 # 右边 out_r = self.getBlock1(theta=right) @ init expect_r = np.zeros(self.n_class) for i", "* (soft_e - 
y[i]).reshape((1, self.n_class)) @ grad_e.reshape((self.n_class, 1)) theta -= delta / self.nums", "lay in range(layer): U1 = np.kron( np.kron( np.kron(self.Ry(theta[lay * 8 + 0]), self.Ry(theta[lay", "init[i] return psi #随机采样,默认值10 def randomSample(self,psi,label): ind = random.sample(list(range(0,len(psi))),self.nums) return psi[ind],label[ind] # 归一化", "deepcopy(theta) right = deepcopy(theta) left[num_para] = left[num_para] - pi / 4 right[num_para] =", "def get_residuals(self, prop): # adj中共有13264条边, # 我们假设残差项矩阵不为0的元素为13264*prop, # 同时应保证其范数为2708*prop a = np.random.randint(low=0, high=2708,", "Ry(self, theta): return np.array([[cos(theta), -sin(theta)], [sin(theta), cos(theta)]]) def test(self, theta, init): test_expect =", "cos(theta)]]) def test(self, theta, init): test_expect = np.zeros((len(self.label) - self.nums, self.n_class)) for n", "U3 @ U2 @ U1 return U def Softmax(self, x): A = sum(np.exp(x))", "3:[6,7], 4:[8,9], 5:[10,11], 6:[12,13]} def uniformstate(self,sigma,nums,init): psi = np.zeros((nums,2**self.n_qubit)) for j in range(nums):", "= np.zeros(epochs) psi = init[0:self.nums] # 训练集数据 # y = label y =", "return acc def get_residuals(self, prop): # adj中共有13264条边, # 我们假设残差项矩阵不为0的元素为13264*prop, # 同时应保证其范数为2708*prop a =", "label[j]: acc += 1 return acc def get_residuals(self, prop): # adj中共有13264条边, # 我们假设残差项矩阵不为0的元素为13264*prop,", "init expect_l = np.zeros(self.n_class) for i in range(self.n_class): expect_l[i] = out_l[self.map[i][0], 0] **", "out_l = self.getBlock1(theta=left) @ init expect_l = np.zeros(self.n_class) for i in range(self.n_class): expect_l[i]", "return test_acc def get_accuracy(self, expect, label): # expect的shape为:[num, n_class] acc = 0 for", "test_expect = np.zeros((len(self.label) - self.nums, self.n_class)) for n in range(self.nums, len(self.label)): test_expect[n -", "= np.zeros(epochs) test_acc = np.zeros(epochs) psi = init[0:self.nums] # 训练集数据 # y =", "= np.zeros((len(self.label) - self.nums, self.n_class)) for n in range(self.nums, 
len(self.label)): test_expect[n - self.nums]", "= np.zeros(self.n_class) for i in range(self.n_class): expect_r[i] = out_r[self.map[i][0], 0] ** 2 +", "np.exp(x[k]) / A return x def partial_NLL(self, x, y): return sum(x - y)", "in range(self.nums): # 先根据现有的参数计算一次期望概率 expect[epoch, i] = self.getExpectation1(theta, psi[i].reshape(len(psi[i]), 1)) for t in", "U=self.Ry(theta=theta[lay * 8 + 4])), np.eye(4)) U3 = np.kron(np.kron(QG.I, QG.C1nU(n=0, U=self.Ry(theta=theta[lay * 8", "= self.getExpectation1(theta, psi[i].reshape(len(psi[i]), 1)) for t in range(len(theta)): grad_e = self.getGradient1(theta, t, psi[i].reshape(len(psi[i]),", "cos, sqrt, exp from copy import deepcopy class Networks: def __init__(self, n_qubit, n_class,", "partial_NLL(self, x, y): return sum(x - y) def Ry(self, theta): return np.array([[cos(theta), -sin(theta)],", "expect = np.zeros(self.n_class) for i in range(self.n_class): expect[i] = res[self.map[i][0], 0] ** 2", "/ 4 # 左边 out_l = self.getBlock1(theta=left) @ init expect_l = np.zeros(self.n_class) for", "getGradient1(self, theta, num_para, init): # 返回PSR的结果 left = deepcopy(theta) right = deepcopy(theta) left[num_para]", "0 for i in range(self.nums): tmp -= np.log(expect[epoch, i, label[i]]) # 计算损失函数 loss[epoch]", "== label[j]: acc += 1 return acc def get_residuals(self, prop): # adj中共有13264条边, #", "self.nums, self.n_class)) for n in range(self.nums, len(self.label)): test_expect[n - self.nums] = self.getExpectation1(theta=theta, init=init[n].reshape(len(init[n]),", "in range(expect.shape[0]): arg = np.argmax(expect[j]) if arg == label[j]: acc += 1 return", "range(l): psi[j,i] = psi[j,i] / s return psi def train1(self, theta, init, label,", "self.nums # 更新参数 tmp = 0 for i in range(self.nums): tmp -= np.log(expect[epoch,", "acc, test_acc def getGradient1(self, theta, num_para, init): # 返回PSR的结果 left = deepcopy(theta) right", "acc = np.zeros(epochs) test_acc = np.zeros(epochs) psi = init[0:self.nums] # 训练集数据 # y", "7])) U = U @ U5 @ U4 @ U3 @ 
U2 @", "b = np.random.randint(low=0, high=2708, size=int(13264 * prop / 2)) row = np.hstack((a, b))", "y = label y = np.zeros((self.nums, self.n_class)) for k in range(self.nums): y[k, label[k]]", "init[0:self.nums] # 训练集数据 # y = label y = np.zeros((self.nums, self.n_class)) for k", "import pi, sin, cos, sqrt, exp from copy import deepcopy class Networks: def", "in range(layer): U1 = np.kron( np.kron( np.kron(self.Ry(theta[lay * 8 + 0]), self.Ry(theta[lay *", "// 8 U = np.eye(2 ** self.n_qubit) for lay in range(layer): U1 =", "self.getExpectation1(theta=theta, init=init[n].reshape(len(init[n]), 1)) test_acc = self.get_accuracy(test_expect, self.label[self.nums:]) return test_acc def get_accuracy(self, expect, label):", "in range(h): s = sqrt(sum(psi[j]**2)) for i in range(l): psi[j,i] = psi[j,i] /", "label, epochs, lr): # 输入为随机采样后的量子态集合 expect = np.zeros((epochs, self.nums, self.n_class)) acc = np.zeros(epochs)", "* 8 + 2])), self.Ry(theta[lay * 8 + 3])) U2 = np.kron(QG.C1nU(n=0, U=self.Ry(theta=theta[lay", "sum(x - y) def Ry(self, theta): return np.array([[cos(theta), -sin(theta)], [sin(theta), cos(theta)]]) def test(self,", "self.test(theta=theta, init=init) print('第', epoch, '次迭代,', 'loss:', loss[epoch], 'train_acc:', acc[epoch], 'test_acc', test_acc[epoch]) return theta,", "return expect def getBlock1(self, theta): # 常见的PQC block QG = QuantumGate() layer =", "theta, loss, acc, test_acc def getGradient1(self, theta, num_para, init): # 返回PSR的结果 left =", "res[self.map[i][1], 0] ** 2 return expect def getBlock1(self, theta): # 常见的PQC block QG", "for j in range(h): s = sqrt(sum(psi[j]**2)) for i in range(l): psi[j,i] =", "h,l = psi.shape # h是psi的个数,l是每个psi的维度 for j in range(h): s = sqrt(sum(psi[j]**2)) for", "init, label, epochs, lr): # 输入为随机采样后的量子态集合 expect = np.zeros((epochs, self.nums, self.n_class)) acc =", "for i in range(self.nums): # 先根据现有的参数计算一次期望概率 expect[epoch, i] = self.getExpectation1(theta, psi[i].reshape(len(psi[i]), 1)) for", "arg == label[j]: acc += 
1 return acc def get_residuals(self, prop): # adj中共有13264条边,", "copy import deepcopy class Networks: def __init__(self, n_qubit, n_class, nums, label): self.nums =", "b)) col = np.hstack((b, a)) data = (np.ones_like(row) - 2 * np.random.randint(low=0, high=2,", "loss, acc, test_acc def getGradient1(self, theta, num_para, init): # 返回PSR的结果 left = deepcopy(theta)", "= np.kron( np.kron( np.kron(self.Ry(theta[lay * 8 + 0]), self.Ry(theta[lay * 8 + 1])),", "U @ U5 @ U4 @ U3 @ U2 @ U1 return U", "def getExpectation1(self, theta, init): res = self.getBlock1(theta=theta) @ init expect = np.zeros(self.n_class) for", "i in range(l): psi[j,i] = psi[j,i] / s return psi def train1(self, theta,", "i, label[i]]) # 计算损失函数 loss[epoch] = tmp acc[epoch] = self.get_accuracy(expect[epoch], label) test_acc[epoch] =", "expect_r - expect_l def getExpectation1(self, theta, init): res = self.getBlock1(theta=theta) @ init expect", "-= np.log(expect[epoch, i, label[i]]) # 计算损失函数 loss[epoch] = tmp acc[epoch] = self.get_accuracy(expect[epoch], label)", "0]), self.Ry(theta[lay * 8 + 1])), self.Ry(theta[lay * 8 + 2])), self.Ry(theta[lay *", "np.log(expect[epoch, i, label[i]]) # 计算损失函数 loss[epoch] = tmp acc[epoch] = self.get_accuracy(expect[epoch], label) test_acc[epoch]", "n_qubit, n_class, nums, label): self.nums = nums self.label = label self.n_class = n_class", "8 + 6]))) U5 = QG.UnC1(n=2, U=self.Ry(theta=theta[lay * 8 + 7])) U =", "U = U @ U5 @ U4 @ U3 @ U2 @ U1", "QuantumGate import numpy as np import scipy.sparse as sp from numpy import pi,", "<gh_stars>1-10 import random from QuantumGate import QuantumGate import numpy as np import scipy.sparse", "label[i]]) # 计算损失函数 loss[epoch] = tmp acc[epoch] = self.get_accuracy(expect[epoch], label) test_acc[epoch] = self.test(theta=theta,", "tmp = 0 for i in range(self.nums): tmp -= np.log(expect[epoch, i, label[i]]) #", "arg = np.argmax(expect[j]) if arg == label[j]: acc += 1 return acc def", "'loss:', loss[epoch], 'train_acc:', acc[epoch], 
'test_acc', test_acc[epoch]) return theta, loss, acc, test_acc def getGradient1(self,", "-= delta / self.nums # 更新参数 tmp = 0 for i in range(self.nums):", "test_acc[epoch]) return theta, loss, acc, test_acc def getGradient1(self, theta, num_para, init): # 返回PSR的结果", "range(nums): for i in range(2**self.n_qubit): psi[j,i] = (1+np.random.normal(0,sigma,1)) * init[i] return psi #随机采样,默认值10", "range(len(theta)): grad_e = self.getGradient1(theta, t, psi[i].reshape(len(psi[i]), 1)) soft_e = self.Softmax(deepcopy(expect[epoch, i])) delta[t] +=", "= self.getBlock1(theta=theta) @ init expect = np.zeros(self.n_class) for i in range(self.n_class): expect[i] =", "-sin(theta)], [sin(theta), cos(theta)]]) def test(self, theta, init): test_expect = np.zeros((len(self.label) - self.nums, self.n_class))", "+ res[self.map[i][1], 0] ** 2 return expect def getBlock1(self, theta): # 常见的PQC block", "5:[10,11], 6:[12,13]} def uniformstate(self,sigma,nums,init): psi = np.zeros((nums,2**self.n_qubit)) for j in range(nums): for i", "= self.getGradient1(theta, t, psi[i].reshape(len(psi[i]), 1)) soft_e = self.Softmax(deepcopy(expect[epoch, i])) delta[t] += lr *", "return np.array([[cos(theta), -sin(theta)], [sin(theta), cos(theta)]]) def test(self, theta, init): test_expect = np.zeros((len(self.label) -", "= np.random.randint(low=0, high=2708, size=int(13264 * prop / 2)) row = np.hstack((a, b)) col", "exp from copy import deepcopy class Networks: def __init__(self, n_qubit, n_class, nums, label):", "t, psi[i].reshape(len(psi[i]), 1)) soft_e = self.Softmax(deepcopy(expect[epoch, i])) delta[t] += lr * (soft_e -", "= len(theta) // 8 U = np.eye(2 ** self.n_qubit) for lay in range(layer):", "class Networks: def __init__(self, n_qubit, n_class, nums, label): self.nums = nums self.label =", "h是psi的个数,l是每个psi的维度 for j in range(h): s = sqrt(sum(psi[j]**2)) for i in range(l): psi[j,i]", "psi[j,i] = (1+np.random.normal(0,sigma,1)) * init[i] return psi #随机采样,默认值10 def randomSample(self,psi,label): ind = 
random.sample(list(range(0,len(psi))),self.nums)", "x def partial_NLL(self, x, y): return sum(x - y) def Ry(self, theta): return", "np.zeros(self.n_class) for i in range(self.n_class): expect_l[i] = out_l[self.map[i][0], 0] ** 2 + out_l[self.map[i][1],", "theta): # 常见的PQC block QG = QuantumGate() layer = len(theta) // 8 U", "np.zeros_like(theta) # 每个epoch更新一次梯度 for i in range(self.nums): # 先根据现有的参数计算一次期望概率 expect[epoch, i] = self.getExpectation1(theta,", "= left[num_para] - pi / 4 right[num_para] = right[num_para] + pi / 4", "test_expect[n - self.nums] = self.getExpectation1(theta=theta, init=init[n].reshape(len(init[n]), 1)) test_acc = self.get_accuracy(test_expect, self.label[self.nums:]) return test_acc", "psi = init[0:self.nums] # 训练集数据 # y = label y = np.zeros((self.nums, self.n_class))", "* init[i] return psi #随机采样,默认值10 def randomSample(self,psi,label): ind = random.sample(list(range(0,len(psi))),self.nums) return psi[ind],label[ind] #", "range(expect.shape[0]): arg = np.argmax(expect[j]) if arg == label[j]: acc += 1 return acc", "= QuantumGate() layer = len(theta) // 8 U = np.eye(2 ** self.n_qubit) for", "= deepcopy(theta) right = deepcopy(theta) left[num_para] = left[num_para] - pi / 4 right[num_para]", "acc = 0 for j in range(expect.shape[0]): arg = np.argmax(expect[j]) if arg ==", "训练集数据 # y = label y = np.zeros((self.nums, self.n_class)) for k in range(self.nums):", "* 8 + 6]))) U5 = QG.UnC1(n=2, U=self.Ry(theta=theta[lay * 8 + 7])) U", "import numpy as np import scipy.sparse as sp from numpy import pi, sin,", "in range(self.n_class): expect[i] = res[self.map[i][0], 0] ** 2 + res[self.map[i][1], 0] ** 2", "def __init__(self, n_qubit, n_class, nums, label): self.nums = nums self.label = label self.n_class", "'次迭代,', 'loss:', loss[epoch], 'train_acc:', acc[epoch], 'test_acc', test_acc[epoch]) return theta, loss, acc, test_acc def", "# 训练集数据 # y = label y = np.zeros((self.nums, self.n_class)) for k in", "t in range(len(theta)): grad_e = 
self.getGradient1(theta, t, psi[i].reshape(len(psi[i]), 1)) soft_e = self.Softmax(deepcopy(expect[epoch, i]))", "for i in range(l): psi[j,i] = psi[j,i] / s return psi def train1(self,", "in range(self.n_class): expect_l[i] = out_l[self.map[i][0], 0] ** 2 + out_l[self.map[i][1], 0] ** 2", "getBlock1(self, theta): # 常见的PQC block QG = QuantumGate() layer = len(theta) // 8", "left[num_para] - pi / 4 right[num_para] = right[num_para] + pi / 4 #", "QG = QuantumGate() layer = len(theta) // 8 U = np.eye(2 ** self.n_qubit)", "** 2 # 右边 out_r = self.getBlock1(theta=right) @ init expect_r = np.zeros(self.n_class) for", "2 return expect def getBlock1(self, theta): # 常见的PQC block QG = QuantumGate() layer", "range(len(x)): x[k] = np.exp(x[k]) / A return x def partial_NLL(self, x, y): return", "# 返回PSR的结果 left = deepcopy(theta) right = deepcopy(theta) left[num_para] = left[num_para] - pi", "import deepcopy class Networks: def __init__(self, n_qubit, n_class, nums, label): self.nums = nums", "返回PSR的结果 left = deepcopy(theta) right = deepcopy(theta) left[num_para] = left[num_para] - pi /", "pi, sin, cos, sqrt, exp from copy import deepcopy class Networks: def __init__(self,", "/ self.nums # 更新参数 tmp = 0 for i in range(self.nums): tmp -=", "# 常见的PQC block QG = QuantumGate() layer = len(theta) // 8 U =", "soft_e = self.Softmax(deepcopy(expect[epoch, i])) delta[t] += lr * (soft_e - y[i]).reshape((1, self.n_class)) @", "init): test_expect = np.zeros((len(self.label) - self.nums, self.n_class)) for n in range(self.nums, len(self.label)): test_expect[n", "print('第', epoch, '次迭代,', 'loss:', loss[epoch], 'train_acc:', acc[epoch], 'test_acc', test_acc[epoch]) return theta, loss, acc,", "for k in range(self.nums): y[k, label[k]] = 1 loss = np.zeros(epochs) for epoch", "0] ** 2 return expect_r - expect_l def getExpectation1(self, theta, init): res =", "grad_e = self.getGradient1(theta, t, psi[i].reshape(len(psi[i]), 1)) soft_e = self.Softmax(deepcopy(expect[epoch, i])) delta[t] += lr", "j in 
range(h): s = sqrt(sum(psi[j]**2)) for i in range(l): psi[j,i] = psi[j,i]", "in range(len(x)): x[k] = np.exp(x[k]) / A return x def partial_NLL(self, x, y):", "range(self.nums): tmp -= np.log(expect[epoch, i, label[i]]) # 计算损失函数 loss[epoch] = tmp acc[epoch] =", "right = deepcopy(theta) left[num_para] = left[num_para] - pi / 4 right[num_para] = right[num_para]", "- self.nums] = self.getExpectation1(theta=theta, init=init[n].reshape(len(init[n]), 1)) test_acc = self.get_accuracy(test_expect, self.label[self.nums:]) return test_acc def", "self.n_class = n_class self.n_qubit = n_qubit self.map = {0:[0,1], 1:[2,3], 2:[4,5], 3:[6,7], 4:[8,9],", "as np import scipy.sparse as sp from numpy import pi, sin, cos, sqrt,", "i in range(2**self.n_qubit): psi[j,i] = (1+np.random.normal(0,sigma,1)) * init[i] return psi #随机采样,默认值10 def randomSample(self,psi,label):", "i in range(self.n_class): expect[i] = res[self.map[i][0], 0] ** 2 + res[self.map[i][1], 0] **", "for n in range(self.nums, len(self.label)): test_expect[n - self.nums] = self.getExpectation1(theta=theta, init=init[n].reshape(len(init[n]), 1)) test_acc", "def Softmax(self, x): A = sum(np.exp(x)) for k in range(len(x)): x[k] = np.exp(x[k])", "get_residuals(self, prop): # adj中共有13264条边, # 我们假设残差项矩阵不为0的元素为13264*prop, # 同时应保证其范数为2708*prop a = np.random.randint(low=0, high=2708, size=int(13264", "lr): # 输入为随机采样后的量子态集合 expect = np.zeros((epochs, self.nums, self.n_class)) acc = np.zeros(epochs) test_acc =", "@ init expect_l = np.zeros(self.n_class) for i in range(self.n_class): expect_l[i] = out_l[self.map[i][0], 0]", "# expect的shape为:[num, n_class] acc = 0 for j in range(expect.shape[0]): arg = np.argmax(expect[j])", "- y[i]).reshape((1, self.n_class)) @ grad_e.reshape((self.n_class, 1)) theta -= delta / self.nums # 更新参数", "out_r = self.getBlock1(theta=right) @ init expect_r = np.zeros(self.n_class) for i in range(self.n_class): expect_r[i]", "6:[12,13]} def uniformstate(self,sigma,nums,init): psi = 
np.zeros((nums,2**self.n_qubit)) for j in range(nums): for i in", "pi / 4 # 左边 out_l = self.getBlock1(theta=left) @ init expect_l = np.zeros(self.n_class)", "return theta, loss, acc, test_acc def getGradient1(self, theta, num_para, init): # 返回PSR的结果 left", "adj中共有13264条边, # 我们假设残差项矩阵不为0的元素为13264*prop, # 同时应保证其范数为2708*prop a = np.random.randint(low=0, high=2708, size=int(13264 * prop /", "def normalize(self,psi): h,l = psi.shape # h是psi的个数,l是每个psi的维度 for j in range(h): s =", "* np.random.randint(low=0, high=2, size=len(row))) * prop residuals = sp.coo_matrix((data, (row, col)), shape=(2708, 2708))", "i in range(self.nums): tmp -= np.log(expect[epoch, i, label[i]]) # 计算损失函数 loss[epoch] = tmp", "* prop / 2)) row = np.hstack((a, b)) col = np.hstack((b, a)) data", "np.kron( np.kron(self.Ry(theta[lay * 8 + 0]), self.Ry(theta[lay * 8 + 1])), self.Ry(theta[lay *", "np.zeros((len(self.label) - self.nums, self.n_class)) for n in range(self.nums, len(self.label)): test_expect[n - self.nums] =", "deepcopy class Networks: def __init__(self, n_qubit, n_class, nums, label): self.nums = nums self.label", "** 2 + out_l[self.map[i][1], 0] ** 2 # 右边 out_r = self.getBlock1(theta=right) @", "data = (np.ones_like(row) - 2 * np.random.randint(low=0, high=2, size=len(row))) * prop residuals =", "nums self.label = label self.n_class = n_class self.n_qubit = n_qubit self.map = {0:[0,1],", "= 1 loss = np.zeros(epochs) for epoch in range(epochs): delta = np.zeros_like(theta) #", "= np.zeros((self.nums, self.n_class)) for k in range(self.nums): y[k, label[k]] = 1 loss =", "expect[epoch, i] = self.getExpectation1(theta, psi[i].reshape(len(psi[i]), 1)) for t in range(len(theta)): grad_e = self.getGradient1(theta,", "range(h): s = sqrt(sum(psi[j]**2)) for i in range(l): psi[j,i] = psi[j,i] / s", "# 归一化 def normalize(self,psi): h,l = psi.shape # h是psi的个数,l是每个psi的维度 for j in range(h):", "** self.n_qubit) for lay in range(layer): U1 = np.kron( np.kron( np.kron(self.Ry(theta[lay * 8", "U2 @ U1 return 
U def Softmax(self, x): A = sum(np.exp(x)) for k", "in range(2**self.n_qubit): psi[j,i] = (1+np.random.normal(0,sigma,1)) * init[i] return psi #随机采样,默认值10 def randomSample(self,psi,label): ind", "QuantumGate import QuantumGate import numpy as np import scipy.sparse as sp from numpy", "* 8 + 1])), self.Ry(theta[lay * 8 + 2])), self.Ry(theta[lay * 8 +", "* 8 + 4])), np.eye(4)) U3 = np.kron(np.kron(QG.I, QG.C1nU(n=0, U=self.Ry(theta=theta[lay * 8 +", "prop): # adj中共有13264条边, # 我们假设残差项矩阵不为0的元素为13264*prop, # 同时应保证其范数为2708*prop a = np.random.randint(low=0, high=2708, size=int(13264 *", "self.getBlock1(theta=left) @ init expect_l = np.zeros(self.n_class) for i in range(self.n_class): expect_l[i] = out_l[self.map[i][0],", "init expect_r = np.zeros(self.n_class) for i in range(self.n_class): expect_r[i] = out_r[self.map[i][0], 0] **", "i in range(self.nums): # 先根据现有的参数计算一次期望概率 expect[epoch, i] = self.getExpectation1(theta, psi[i].reshape(len(psi[i]), 1)) for t", "x): A = sum(np.exp(x)) for k in range(len(x)): x[k] = np.exp(x[k]) / A", "np.zeros(epochs) psi = init[0:self.nums] # 训练集数据 # y = label y = np.zeros((self.nums,", "8 + 5]))), QG.I) U4 = np.kron(np.eye(4), QG.C1nU(n=0, U=self.Ry(theta=theta[lay * 8 + 6])))", "= np.zeros((nums,2**self.n_qubit)) for j in range(nums): for i in range(2**self.n_qubit): psi[j,i] = (1+np.random.normal(0,sigma,1))", "theta, num_para, init): # 返回PSR的结果 left = deepcopy(theta) right = deepcopy(theta) left[num_para] =", "同时应保证其范数为2708*prop a = np.random.randint(low=0, high=2708, size=int(13264 * prop / 2)) b = np.random.randint(low=0,", "np.random.randint(low=0, high=2708, size=int(13264 * prop / 2)) b = np.random.randint(low=0, high=2708, size=int(13264 *", "@ U3 @ U2 @ U1 return U def Softmax(self, x): A =", "@ U2 @ U1 return U def Softmax(self, x): A = sum(np.exp(x)) for", "label): self.nums = nums self.label = label self.n_class = n_class self.n_qubit = n_qubit", "theta, init, label, epochs, lr): # 输入为随机采样后的量子态集合 expect = np.zeros((epochs, 
self.nums, self.n_class)) acc", "range(self.nums): y[k, label[k]] = 1 loss = np.zeros(epochs) for epoch in range(epochs): delta", "U=self.Ry(theta=theta[lay * 8 + 6]))) U5 = QG.UnC1(n=2, U=self.Ry(theta=theta[lay * 8 + 7]))", "psi[j,i] / s return psi def train1(self, theta, init, label, epochs, lr): #", "8 + 4])), np.eye(4)) U3 = np.kron(np.kron(QG.I, QG.C1nU(n=0, U=self.Ry(theta=theta[lay * 8 + 5]))),", "right[num_para] + pi / 4 # 左边 out_l = self.getBlock1(theta=left) @ init expect_l", "+ out_r[self.map[i][1], 0] ** 2 return expect_r - expect_l def getExpectation1(self, theta, init):", "for i in range(self.n_class): expect_r[i] = out_r[self.map[i][0], 0] ** 2 + out_r[self.map[i][1], 0]", "2 + res[self.map[i][1], 0] ** 2 return expect def getBlock1(self, theta): # 常见的PQC", "= 0 for i in range(self.nums): tmp -= np.log(expect[epoch, i, label[i]]) # 计算损失函数", "** 2 return expect_r - expect_l def getExpectation1(self, theta, init): res = self.getBlock1(theta=theta)", "label): # expect的shape为:[num, n_class] acc = 0 for j in range(expect.shape[0]): arg =", "** 2 + res[self.map[i][1], 0] ** 2 return expect def getBlock1(self, theta): #", "= np.argmax(expect[j]) if arg == label[j]: acc += 1 return acc def get_residuals(self,", "n_qubit self.map = {0:[0,1], 1:[2,3], 2:[4,5], 3:[6,7], 4:[8,9], 5:[10,11], 6:[12,13]} def uniformstate(self,sigma,nums,init): psi", "np.kron(np.kron(QG.I, QG.C1nU(n=0, U=self.Ry(theta=theta[lay * 8 + 5]))), QG.I) U4 = np.kron(np.eye(4), QG.C1nU(n=0, U=self.Ry(theta=theta[lay", "row = np.hstack((a, b)) col = np.hstack((b, a)) data = (np.ones_like(row) - 2", "= (np.ones_like(row) - 2 * np.random.randint(low=0, high=2, size=len(row))) * prop residuals = sp.coo_matrix((data,", "= np.exp(x[k]) / A return x def partial_NLL(self, x, y): return sum(x -", "sp from numpy import pi, sin, cos, sqrt, exp from copy import deepcopy", "lr * (soft_e - y[i]).reshape((1, self.n_class)) @ grad_e.reshape((self.n_class, 1)) theta -= delta /", "theta, init): res = 
self.getBlock1(theta=theta) @ init expect = np.zeros(self.n_class) for i in", "i in range(self.n_class): expect_l[i] = out_l[self.map[i][0], 0] ** 2 + out_l[self.map[i][1], 0] **", "numpy import pi, sin, cos, sqrt, exp from copy import deepcopy class Networks:", "= deepcopy(theta) left[num_para] = left[num_para] - pi / 4 right[num_para] = right[num_para] +", "8 + 2])), self.Ry(theta[lay * 8 + 3])) U2 = np.kron(QG.C1nU(n=0, U=self.Ry(theta=theta[lay *", "expect_l = np.zeros(self.n_class) for i in range(self.n_class): expect_l[i] = out_l[self.map[i][0], 0] ** 2", "= self.get_accuracy(test_expect, self.label[self.nums:]) return test_acc def get_accuracy(self, expect, label): # expect的shape为:[num, n_class] acc", "scipy.sparse as sp from numpy import pi, sin, cos, sqrt, exp from copy", "* prop / 2)) b = np.random.randint(low=0, high=2708, size=int(13264 * prop / 2))", "self.getBlock1(theta=theta) @ init expect = np.zeros(self.n_class) for i in range(self.n_class): expect[i] = res[self.map[i][0],", "uniformstate(self,sigma,nums,init): psi = np.zeros((nums,2**self.n_qubit)) for j in range(nums): for i in range(2**self.n_qubit): psi[j,i]", "n_class, nums, label): self.nums = nums self.label = label self.n_class = n_class self.n_qubit", "= np.hstack((b, a)) data = (np.ones_like(row) - 2 * np.random.randint(low=0, high=2, size=len(row))) *", "s = sqrt(sum(psi[j]**2)) for i in range(l): psi[j,i] = psi[j,i] / s return", "theta, init): test_expect = np.zeros((len(self.label) - self.nums, self.n_class)) for n in range(self.nums, len(self.label)):", "return expect_r - expect_l def getExpectation1(self, theta, init): res = self.getBlock1(theta=theta) @ init", "/ A return x def partial_NLL(self, x, y): return sum(x - y) def", "* 8 + 7])) U = U @ U5 @ U4 @ U3", "in range(epochs): delta = np.zeros_like(theta) # 每个epoch更新一次梯度 for i in range(self.nums): # 先根据现有的参数计算一次期望概率", "np.zeros(self.n_class) for i in range(self.n_class): expect[i] = res[self.map[i][0], 0] ** 2 + 
res[self.map[i][1],", "# 输入为随机采样后的量子态集合 expect = np.zeros((epochs, self.nums, self.n_class)) acc = np.zeros(epochs) test_acc = np.zeros(epochs)", "label) test_acc[epoch] = self.test(theta=theta, init=init) print('第', epoch, '次迭代,', 'loss:', loss[epoch], 'train_acc:', acc[epoch], 'test_acc',", "= np.zeros(self.n_class) for i in range(self.n_class): expect[i] = res[self.map[i][0], 0] ** 2 +", "+ pi / 4 # 左边 out_l = self.getBlock1(theta=left) @ init expect_l =", "'train_acc:', acc[epoch], 'test_acc', test_acc[epoch]) return theta, loss, acc, test_acc def getGradient1(self, theta, num_para,", "QuantumGate() layer = len(theta) // 8 U = np.eye(2 ** self.n_qubit) for lay", "U4 = np.kron(np.eye(4), QG.C1nU(n=0, U=self.Ry(theta=theta[lay * 8 + 6]))) U5 = QG.UnC1(n=2, U=self.Ry(theta=theta[lay", "init=init[n].reshape(len(init[n]), 1)) test_acc = self.get_accuracy(test_expect, self.label[self.nums:]) return test_acc def get_accuracy(self, expect, label): #", "tmp -= np.log(expect[epoch, i, label[i]]) # 计算损失函数 loss[epoch] = tmp acc[epoch] = self.get_accuracy(expect[epoch],", "6]))) U5 = QG.UnC1(n=2, U=self.Ry(theta=theta[lay * 8 + 7])) U = U @", "expect_l def getExpectation1(self, theta, init): res = self.getBlock1(theta=theta) @ init expect = np.zeros(self.n_class)", "= np.zeros((epochs, self.nums, self.n_class)) acc = np.zeros(epochs) test_acc = np.zeros(epochs) psi = init[0:self.nums]", "self.n_class)) for k in range(self.nums): y[k, label[k]] = 1 loss = np.zeros(epochs) for", "np.random.randint(low=0, high=2708, size=int(13264 * prop / 2)) row = np.hstack((a, b)) col =", "@ U1 return U def Softmax(self, x): A = sum(np.exp(x)) for k in", "+= lr * (soft_e - y[i]).reshape((1, self.n_class)) @ grad_e.reshape((self.n_class, 1)) theta -= delta", "init expect = np.zeros(self.n_class) for i in range(self.n_class): expect[i] = res[self.map[i][0], 0] **", "init): res = self.getBlock1(theta=theta) @ init expect = np.zeros(self.n_class) for i in range(self.n_class):", "= n_class 
self.n_qubit = n_qubit self.map = {0:[0,1], 1:[2,3], 2:[4,5], 3:[6,7], 4:[8,9], 5:[10,11],", "1)) theta -= delta / self.nums # 更新参数 tmp = 0 for i", "in range(self.nums, len(self.label)): test_expect[n - self.nums] = self.getExpectation1(theta=theta, init=init[n].reshape(len(init[n]), 1)) test_acc = self.get_accuracy(test_expect,", "= self.getExpectation1(theta=theta, init=init[n].reshape(len(init[n]), 1)) test_acc = self.get_accuracy(test_expect, self.label[self.nums:]) return test_acc def get_accuracy(self, expect,", "= self.getBlock1(theta=right) @ init expect_r = np.zeros(self.n_class) for i in range(self.n_class): expect_r[i] =", "4:[8,9], 5:[10,11], 6:[12,13]} def uniformstate(self,sigma,nums,init): psi = np.zeros((nums,2**self.n_qubit)) for j in range(nums): for", "loss = np.zeros(epochs) for epoch in range(epochs): delta = np.zeros_like(theta) # 每个epoch更新一次梯度 for", "right[num_para] = right[num_para] + pi / 4 # 左边 out_l = self.getBlock1(theta=left) @", "j in range(expect.shape[0]): arg = np.argmax(expect[j]) if arg == label[j]: acc += 1", "= np.zeros(self.n_class) for i in range(self.n_class): expect_l[i] = out_l[self.map[i][0], 0] ** 2 +", "theta): return np.array([[cos(theta), -sin(theta)], [sin(theta), cos(theta)]]) def test(self, theta, init): test_expect = np.zeros((len(self.label)", "high=2, size=len(row))) * prop residuals = sp.coo_matrix((data, (row, col)), shape=(2708, 2708)) return residuals", "n in range(self.nums, len(self.label)): test_expect[n - self.nums] = self.getExpectation1(theta=theta, init=init[n].reshape(len(init[n]), 1)) test_acc =", "= np.kron(QG.C1nU(n=0, U=self.Ry(theta=theta[lay * 8 + 4])), np.eye(4)) U3 = np.kron(np.kron(QG.I, QG.C1nU(n=0, U=self.Ry(theta=theta[lay", "range(self.n_class): expect_l[i] = out_l[self.map[i][0], 0] ** 2 + out_l[self.map[i][1], 0] ** 2 #", "np.zeros(epochs) test_acc = np.zeros(epochs) psi = init[0:self.nums] # 训练集数据 # y = label", "return psi #随机采样,默认值10 def randomSample(self,psi,label): ind = 
random.sample(list(range(0,len(psi))),self.nums) return psi[ind],label[ind] # 归一化 def", "[sin(theta), cos(theta)]]) def test(self, theta, init): test_expect = np.zeros((len(self.label) - self.nums, self.n_class)) for", "test_acc = self.get_accuracy(test_expect, self.label[self.nums:]) return test_acc def get_accuracy(self, expect, label): # expect的shape为:[num, n_class]", "col = np.hstack((b, a)) data = (np.ones_like(row) - 2 * np.random.randint(low=0, high=2, size=len(row)))", "= self.test(theta=theta, init=init) print('第', epoch, '次迭代,', 'loss:', loss[epoch], 'train_acc:', acc[epoch], 'test_acc', test_acc[epoch]) return", "from QuantumGate import QuantumGate import numpy as np import scipy.sparse as sp from", "np.hstack((b, a)) data = (np.ones_like(row) - 2 * np.random.randint(low=0, high=2, size=len(row))) * prop", "self.Ry(theta[lay * 8 + 2])), self.Ry(theta[lay * 8 + 3])) U2 = np.kron(QG.C1nU(n=0,", "U2 = np.kron(QG.C1nU(n=0, U=self.Ry(theta=theta[lay * 8 + 4])), np.eye(4)) U3 = np.kron(np.kron(QG.I, QG.C1nU(n=0,", "train1(self, theta, init, label, epochs, lr): # 输入为随机采样后的量子态集合 expect = np.zeros((epochs, self.nums, self.n_class))", "1 loss = np.zeros(epochs) for epoch in range(epochs): delta = np.zeros_like(theta) # 每个epoch更新一次梯度", "epoch in range(epochs): delta = np.zeros_like(theta) # 每个epoch更新一次梯度 for i in range(self.nums): #", "= label self.n_class = n_class self.n_qubit = n_qubit self.map = {0:[0,1], 1:[2,3], 2:[4,5],", "- pi / 4 right[num_para] = right[num_para] + pi / 4 # 左边", "expect_r[i] = out_r[self.map[i][0], 0] ** 2 + out_r[self.map[i][1], 0] ** 2 return expect_r", "def getBlock1(self, theta): # 常见的PQC block QG = QuantumGate() layer = len(theta) //", "normalize(self,psi): h,l = psi.shape # h是psi的个数,l是每个psi的维度 for j in range(h): s = sqrt(sum(psi[j]**2))", "k in range(self.nums): y[k, label[k]] = 1 loss = np.zeros(epochs) for epoch in", "3])) U2 = np.kron(QG.C1nU(n=0, U=self.Ry(theta=theta[lay * 8 + 4])), np.eye(4)) U3 = np.kron(np.kron(QG.I,", "a)) 
data = (np.ones_like(row) - 2 * np.random.randint(low=0, high=2, size=len(row))) * prop residuals", "left = deepcopy(theta) right = deepcopy(theta) left[num_para] = left[num_para] - pi / 4", "更新参数 tmp = 0 for i in range(self.nums): tmp -= np.log(expect[epoch, i, label[i]])", "# 先根据现有的参数计算一次期望概率 expect[epoch, i] = self.getExpectation1(theta, psi[i].reshape(len(psi[i]), 1)) for t in range(len(theta)): grad_e", "左边 out_l = self.getBlock1(theta=left) @ init expect_l = np.zeros(self.n_class) for i in range(self.n_class):", "y) def Ry(self, theta): return np.array([[cos(theta), -sin(theta)], [sin(theta), cos(theta)]]) def test(self, theta, init):", "/ s return psi def train1(self, theta, init, label, epochs, lr): # 输入为随机采样后的量子态集合", "U1 return U def Softmax(self, x): A = sum(np.exp(x)) for k in range(len(x)):", "def randomSample(self,psi,label): ind = random.sample(list(range(0,len(psi))),self.nums) return psi[ind],label[ind] # 归一化 def normalize(self,psi): h,l =", "sqrt, exp from copy import deepcopy class Networks: def __init__(self, n_qubit, n_class, nums,", "np.zeros((self.nums, self.n_class)) for k in range(self.nums): y[k, label[k]] = 1 loss = np.zeros(epochs)", "np.zeros((nums,2**self.n_qubit)) for j in range(nums): for i in range(2**self.n_qubit): psi[j,i] = (1+np.random.normal(0,sigma,1)) *", "= init[0:self.nums] # 训练集数据 # y = label y = np.zeros((self.nums, self.n_class)) for" ]
[ "1 self.save(update_fields=['views']) def save(self, force_insert=False, force_update=False, using=None, update_fields=None): if not self.slug or not", "models.OneToOneField(User, on_delete=models.CASCADE, related_name='profile') avatar = models.ImageField(upload_to=user_directory_path, verbose_name='头像') class AuthorManager(models.Manager): def get_queryset(self): return super().get_queryset().filter(role='A')", "uuid import os class Article(models.Model): STATUS_CHOICES = ( ('d', '草稿'), ('p', '发表'), )", "'avatar', filename) class UserProfile(models.Model): user = models.OneToOneField(User, on_delete=models.CASCADE, related_name='profile') avatar = models.ImageField(upload_to=user_directory_path, verbose_name='头像')", "models from django.contrib.auth.models import User from django.urls import reverse from django.utils.timezone import now", "max_length=200, unique=True) slug = models.SlugField('slug', max_length=60) body = models.TextField('正文') pub_date = models.DateTimeField('发布时间', default=now,", "self.title def get_absolute_url(self): return reverse('blog:article_detail', args=[str(self.id)]) def viewed(self): self.views += 1 self.save(update_fields=['views']) def", "= models.PositiveIntegerField('浏览量', default=0) author = models.ForeignKey(User, verbose_name=\"作者\", on_delete=models.CASCADE) tags = models.ManyToManyField('Tag', verbose_name=\"标签集合\", blank=True)", "django.urls import reverse from django.utils.timezone import now from django.template.defaultfilters import slugify import uuid", "= models.DateTimeField('创建时间', auto_now_add=True) mod_date = models.DateTimeField('修改时间', auto_now=True) status = models.CharField('文章状态', max_length=1, choices=STATUS_CHOICES) views", "get_queryset(self): return super().get_queryset().filter(role='A') class EditorManager(models.Manager): def get_queryset(self): return super().get_queryset().filter(role='E') class Person(models.Model): first_name =", "# do something class Meta: ordering = ['-pub_date'] verbose_name = 
'article' def user_directory_path(instance,", "models.DateTimeField('发布时间', default=now, null=True) create_date = models.DateTimeField('创建时间', auto_now_add=True) mod_date = models.DateTimeField('修改时间', auto_now=True) status =", "def get_queryset(self): return super().get_queryset().filter(role='E') class Person(models.Model): first_name = models.CharField(max_length=50) last_name = models.CharField(max_length=50) role", "slug = models.SlugField('slug', max_length=60) body = models.TextField('正文') pub_date = models.DateTimeField('发布时间', default=now, null=True) create_date", "force_update=False, using=None, update_fields=None): if not self.slug or not self.id: self.slug = slugify(self.title) super(Article,", "'{}.{}'.format(uuid.uuid4().hex[:10], ext) # return the whole path to the file return os.path.join(instance.user.id, 'avatar',", "from django.template.defaultfilters import slugify import uuid import os class Article(models.Model): STATUS_CHOICES = (", "choices=(('A', 'Author'), ('E', 'Editor'))) objects = models.Manager() authors = AuthorManager() editors = EditorManager()", "= models.TextField('正文') pub_date = models.DateTimeField('发布时间', default=now, null=True) create_date = models.DateTimeField('创建时间', auto_now_add=True) mod_date =", "ext) # return the whole path to the file return os.path.join(instance.user.id, 'avatar', filename)", "return os.path.join(instance.user.id, 'avatar', filename) class UserProfile(models.Model): user = models.OneToOneField(User, on_delete=models.CASCADE, related_name='profile') avatar =", "filename.split('.')[-1] filename = '{}.{}'.format(uuid.uuid4().hex[:10], ext) # return the whole path to the file", "the whole path to the file return os.path.join(instance.user.id, 'avatar', filename) class UserProfile(models.Model): user", "the file return os.path.join(instance.user.id, 'avatar', filename) class UserProfile(models.Model): user = models.OneToOneField(User, on_delete=models.CASCADE, related_name='profile')", "verbose_name = 
'article' def user_directory_path(instance, filename): ext = filename.split('.')[-1] filename = '{}.{}'.format(uuid.uuid4().hex[:10], ext)", "self).save() # do something class Meta: ordering = ['-pub_date'] verbose_name = 'article' def", "models.CharField('文章状态', max_length=1, choices=STATUS_CHOICES) views = models.PositiveIntegerField('浏览量', default=0) author = models.ForeignKey(User, verbose_name=\"作者\", on_delete=models.CASCADE) tags", "unique=True) slug = models.SlugField('slug', max_length=60) body = models.TextField('正文') pub_date = models.DateTimeField('发布时间', default=now, null=True)", "django.utils.timezone import now from django.template.defaultfilters import slugify import uuid import os class Article(models.Model):", "class Meta: ordering = ['-pub_date'] verbose_name = 'article' def user_directory_path(instance, filename): ext =", "related_name='profile') avatar = models.ImageField(upload_to=user_directory_path, verbose_name='头像') class AuthorManager(models.Manager): def get_queryset(self): return super().get_queryset().filter(role='A') class EditorManager(models.Manager):", "force_insert=False, force_update=False, using=None, update_fields=None): if not self.slug or not self.id: self.slug = slugify(self.title)", "models.CharField(max_length=50) role = models.CharField(max_length=1, choices=(('A', 'Author'), ('E', 'Editor'))) objects = models.Manager() authors =", "Meta: ordering = ['-pub_date'] verbose_name = 'article' def user_directory_path(instance, filename): ext = filename.split('.')[-1]", "verbose_name='头像') class AuthorManager(models.Manager): def get_queryset(self): return super().get_queryset().filter(role='A') class EditorManager(models.Manager): def get_queryset(self): return super().get_queryset().filter(role='E')", "models.CharField(max_length=1, choices=(('A', 'Author'), ('E', 'Editor'))) objects = models.Manager() authors = AuthorManager() editors =", "class Article(models.Model): STATUS_CHOICES = ( ('d', '草稿'), ('p', '发表'), ) title = 
models.CharField('标题',", "models.DateTimeField('创建时间', auto_now_add=True) mod_date = models.DateTimeField('修改时间', auto_now=True) status = models.CharField('文章状态', max_length=1, choices=STATUS_CHOICES) views =", "= filename.split('.')[-1] filename = '{}.{}'.format(uuid.uuid4().hex[:10], ext) # return the whole path to the", "filename = '{}.{}'.format(uuid.uuid4().hex[:10], ext) # return the whole path to the file return", "reverse from django.utils.timezone import now from django.template.defaultfilters import slugify import uuid import os", "class EditorManager(models.Manager): def get_queryset(self): return super().get_queryset().filter(role='E') class Person(models.Model): first_name = models.CharField(max_length=50) last_name =", "import slugify import uuid import os class Article(models.Model): STATUS_CHOICES = ( ('d', '草稿'),", "= models.DateTimeField('修改时间', auto_now=True) status = models.CharField('文章状态', max_length=1, choices=STATUS_CHOICES) views = models.PositiveIntegerField('浏览量', default=0) author", "slugify(self.title) super(Article, self).save() # do something class Meta: ordering = ['-pub_date'] verbose_name =", "create_date = models.DateTimeField('创建时间', auto_now_add=True) mod_date = models.DateTimeField('修改时间', auto_now=True) status = models.CharField('文章状态', max_length=1, choices=STATUS_CHOICES)", "# return the whole path to the file return os.path.join(instance.user.id, 'avatar', filename) class", "auto_now_add=True) mod_date = models.DateTimeField('修改时间', auto_now=True) status = models.CharField('文章状态', max_length=1, choices=STATUS_CHOICES) views = models.PositiveIntegerField('浏览量',", "max_length=60) body = models.TextField('正文') pub_date = models.DateTimeField('发布时间', default=now, null=True) create_date = models.DateTimeField('创建时间', auto_now_add=True)", "def viewed(self): self.views += 1 self.save(update_fields=['views']) def save(self, force_insert=False, force_update=False, using=None, update_fields=None): if", "class 
AuthorManager(models.Manager): def get_queryset(self): return super().get_queryset().filter(role='A') class EditorManager(models.Manager): def get_queryset(self): return super().get_queryset().filter(role='E') class", "models.PositiveIntegerField('浏览量', default=0) author = models.ForeignKey(User, verbose_name=\"作者\", on_delete=models.CASCADE) tags = models.ManyToManyField('Tag', verbose_name=\"标签集合\", blank=True) def", "models.TextField('正文') pub_date = models.DateTimeField('发布时间', default=now, null=True) create_date = models.DateTimeField('创建时间', auto_now_add=True) mod_date = models.DateTimeField('修改时间',", "role = models.CharField(max_length=1, choices=(('A', 'Author'), ('E', 'Editor'))) objects = models.Manager() authors = AuthorManager()", "import models from django.contrib.auth.models import User from django.urls import reverse from django.utils.timezone import", "return reverse('blog:article_detail', args=[str(self.id)]) def viewed(self): self.views += 1 self.save(update_fields=['views']) def save(self, force_insert=False, force_update=False,", "models.CharField('标题', max_length=200, unique=True) slug = models.SlugField('slug', max_length=60) body = models.TextField('正文') pub_date = models.DateTimeField('发布时间',", "user_directory_path(instance, filename): ext = filename.split('.')[-1] filename = '{}.{}'.format(uuid.uuid4().hex[:10], ext) # return the whole", "last_name = models.CharField(max_length=50) role = models.CharField(max_length=1, choices=(('A', 'Author'), ('E', 'Editor'))) objects = models.Manager()", "'发表'), ) title = models.CharField('标题', max_length=200, unique=True) slug = models.SlugField('slug', max_length=60) body =", "models.SlugField('slug', max_length=60) body = models.TextField('正文') pub_date = models.DateTimeField('发布时间', default=now, null=True) create_date = models.DateTimeField('创建时间',", "models.ForeignKey(User, verbose_name=\"作者\", on_delete=models.CASCADE) tags = models.ManyToManyField('Tag', verbose_name=\"标签集合\", blank=True) def 
__str__(self): return self.title def", "+= 1 self.save(update_fields=['views']) def save(self, force_insert=False, force_update=False, using=None, update_fields=None): if not self.slug or", "import reverse from django.utils.timezone import now from django.template.defaultfilters import slugify import uuid import", "'草稿'), ('p', '发表'), ) title = models.CharField('标题', max_length=200, unique=True) slug = models.SlugField('slug', max_length=60)", ") title = models.CharField('标题', max_length=200, unique=True) slug = models.SlugField('slug', max_length=60) body = models.TextField('正文')", "['-pub_date'] verbose_name = 'article' def user_directory_path(instance, filename): ext = filename.split('.')[-1] filename = '{}.{}'.format(uuid.uuid4().hex[:10],", "from django.contrib.auth.models import User from django.urls import reverse from django.utils.timezone import now from", "Person(models.Model): first_name = models.CharField(max_length=50) last_name = models.CharField(max_length=50) role = models.CharField(max_length=1, choices=(('A', 'Author'), ('E',", "tags = models.ManyToManyField('Tag', verbose_name=\"标签集合\", blank=True) def __str__(self): return self.title def get_absolute_url(self): return reverse('blog:article_detail',", "return self.title def get_absolute_url(self): return reverse('blog:article_detail', args=[str(self.id)]) def viewed(self): self.views += 1 self.save(update_fields=['views'])", "author = models.ForeignKey(User, verbose_name=\"作者\", on_delete=models.CASCADE) tags = models.ManyToManyField('Tag', verbose_name=\"标签集合\", blank=True) def __str__(self): return", "return super().get_queryset().filter(role='E') class Person(models.Model): first_name = models.CharField(max_length=50) last_name = models.CharField(max_length=50) role = models.CharField(max_length=1,", "import User from django.urls import reverse from django.utils.timezone import now from django.template.defaultfilters import", "= models.ForeignKey(User, verbose_name=\"作者\", 
on_delete=models.CASCADE) tags = models.ManyToManyField('Tag', verbose_name=\"标签集合\", blank=True) def __str__(self): return self.title", "null=True) create_date = models.DateTimeField('创建时间', auto_now_add=True) mod_date = models.DateTimeField('修改时间', auto_now=True) status = models.CharField('文章状态', max_length=1,", "from django.utils.timezone import now from django.template.defaultfilters import slugify import uuid import os class", "slugify import uuid import os class Article(models.Model): STATUS_CHOICES = ( ('d', '草稿'), ('p',", "= models.SlugField('slug', max_length=60) body = models.TextField('正文') pub_date = models.DateTimeField('发布时间', default=now, null=True) create_date =", "= slugify(self.title) super(Article, self).save() # do something class Meta: ordering = ['-pub_date'] verbose_name", "= 'article' def user_directory_path(instance, filename): ext = filename.split('.')[-1] filename = '{}.{}'.format(uuid.uuid4().hex[:10], ext) #", "return super().get_queryset().filter(role='A') class EditorManager(models.Manager): def get_queryset(self): return super().get_queryset().filter(role='E') class Person(models.Model): first_name = models.CharField(max_length=50)", "( ('d', '草稿'), ('p', '发表'), ) title = models.CharField('标题', max_length=200, unique=True) slug =", "path to the file return os.path.join(instance.user.id, 'avatar', filename) class UserProfile(models.Model): user = models.OneToOneField(User,", "'article' def user_directory_path(instance, filename): ext = filename.split('.')[-1] filename = '{}.{}'.format(uuid.uuid4().hex[:10], ext) # return", "verbose_name=\"作者\", on_delete=models.CASCADE) tags = models.ManyToManyField('Tag', verbose_name=\"标签集合\", blank=True) def __str__(self): return self.title def get_absolute_url(self):", "self.views += 1 self.save(update_fields=['views']) def save(self, force_insert=False, force_update=False, using=None, update_fields=None): if not self.slug", "UserProfile(models.Model): user = models.OneToOneField(User, 
on_delete=models.CASCADE, related_name='profile') avatar = models.ImageField(upload_to=user_directory_path, verbose_name='头像') class AuthorManager(models.Manager): def", "= models.ImageField(upload_to=user_directory_path, verbose_name='头像') class AuthorManager(models.Manager): def get_queryset(self): return super().get_queryset().filter(role='A') class EditorManager(models.Manager): def get_queryset(self):", "reverse('blog:article_detail', args=[str(self.id)]) def viewed(self): self.views += 1 self.save(update_fields=['views']) def save(self, force_insert=False, force_update=False, using=None,", "def save(self, force_insert=False, force_update=False, using=None, update_fields=None): if not self.slug or not self.id: self.slug", "filename) class UserProfile(models.Model): user = models.OneToOneField(User, on_delete=models.CASCADE, related_name='profile') avatar = models.ImageField(upload_to=user_directory_path, verbose_name='头像') class", "User from django.urls import reverse from django.utils.timezone import now from django.template.defaultfilters import slugify", "EditorManager(models.Manager): def get_queryset(self): return super().get_queryset().filter(role='E') class Person(models.Model): first_name = models.CharField(max_length=50) last_name = models.CharField(max_length=50)", "first_name = models.CharField(max_length=50) last_name = models.CharField(max_length=50) role = models.CharField(max_length=1, choices=(('A', 'Author'), ('E', 'Editor')))", "import os class Article(models.Model): STATUS_CHOICES = ( ('d', '草稿'), ('p', '发表'), ) title", "save(self, force_insert=False, force_update=False, using=None, update_fields=None): if not self.slug or not self.id: self.slug =", "on_delete=models.CASCADE) tags = models.ManyToManyField('Tag', verbose_name=\"标签集合\", blank=True) def __str__(self): return self.title def get_absolute_url(self): return", "self.save(update_fields=['views']) def save(self, force_insert=False, force_update=False, using=None, update_fields=None): 
if not self.slug or not self.id:", "file return os.path.join(instance.user.id, 'avatar', filename) class UserProfile(models.Model): user = models.OneToOneField(User, on_delete=models.CASCADE, related_name='profile') avatar", "views = models.PositiveIntegerField('浏览量', default=0) author = models.ForeignKey(User, verbose_name=\"作者\", on_delete=models.CASCADE) tags = models.ManyToManyField('Tag', verbose_name=\"标签集合\",", "models.CharField(max_length=50) last_name = models.CharField(max_length=50) role = models.CharField(max_length=1, choices=(('A', 'Author'), ('E', 'Editor'))) objects =", "AuthorManager(models.Manager): def get_queryset(self): return super().get_queryset().filter(role='A') class EditorManager(models.Manager): def get_queryset(self): return super().get_queryset().filter(role='E') class Person(models.Model):", "import now from django.template.defaultfilters import slugify import uuid import os class Article(models.Model): STATUS_CHOICES", "get_absolute_url(self): return reverse('blog:article_detail', args=[str(self.id)]) def viewed(self): self.views += 1 self.save(update_fields=['views']) def save(self, force_insert=False,", "choices=STATUS_CHOICES) views = models.PositiveIntegerField('浏览量', default=0) author = models.ForeignKey(User, verbose_name=\"作者\", on_delete=models.CASCADE) tags = models.ManyToManyField('Tag',", "now from django.template.defaultfilters import slugify import uuid import os class Article(models.Model): STATUS_CHOICES =", "or not self.id: self.slug = slugify(self.title) super(Article, self).save() # do something class Meta:", "on_delete=models.CASCADE, related_name='profile') avatar = models.ImageField(upload_to=user_directory_path, verbose_name='头像') class AuthorManager(models.Manager): def get_queryset(self): return super().get_queryset().filter(role='A') class", "get_queryset(self): return super().get_queryset().filter(role='E') class Person(models.Model): first_name = models.CharField(max_length=50) last_name = 
models.CharField(max_length=50) role =", "from django.db import models from django.contrib.auth.models import User from django.urls import reverse from", "default=now, null=True) create_date = models.DateTimeField('创建时间', auto_now_add=True) mod_date = models.DateTimeField('修改时间', auto_now=True) status = models.CharField('文章状态',", "body = models.TextField('正文') pub_date = models.DateTimeField('发布时间', default=now, null=True) create_date = models.DateTimeField('创建时间', auto_now_add=True) mod_date", "= models.CharField('文章状态', max_length=1, choices=STATUS_CHOICES) views = models.PositiveIntegerField('浏览量', default=0) author = models.ForeignKey(User, verbose_name=\"作者\", on_delete=models.CASCADE)", "os class Article(models.Model): STATUS_CHOICES = ( ('d', '草稿'), ('p', '发表'), ) title =", "def __str__(self): return self.title def get_absolute_url(self): return reverse('blog:article_detail', args=[str(self.id)]) def viewed(self): self.views +=", "self.slug or not self.id: self.slug = slugify(self.title) super(Article, self).save() # do something class", "= models.CharField(max_length=50) role = models.CharField(max_length=1, choices=(('A', 'Author'), ('E', 'Editor'))) objects = models.Manager() authors", "not self.id: self.slug = slugify(self.title) super(Article, self).save() # do something class Meta: ordering", "import uuid import os class Article(models.Model): STATUS_CHOICES = ( ('d', '草稿'), ('p', '发表'),", "args=[str(self.id)]) def viewed(self): self.views += 1 self.save(update_fields=['views']) def save(self, force_insert=False, force_update=False, using=None, update_fields=None):", "('d', '草稿'), ('p', '发表'), ) title = models.CharField('标题', max_length=200, unique=True) slug = models.SlugField('slug',", "update_fields=None): if not self.slug or not self.id: self.slug = slugify(self.title) super(Article, self).save() #", "= ['-pub_date'] verbose_name = 'article' def user_directory_path(instance, filename): ext = filename.split('.')[-1] filename =", "= 
models.ManyToManyField('Tag', verbose_name=\"标签集合\", blank=True) def __str__(self): return self.title def get_absolute_url(self): return reverse('blog:article_detail', args=[str(self.id)])", "max_length=1, choices=STATUS_CHOICES) views = models.PositiveIntegerField('浏览量', default=0) author = models.ForeignKey(User, verbose_name=\"作者\", on_delete=models.CASCADE) tags =", "verbose_name=\"标签集合\", blank=True) def __str__(self): return self.title def get_absolute_url(self): return reverse('blog:article_detail', args=[str(self.id)]) def viewed(self):", "something class Meta: ordering = ['-pub_date'] verbose_name = 'article' def user_directory_path(instance, filename): ext", "self.slug = slugify(self.title) super(Article, self).save() # do something class Meta: ordering = ['-pub_date']", "= models.OneToOneField(User, on_delete=models.CASCADE, related_name='profile') avatar = models.ImageField(upload_to=user_directory_path, verbose_name='头像') class AuthorManager(models.Manager): def get_queryset(self): return", "return the whole path to the file return os.path.join(instance.user.id, 'avatar', filename) class UserProfile(models.Model):", "viewed(self): self.views += 1 self.save(update_fields=['views']) def save(self, force_insert=False, force_update=False, using=None, update_fields=None): if not", "filename): ext = filename.split('.')[-1] filename = '{}.{}'.format(uuid.uuid4().hex[:10], ext) # return the whole path", "avatar = models.ImageField(upload_to=user_directory_path, verbose_name='头像') class AuthorManager(models.Manager): def get_queryset(self): return super().get_queryset().filter(role='A') class EditorManager(models.Manager): def", "class Person(models.Model): first_name = models.CharField(max_length=50) last_name = models.CharField(max_length=50) role = models.CharField(max_length=1, choices=(('A', 'Author'),", "('p', '发表'), ) title = models.CharField('标题', max_length=200, unique=True) slug = models.SlugField('slug', max_length=60) body", "if not self.slug or not 
self.id: self.slug = slugify(self.title) super(Article, self).save() # do", "def user_directory_path(instance, filename): ext = filename.split('.')[-1] filename = '{}.{}'.format(uuid.uuid4().hex[:10], ext) # return the", "ext = filename.split('.')[-1] filename = '{}.{}'.format(uuid.uuid4().hex[:10], ext) # return the whole path to", "os.path.join(instance.user.id, 'avatar', filename) class UserProfile(models.Model): user = models.OneToOneField(User, on_delete=models.CASCADE, related_name='profile') avatar = models.ImageField(upload_to=user_directory_path,", "STATUS_CHOICES = ( ('d', '草稿'), ('p', '发表'), ) title = models.CharField('标题', max_length=200, unique=True)", "to the file return os.path.join(instance.user.id, 'avatar', filename) class UserProfile(models.Model): user = models.OneToOneField(User, on_delete=models.CASCADE,", "= models.CharField(max_length=1, choices=(('A', 'Author'), ('E', 'Editor'))) objects = models.Manager() authors = AuthorManager() editors", "super().get_queryset().filter(role='A') class EditorManager(models.Manager): def get_queryset(self): return super().get_queryset().filter(role='E') class Person(models.Model): first_name = models.CharField(max_length=50) last_name", "status = models.CharField('文章状态', max_length=1, choices=STATUS_CHOICES) views = models.PositiveIntegerField('浏览量', default=0) author = models.ForeignKey(User, verbose_name=\"作者\",", "django.contrib.auth.models import User from django.urls import reverse from django.utils.timezone import now from django.template.defaultfilters", "do something class Meta: ordering = ['-pub_date'] verbose_name = 'article' def user_directory_path(instance, filename):", "auto_now=True) status = models.CharField('文章状态', max_length=1, choices=STATUS_CHOICES) views = models.PositiveIntegerField('浏览量', default=0) author = models.ForeignKey(User,", "django.template.defaultfilters import slugify import uuid import os class Article(models.Model): STATUS_CHOICES = ( ('d',", "mod_date = 
models.DateTimeField('修改时间', auto_now=True) status = models.CharField('文章状态', max_length=1, choices=STATUS_CHOICES) views = models.PositiveIntegerField('浏览量', default=0)", "Article(models.Model): STATUS_CHOICES = ( ('d', '草稿'), ('p', '发表'), ) title = models.CharField('标题', max_length=200,", "django.db import models from django.contrib.auth.models import User from django.urls import reverse from django.utils.timezone", "= models.CharField(max_length=50) last_name = models.CharField(max_length=50) role = models.CharField(max_length=1, choices=(('A', 'Author'), ('E', 'Editor'))) objects", "def get_queryset(self): return super().get_queryset().filter(role='A') class EditorManager(models.Manager): def get_queryset(self): return super().get_queryset().filter(role='E') class Person(models.Model): first_name", "ordering = ['-pub_date'] verbose_name = 'article' def user_directory_path(instance, filename): ext = filename.split('.')[-1] filename", "class UserProfile(models.Model): user = models.OneToOneField(User, on_delete=models.CASCADE, related_name='profile') avatar = models.ImageField(upload_to=user_directory_path, verbose_name='头像') class AuthorManager(models.Manager):", "super(Article, self).save() # do something class Meta: ordering = ['-pub_date'] verbose_name = 'article'", "from django.urls import reverse from django.utils.timezone import now from django.template.defaultfilters import slugify import", "using=None, update_fields=None): if not self.slug or not self.id: self.slug = slugify(self.title) super(Article, self).save()", "title = models.CharField('标题', max_length=200, unique=True) slug = models.SlugField('slug', max_length=60) body = models.TextField('正文') pub_date", "user = models.OneToOneField(User, on_delete=models.CASCADE, related_name='profile') avatar = models.ImageField(upload_to=user_directory_path, verbose_name='头像') class AuthorManager(models.Manager): def get_queryset(self):", "models.ManyToManyField('Tag', verbose_name=\"标签集合\", blank=True) def 
__str__(self): return self.title def get_absolute_url(self): return reverse('blog:article_detail', args=[str(self.id)]) def", "= ( ('d', '草稿'), ('p', '发表'), ) title = models.CharField('标题', max_length=200, unique=True) slug", "default=0) author = models.ForeignKey(User, verbose_name=\"作者\", on_delete=models.CASCADE) tags = models.ManyToManyField('Tag', verbose_name=\"标签集合\", blank=True) def __str__(self):", "super().get_queryset().filter(role='E') class Person(models.Model): first_name = models.CharField(max_length=50) last_name = models.CharField(max_length=50) role = models.CharField(max_length=1, choices=(('A',", "= '{}.{}'.format(uuid.uuid4().hex[:10], ext) # return the whole path to the file return os.path.join(instance.user.id,", "models.DateTimeField('修改时间', auto_now=True) status = models.CharField('文章状态', max_length=1, choices=STATUS_CHOICES) views = models.PositiveIntegerField('浏览量', default=0) author =", "self.id: self.slug = slugify(self.title) super(Article, self).save() # do something class Meta: ordering =", "not self.slug or not self.id: self.slug = slugify(self.title) super(Article, self).save() # do something", "blank=True) def __str__(self): return self.title def get_absolute_url(self): return reverse('blog:article_detail', args=[str(self.id)]) def viewed(self): self.views", "whole path to the file return os.path.join(instance.user.id, 'avatar', filename) class UserProfile(models.Model): user =", "= models.CharField('标题', max_length=200, unique=True) slug = models.SlugField('slug', max_length=60) body = models.TextField('正文') pub_date =", "pub_date = models.DateTimeField('发布时间', default=now, null=True) create_date = models.DateTimeField('创建时间', auto_now_add=True) mod_date = models.DateTimeField('修改时间', auto_now=True)", "__str__(self): return self.title def get_absolute_url(self): return reverse('blog:article_detail', args=[str(self.id)]) def viewed(self): self.views += 1", "= models.DateTimeField('发布时间', default=now, null=True) create_date = 
models.DateTimeField('创建时间', auto_now_add=True) mod_date = models.DateTimeField('修改时间', auto_now=True) status", "models.ImageField(upload_to=user_directory_path, verbose_name='头像') class AuthorManager(models.Manager): def get_queryset(self): return super().get_queryset().filter(role='A') class EditorManager(models.Manager): def get_queryset(self): return", "def get_absolute_url(self): return reverse('blog:article_detail', args=[str(self.id)]) def viewed(self): self.views += 1 self.save(update_fields=['views']) def save(self," ]
[ "utf-8 -*- import pkg_resources def echo_conf(args): print pkg_resources.resource_string('uwsgi_sloth', \"sample.conf\") def load_subcommand(subparsers): \"\"\"Load this", "coding: utf-8 -*- import pkg_resources def echo_conf(args): print pkg_resources.resource_string('uwsgi_sloth', \"sample.conf\") def load_subcommand(subparsers): \"\"\"Load", "pkg_resources.resource_string('uwsgi_sloth', \"sample.conf\") def load_subcommand(subparsers): \"\"\"Load this subcommand\"\"\" parser_analyze = subparsers.add_parser('echo_conf', help='Echo sample configuration", "def echo_conf(args): print pkg_resources.resource_string('uwsgi_sloth', \"sample.conf\") def load_subcommand(subparsers): \"\"\"Load this subcommand\"\"\" parser_analyze = subparsers.add_parser('echo_conf',", "print pkg_resources.resource_string('uwsgi_sloth', \"sample.conf\") def load_subcommand(subparsers): \"\"\"Load this subcommand\"\"\" parser_analyze = subparsers.add_parser('echo_conf', help='Echo sample", "<reponame>365moods/uwsgi # -*- coding: utf-8 -*- import pkg_resources def echo_conf(args): print pkg_resources.resource_string('uwsgi_sloth', \"sample.conf\")", "\"sample.conf\") def load_subcommand(subparsers): \"\"\"Load this subcommand\"\"\" parser_analyze = subparsers.add_parser('echo_conf', help='Echo sample configuration file')", "# -*- coding: utf-8 -*- import pkg_resources def echo_conf(args): print pkg_resources.resource_string('uwsgi_sloth', \"sample.conf\") def", "import pkg_resources def echo_conf(args): print pkg_resources.resource_string('uwsgi_sloth', \"sample.conf\") def load_subcommand(subparsers): \"\"\"Load this subcommand\"\"\" parser_analyze", "def load_subcommand(subparsers): \"\"\"Load this subcommand\"\"\" parser_analyze = subparsers.add_parser('echo_conf', help='Echo sample configuration file') parser_analyze.set_defaults(func=echo_conf)", "pkg_resources def echo_conf(args): print pkg_resources.resource_string('uwsgi_sloth', \"sample.conf\") def load_subcommand(subparsers): 
\"\"\"Load this subcommand\"\"\" parser_analyze =", "echo_conf(args): print pkg_resources.resource_string('uwsgi_sloth', \"sample.conf\") def load_subcommand(subparsers): \"\"\"Load this subcommand\"\"\" parser_analyze = subparsers.add_parser('echo_conf', help='Echo", "-*- import pkg_resources def echo_conf(args): print pkg_resources.resource_string('uwsgi_sloth', \"sample.conf\") def load_subcommand(subparsers): \"\"\"Load this subcommand\"\"\"", "-*- coding: utf-8 -*- import pkg_resources def echo_conf(args): print pkg_resources.resource_string('uwsgi_sloth', \"sample.conf\") def load_subcommand(subparsers):" ]
[]
[ "1 self.explore(source, next, visited, ranks) # TODO Try using some sort of centrality", "'_' + str( n_qubits) + '_qubits_parity.txt' os.makedirs(os.path.dirname(filename), exist_ok=True) out_f = open(filename, 'w') #", "!= n_qubits: sorted_v.append(reverse[connected[n + stop + 1]]) logger.log(logging.VERBOSE, 'launch_exp() - connected[n+stop+1], sorted_v[n+2] in", "find the most connected qubit @staticmethod def find_max(ranks): logger.debug('ranks:\\n%s', str(ranks)) most_connected = max(ranks.items(),", "Unless required by applicable law or agreed to in writing, software # distributed", "target): if target in self.__coupling_map[control]: logger.log(logging.VERBOSE, 'cx() - cnot: (%s, %s)', str(control), str(target))", "if max_qubits < self.__n_qubits: logger.critical('create() - Can use only up to %s qubits',", "n_qubits=n_qubits, oracle=oracle, num_shots=num_shots, directory=directory) return quantum_r = Q_program.create_quantum_register(\"qr\", size) classical_r = Q_program.create_classical_register(\"cr\", size)", "experiment on the given device def envariance_exec(execution, device, utility, n_qubits, num_shots=1024, directory='Data_Envariance/'): os.makedirs(os.path.dirname(directory),", "use @staticmethod def invert_graph(graph, inverse_graph=None): if inverse_graph is None: inverse_graph = {} for", "def parity(self, circuit, quantum_r, classical_r, n_qubits, oracle='11'): self.create(circuit, quantum_r, classical_r, n_qubits, x=False, oracle=oracle)", "sort of centrality algorithm def start_explore(self, graph, ranks): visited = dict() for source", "execution, num_shots) while Q_program.get_api().get_my_credits()['remaining'] < 3: sleep(900) logger.critical('Credits replenished, resuming execution') try: result", "Null argument: coupling_map') exit(1) def close(self): self.__ranks.clear() self.__inverse_coupling_map.clear() self.__coupling_map.clear() self.__path.clear() self.__most_connected.clear() def explore(self,", "envariance_exec(execution, 
device, utility, n_qubits=n_qubits, num_shots=num_shots, directory=directory) return try: counts = result.get_counts(\"envariance\") except Exception:", "def create(self, circuit, quantum_r, classical_r, n_qubits, x=True, oracle='11'): self.__n_qubits = n_qubits max_qubits =", "// 2 for i in sorted_c: reverse = i[0][::-1] sorted_v = [] for", "for credits to replenish...', n_qubits, execution, num_shots) while Q_program.get_api().get_my_credits()['remaining'] < 3: sleep(900) logger.critical('Credits", "in inverse_graph: inverse_graph.update({node: []}) # find the most connected qubit @staticmethod def find_max(ranks):", "node not in inverse_graph: inverse_graph.update({node: []}) # find the most connected qubit @staticmethod", "def cx(self, circuit, control_qubit, target_qubit, control, target): if target in self.__coupling_map[control]: logger.log(logging.VERBOSE, 'cx()", "execution') try: result = Q_program.execute(['parity'], backend=device, wait=2, timeout=1000, shots=num_shots, max_credits=5) except Exception: sleep(900)", "for credits to replenish...', n_qubits, oracle, execution, num_shots) while Q_program.get_api().get_my_credits()['remaining'] < 3: sleep(900)", "oracle=oracle, num_shots=num_shots, directory=directory) return logger.debug('launch_exp() - counts:\\n%s', str(counts)) sorted_c = sorted(counts.items(), key=operator.itemgetter(1), reverse=True)", "url except ConnectionError: sleep(900) logger.critical('API Exception occurred, retrying\\nQubits %d - Execution %d -", "= len(self.__coupling_map) logger.debug('create_path() - max:\\n%s', str(max)) count = max - 1 changed =", "Waiting for credits to replenish...', n_qubits, execution, num_shots) while Q_program.get_api().get_my_credits()['remaining'] < 3: sleep(900)", "IS\" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or", "results.update({value: i[1]}) out_f.write(value + '\\t' + str(i[1]) + '\\n') out_f.close() # launch parity", "self.__plain_map = dict() self.__path = 
dict() self.__n_qubits = 0 self.__ranks = dict() self.__connected", "= Q_program.create_quantum_register(\"qr\", size) classical_r = Q_program.create_classical_register(\"cr\", size) circuit = Q_program.create_circuit('parity', [quantum_r], [classical_r]) connected", "self.__ranks = dict() self.__connected = dict() self.__most_connected = [] if coupling_map: self.__coupling_map =", "+ '/' + 'execution' + str( execution) + '/' + device + '_'", "envariance(self, circuit, quantum_r, classical_r, n_qubits): self.create(circuit, quantum_r, classical_r, n_qubits) sorted_c = sorted(self.__connected.items(), key=operator.itemgetter(0))", "self.cx(circuit, quantum_r[qubit], quantum_r[self.__connected[qubit]], qubit, self.__connected[qubit]) stop -= 1 # place Hadamard gates def", "self.__connected: if qubit != start: circuit.h(quantum_r[qubit]) else: if initial is True: if x", "continue break if Q_program.get_api().get_my_credits()['remaining'] < 3: logger.critical('Qubits %d - Execution %d - Shots", "self.__n_qubits = 0 self.__connected.clear() return connected def parity(self, circuit, quantum_r, classical_r, n_qubits, oracle='11'):", "directory='Data_Envariance/'): os.makedirs(os.path.dirname(directory), exist_ok=True) size = 0 results = dict() if device == qx2", "to_connect: if count <= 0: break for node in plain_map[to_connect[visiting]]: if count <=", "0 results = dict() if device == qx2 or device == qx4: if", "= sorted(self.__connected.items(), key=operator.itemgetter(0)) connected = list(zip(*sorted_c))[0] logger.debug('envariance() - connected:\\n%s', str(connected)) self.__n_qubits = 0", "file and xlsx file out_f.write('VALUES\\t\\tCOUNTS\\n\\n') stop = n_qubits // 2 for i in", "logger.critical('%s is back online, resuming execution', device) except ConnectionError: logger.critical('Error getting backend status,", "n_qubits=n_qubits, num_shots=num_shots, directory=directory) return logger.debug('launch_exp() - counts:\\n%s', str(counts)) sorted_c = 
sorted(counts.items(), key=operator.itemgetter(1), reverse=True)", "- oracle != 00') stop = self.__n_qubits // 2 for qubit in self.__connected:", "utility, n_qubits=n_qubits, num_shots=num_shots, directory=directory) return quantum_r = Q_program.create_quantum_register(\"qr\", size) classical_r = Q_program.create_classical_register(\"cr\", size)", "oracle=oracle) connected = list(self.__connected.keys()) logger.debug('parity() - connected:\\n%s', str(connected)) self.__n_qubits = 0 self.__connected.clear() return", "!', device) exit(1) elif device == qx3 or device == qx5: if n_qubits", "start in graph[end]: if start not in inverse_graph: inverse_graph.update({start: [end]}) else: inverse_graph[start].append(end) for", "if count <= 0: break for node in plain_map[to_connect[visiting]]: if count <= 0:", "if device == qx2 or device == qx4: if n_qubits <= 5: size", ">= s_0: circuit.x(quantum_r[qubit[0]]) else: circuit.iden(quantum_r[qubit[0]]) i += 1 i = 0 for qubit", "self.__n_qubits // 2 i = 0 count = self.__n_qubits - 1 for qubit", "= Q_program.get_backend_status(device) if ('available' in backend_status and backend_status['available'] is False) \\ or ('busy'", "resuming execution', device) except ConnectionError: logger.critical('Error getting backend status, retrying...') sleep(900) continue except", "API url except ConnectionError: sleep(900) logger.critical('API Exception occurred, retrying\\nQubits %d - Oracle %s", "stop = n_qubits // 2 for i in sorted_c: reverse = i[0][::-1] logger.log(logging.VERBOSE,", "result.get_counts('parity') except Exception: logger.critical('Exception occurred, retrying\\nQubits %d - Oracle %s - Execution %d", "%s', str(self.__n_qubits)) logger.debug('create() - Max qubits: %s', str(max_qubits)) if max_qubits < self.__n_qubits: logger.critical('create()", "software # distributed under the License is distributed on an \"AS IS\" BASIS,", "n_qubits) + '_qubits_parity.txt' os.makedirs(os.path.dirname(filename), exist_ok=True) out_f = 
open(filename, 'w') # store counts in", "\"Apache\" __version__ = \"2.0\" __email__ = \"<EMAIL>\" import os from time import sleep", "resuming execution') try: result = Q_program.execute([\"envariance\"], backend=device, wait=2, timeout=1000, shots=num_shots, max_credits=5) except Exception:", "exit(2) count = self.__n_qubits for qubit in self.__path: if count <= 0: break", "= coupling_map.copy() logger.log(logging.DEBUG, 'init() - coupling_map:\\n%s', str(self.__coupling_map)) self.invert_graph(coupling_map, self.__inverse_coupling_map) logger.log(logging.DEBUG, 'init() - inverse", "n_qubits, num_shots=1024, directory='Data_Envariance/'): os.makedirs(os.path.dirname(directory), exist_ok=True) size = 0 results = dict() if device", "= \"<NAME>\" __copyright__ = \"Copyright 2017, Quantum Information Science, University of Parma, Italy\"", "found # create a valid path that connect qubits used in the circuit", "+ str( num_shots) + 'queries_' + oracle + '_' + str( n_qubits) +", "device) exit(2) elif device == online_sim: if n_qubits <= 5: size = 5", "logger.debug('create() - N qubits: %s', str(self.__n_qubits)) logger.debug('create() - Max qubits: %s', str(max_qubits)) if", "the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law", "(%s, %s)', str(control), str(target)) circuit.cx(control_qubit, target_qubit) elif control in self.__coupling_map[target]: logger.log(logging.VERBOSE, 'cx() -", "coupling_map): self.__coupling_map = dict() self.__inverse_coupling_map = dict() self.__plain_map = dict() self.__path = dict()", "- sorted_c:\\n%s', str(sorted_c)) s_0 = self.__n_qubits // 2 i = 0 count =", "on the given device def parity_exec(execution, device, utility, n_qubits, oracle='11', num_shots=1024, directory='Data_Parity/'): os.makedirs(os.path.dirname(directory),", "def measure(self, circuit, quantum_r, classical_r): for qubit in self.__connected: circuit.measure(quantum_r[qubit], classical_r[qubit]) # create", "def 
invert_graph(graph, inverse_graph=None): if inverse_graph is None: inverse_graph = {} for end in", "valid path that connect qubits used in the circuit def create_path(self, start, plain_map):", "Exception: sleep(900) logger.critical('Exception occurred, retrying\\nQubits %d - Execution %d - Shots %d', n_qubits,", "%d - Oracle %s - Execution %d - Queries %d', n_qubits, oracle, execution,", "available, waiting...') sleep(900) continue break if Q_program.get_api().get_my_credits()['remaining'] < 3: logger.critical('Qubits %d - Execution", "node not in self.__path: self.__path.update({node: to_connect[visiting]}) count -= 1 logger.debug('create_path() - path:\\n%s', str(self.__path))", "x=False, oracle=oracle) connected = list(self.__connected.keys()) logger.debug('parity() - connected:\\n%s', str(connected)) self.__n_qubits = 0 self.__connected.clear()", "break if i >= s_0: circuit.x(quantum_r[qubit[0]]) else: circuit.iden(quantum_r[qubit[0]]) i += 1 i =", "__email__ = \"<EMAIL>\" import os from time import sleep from devices import *", "waiting...') sleep(900) continue break if Q_program.get_api().get_my_credits()['remaining'] < 3: logger.critical('Qubits %d - Execution %d", "quantum_r, classical_r, n_qubits, x=True, oracle='11'): self.__n_qubits = n_qubits max_qubits = len(self.__path) logger.debug('create() -", "self.__connected: if self.__connected[qubit] != -1: if oracle == '11': logger.log(logging.VERBOSE, 'place_cx() - oracle", "end in graph: for start in graph[end]: if start not in inverse_graph: inverse_graph.update({start:", "connect qubit %s to qubit %s', str(control), str(target)) exit(3) # place cnot gates", "sorted_c: reverse = i[0][::-1] sorted_v = [] for n in range(n_qubits - stop):", "device, utility, n_qubits=n_qubits, num_shots=num_shots, directory=directory) return quantum_r = Q_program.create_quantum_register(\"qr\", size) classical_r = Q_program.create_classical_register(\"cr\",", "in txt file and xlsx file out_f.write('VALUES\\t\\tCOUNTS\\n\\n') 
logger.debug('launch_exp() - oredred_q:\\n%s', str(connected)) stop =", "store counts in txt file and xlsx file out_f.write('VALUES\\t\\tCOUNTS\\n\\n') logger.debug('launch_exp() - oredred_q:\\n%s', str(connected))", "in for 1st loop: %s', str(reverse)) sorted_v = [reverse[connected[0]]] logger.log(logging.VERBOSE, 'launch_exp() - connected[0]", "= sorted(counts.items(), key=operator.itemgetter(1), reverse=True) filename = directory + device + '/' + 'execution'", "invert_graph(graph, inverse_graph=None): if inverse_graph is None: inverse_graph = {} for end in graph:", "circuit, control_qubit, target_qubit, control, target): if target in self.__coupling_map[control]: logger.log(logging.VERBOSE, 'cx() - cnot:", "- inverse-cnot: (%s, %s)', str(control), str(target)) circuit.h(control_qubit) circuit.h(target_qubit) circuit.cx(target_qubit, control_qubit) circuit.h(control_qubit) circuit.h(target_qubit) else:", "if count <= 0: break if i >= s_0: circuit.x(quantum_r[qubit[0]]) else: circuit.iden(quantum_r[qubit[0]]) i", "ConnectionError: sleep(900) logger.critical('API Exception occurred, retrying\\nQubits %d - Oracle %s - Execution %d", "[quantum_r], [classical_r]) connected = utility.envariance(circuit=circuit, quantum_r=quantum_r, classical_r=classical_r, n_qubits=n_qubits) QASM_source = Q_program.get_qasm(\"envariance\") logger.debug('launch_exp() -", "+= 1 # final measure def measure(self, circuit, quantum_r, classical_r): for qubit in", "- %s', str(visiting), str(to_connect[visiting])) # for visiting in to_connect: if count <= 0:", "str(target)) circuit.h(control_qubit) circuit.h(target_qubit) circuit.cx(target_qubit, control_qubit) circuit.h(control_qubit) circuit.h(target_qubit) else: logger.critical('cx() - Cannot connect qubit", "sorted_v.append(reverse[connected[n]]) value = ''.join(str(v) for v in sorted_v) results.update({value: i[1]}) out_f.write(value + '\\t'", "parity(self, circuit, quantum_r, classical_r, n_qubits, oracle='11'): self.create(circuit, 
quantum_r, classical_r, n_qubits, x=False, oracle=oracle) connected", "limitations under the License. # ============================================================================= __author__ = \"<NAME>\" __copyright__ = \"Copyright 2017,", "classical_r): for qubit in self.__connected: circuit.measure(quantum_r[qubit], classical_r[qubit]) # create the circuit def create(self,", "n_qubits=n_qubits, num_shots=num_shots, directory=directory) return quantum_r = Q_program.create_quantum_register(\"qr\", size) classical_r = Q_program.create_classical_register(\"cr\", size) circuit", "# place cnot gates based on the path created in create_path method def", "not use this file except in compliance with the License. # You may", "str(sorted_v[n + 2])) value = ''.join(str(v) for v in sorted_v) results.update({value: i[1]}) out_f.write(value", "00') stop = self.__n_qubits // 2 for qubit in self.__connected: if self.__connected[qubit] !=", "else: logger.critical('launch_exp() - Too much qubits for %s !', device) exit(1) elif device", "loop: %s', str(reverse)) sorted_v = [reverse[connected[0]]] logger.log(logging.VERBOSE, 'launch_exp() - connected[0] in 1st for", "logger.critical('init() - Null argument: coupling_map') exit(1) def close(self): self.__ranks.clear() self.__inverse_coupling_map.clear() self.__coupling_map.clear() self.__path.clear() self.__most_connected.clear()", "for 1st loop: %s', str(reverse)) sorted_v = [reverse[connected[0]]] logger.log(logging.VERBOSE, 'launch_exp() - connected[0] in", "circuit, quantum_r, classical_r): for qubit in self.__connected: circuit.measure(quantum_r[qubit], classical_r[qubit]) # create the circuit", "distributed on an \"AS IS\" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY", "i in sorted_c: reverse = i[0][::-1] sorted_v = [] for n in range(n_qubits", "2])) value = ''.join(str(v) for v in sorted_v) results.update({value: i[1]}) out_f.write(value + '\\t'", "- Execution %d - Queries %d ---- Waiting for credits to replenish...', 
n_qubits,", "connected qubit @staticmethod def find_max(ranks): logger.debug('ranks:\\n%s', str(ranks)) most_connected = max(ranks.items(), key=operator.itemgetter(1))[0] found =", "not in to_connect: to_connect.append(node) visiting += 1 logger.debug('create_path() - path:\\n%s', str(self.__path)) def cx(self,", "!= 00') stop = self.__n_qubits // 2 for qubit in self.__connected: if self.__connected[qubit]", "Q_program.execute(['parity'], backend=device, wait=2, timeout=1000, shots=num_shots, max_credits=5) except Exception: sleep(900) logger.critical('Exception occurred, retrying\\nQubits %d", "# Copyright 2017 Quantum Information Science, University of Parma, Italy. All Rights Reserved.", "agreed to in writing, software # distributed under the License is distributed on", "in plain_map[to_connect[visiting]]: if count <= 0: break if node not in self.__path: self.__path.update({node:", "str( n_qubits) + '_qubits_envariance.txt' os.makedirs(os.path.dirname(filename), exist_ok=True) out_f = open(filename, 'w') # store counts", "circuit = Q_program.create_circuit('parity', [quantum_r], [classical_r]) connected = utility.parity(circuit=circuit, quantum_r=quantum_r, classical_r=classical_r, n_qubits=n_qubits, oracle=oracle) QASM_source", "logger.debug('create() - connected:\\n%s', str(self.__connected)) self.place_h(circuit, self.__most_connected[0], quantum_r, x=x) self.place_cx(circuit, quantum_r, oracle=oracle) self.place_h(circuit, self.__most_connected[0],", "[]}) # find the most connected qubit @staticmethod def find_max(ranks): logger.debug('ranks:\\n%s', str(ranks)) most_connected", "+ '/' + oracle + '/' + 'execution' + str( execution) + '/'", "exit(1) def close(self): self.__ranks.clear() self.__inverse_coupling_map.clear() self.__coupling_map.clear() self.__path.clear() self.__most_connected.clear() def explore(self, source, visiting, visited,", "wait=2, timeout=1000, shots=num_shots, max_credits=5) except Exception: sleep(900) logger.critical('Exception 
occurred, retrying\\nQubits %d - Execution", "[most_connected, ranks[most_connected]] logger.debug('max: %s', str(found)) return found # create a valid path that", "n_qubits <= 16: size = 16 else: logger.critical('launch_exp() - Unknown device.') exit(3) Q_program", "logger.debug('launch_exp() - counts:\\n%s', str(counts)) sorted_c = sorted(counts.items(), key=operator.itemgetter(1), reverse=True) filename = directory +", "range(stop): sorted_v.append(reverse[connected[n]]) value = ''.join(str(v) for v in sorted_v) results.update({value: i[1]}) out_f.write(value +", ">= s_0: circuit.iden(quantum_r[qubit[0]]) else: circuit.x(quantum_r[qubit[0]]) i += 1 # final measure def measure(self,", "= dict() self.__connected = dict() self.__most_connected = [] if coupling_map: self.__coupling_map = coupling_map.copy()", "map:\\n%s', str(self.__plain_map)) self.start_explore(self.__coupling_map, self.__ranks) self.__most_connected = self.find_max(self.__ranks) self.create_path(self.__most_connected[0], plain_map=self.__plain_map) else: logger.critical('init() - Null", "16 # device = 'ibmqx_qasm_simulator' else: logger.critical('launch_exp() - Too much qubits for %s", "2017 Quantum Information Science, University of Parma, Italy. All Rights Reserved. 
# #", "\"<NAME>\" __copyright__ = \"Copyright 2017, Quantum Information Science, University of Parma, Italy\" __license__", "for i in coupling_map: self.__plain_map.update({i: self.__inverse_coupling_map[i] + coupling_map[i]}) logger.debug('init() - plain map:\\n%s', str(self.__plain_map))", "directory + device + '/' + 'execution' + str( execution) + '/' +", "connected[n+stop+1], sorted_v[n+2] in 2nd for loop: %s%s', str(connected[n + stop + 1]), str(sorted_v[n", "+ str(i[1]) + '\\n') out_f.close() # launch parity experiment on the given device", "parity experiment on the given device def parity_exec(execution, device, utility, n_qubits, oracle='11', num_shots=1024,", "if you clone QISKit from the Git repo and use like a global.", "for loop: %s%s', str(connected[n + stop + 1]), str(sorted_v[n + 2])) value =", "def create_path(self, start, plain_map): self.__path.update({start: -1}) to_connect = [start] max = len(self.__coupling_map) logger.debug('create_path()", "parity_exec(execution, device, utility, n_qubits=n_qubits, oracle=oracle, num_shots=num_shots, directory=directory) return try: counts = result.get_counts('parity') except", "= [start] max = len(self.__coupling_map) logger.debug('create_path() - max:\\n%s', str(max)) count = max -", "plain_map=self.__plain_map) else: logger.critical('init() - Null argument: coupling_map') exit(1) def close(self): self.__ranks.clear() self.__inverse_coupling_map.clear() self.__coupling_map.clear()", "to in writing, software # distributed under the License is distributed on an", "Execution %d - Shots %d', n_qubits, execution, num_shots) envariance_exec(execution, device, utility, n_qubits=n_qubits, num_shots=num_shots,", "store counts in txt file and xlsx file out_f.write('VALUES\\t\\tCOUNTS\\n\\n') stop = n_qubits //", "1]), str(sorted_v[n + 1])) if (n + stop + 1) != n_qubits: sorted_v.append(reverse[connected[n", "Q_program.create_quantum_register(\"qr\", size) classical_r = Q_program.create_classical_register(\"cr\", 
size) circuit = Q_program.create_circuit(\"envariance\", [quantum_r], [classical_r]) connected =", "implied. # See the License for the specific language governing permissions and #", "to %s qubits', str(max_qubits)) exit(2) count = self.__n_qubits for qubit in self.__path: if", "Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the \"License\");", "visited = dict() for source in graph: visited.update({source: []}) self.explore(source, source, visited, ranks)", "path created in create_path method def place_cx(self, circuit, quantum_r, oracle='11'): if not oracle", "= sorted(self.__connected.items(), key=operator.itemgetter(0)) logger.log(logging.VERBOSE, 'place_x() - sorted_c:\\n%s', str(sorted_c)) s_0 = self.__n_qubits // 2", "result = Q_program.execute([\"envariance\"], backend=device, wait=2, timeout=1000, shots=num_shots, max_credits=5) except Exception: sleep(900) logger.critical('Exception occurred,", "@staticmethod def invert_graph(graph, inverse_graph=None): if inverse_graph is None: inverse_graph = {} for end", "results = dict() if device == qx2 or device == qx4: if n_qubits", "for start in graph[end]: if start not in inverse_graph: inverse_graph.update({start: [end]}) else: inverse_graph[start].append(end)", "logger.critical('API Exception occurred, retrying\\nQubits %d - Oracle %s - Execution %d - Queries", "Can use only up to %s qubits', str(max_qubits)) exit(2) count = self.__n_qubits for", "else: circuit.x(quantum_r[qubit[0]]) i += 1 # final measure def measure(self, circuit, quantum_r, classical_r):", "Q_program.get_backend_status(device) if ('available' in backend_status and backend_status['available'] is False) \\ or ('busy' in", "False: sleep(1800) logger.critical('%s is back online, resuming execution', device) except ConnectionError: logger.critical('Error getting", "backend status, retrying...') sleep(900) continue except ValueError: logger.critical('Backend is not available, waiting...') sleep(900)", "sys.path.append( # solve 
the relative dependencies if you clone QISKit from the Git", "Apache License, Version 2.0 (the \"License\"); # you may not use this file", "reverse=True) filename = directory + device + '/' + 'execution' + str( execution)", "str( execution) + '/' + device + '_' + str( num_shots) + 'queries_'", "for n in range(stop): sorted_v.append(reverse[connected[n + 1]]) logger.log(logging.VERBOSE, 'launch_exp() - connected[n+1], sorted_v[n+1] in", "stop = self.__n_qubits // 2 for qubit in self.__connected: if self.__connected[qubit] != -1:", "stop + 1]]) logger.log(logging.VERBOSE, 'launch_exp() - connected[n+stop+1], sorted_v[n+2] in 2nd for loop: %s%s',", "N qubits: %s', str(self.__n_qubits)) logger.debug('create() - Max qubits: %s', str(max_qubits)) if max_qubits <", "for n in range(n_qubits - stop): sorted_v.append(reverse[connected[n + stop]]) for n in range(stop):", "place cnot gates based on the path created in create_path method def place_cx(self,", "# See the License for the specific language governing permissions and # limitations", "the License is distributed on an \"AS IS\" BASIS, # WITHOUT WARRANTIES OR", "self.__coupling_map.clear() self.__path.clear() self.__most_connected.clear() def explore(self, source, visiting, visited, ranks): for next in self.__coupling_map[visiting]:", "= len(self.__path) logger.debug('create() - N qubits: %s', str(self.__n_qubits)) logger.debug('create() - Max qubits: %s',", "plain map:\\n%s', str(self.__plain_map)) self.start_explore(self.__coupling_map, self.__ranks) self.__most_connected = self.find_max(self.__ranks) self.create_path(self.__most_connected[0], plain_map=self.__plain_map) else: logger.critical('init() -", "logger.critical('Credits replenished, resuming execution') try: result = Q_program.execute(['parity'], backend=device, wait=2, timeout=1000, shots=num_shots, max_credits=5)", "< 3: sleep(900) logger.critical('Credits replenished, resuming execution') try: result = Q_program.execute([\"envariance\"], backend=device, 
wait=2,", "= ''.join(str(v) for v in sorted_v) results.update({value: i[1]}) out_f.write(value + '\\t' + str(i[1])", "%d ---- Waiting for credits to replenish...', n_qubits, oracle, execution, num_shots) while Q_program.get_api().get_my_credits()['remaining']", "< 3: sleep(900) logger.critical('Credits replenished, resuming execution') try: result = Q_program.execute(['parity'], backend=device, wait=2,", "txt file and xlsx file out_f.write('VALUES\\t\\tCOUNTS\\n\\n') logger.debug('launch_exp() - oredred_q:\\n%s', str(connected)) stop = n_qubits", "in 2nd for loop: %s%s', str(connected[n + stop + 1]), str(sorted_v[n + 2]))", "the Apache License, Version 2.0 (the \"License\"); # you may not use this", "connected[0] in 1st for loop: %s', str(connected[0])) logger.log(logging.VERBOSE, 'launch_exp() - sorted_v in 1st", "str(QASM_source)) while True: try: backend_status = Q_program.get_backend_status(device) if ('available' in backend_status and backend_status['available']", "you may not use this file except in compliance with the License. 
#", "Information Science, University of Parma, Italy\" __license__ = \"Apache\" __version__ = \"2.0\" __email__", "QASM:\\n%s', str(QASM_source)) while True: try: backend_status = Q_program.get_backend_status(device) if ('available' in backend_status and", "str(control), str(target)) circuit.cx(control_qubit, target_qubit) elif control in self.__coupling_map[target]: logger.log(logging.VERBOSE, 'cx() - inverse-cnot: (%s,", "measure def measure(self, circuit, quantum_r, classical_r): for qubit in self.__connected: circuit.measure(quantum_r[qubit], classical_r[qubit]) #", "classical_r, n_qubits, x=False, oracle=oracle) connected = list(self.__connected.keys()) logger.debug('parity() - connected:\\n%s', str(connected)) self.__n_qubits =", "Execution %d - Queries %d', n_qubits, oracle, execution, num_shots) parity_exec(execution, device, utility, n_qubits=n_qubits,", "self.__coupling_map[visiting]: if next not in visited[source]: visited[source].append(next) if next not in ranks: ranks.update({next:", "launch envariance experiment on the given device def envariance_exec(execution, device, utility, n_qubits, num_shots=1024,", "%s !', device) exit(1) elif device == qx3 or device == qx5: if", "self.__ranks.clear() self.__inverse_coupling_map.clear() self.__coupling_map.clear() self.__path.clear() self.__most_connected.clear() def explore(self, source, visiting, visited, ranks): for next", "16 else: logger.critical('launch_exp() - Unknown device.') exit(3) Q_program = QuantumProgram() try: Q_program.set_api(Qconfig.APItoken, Qconfig.config[\"url\"])", "not in ranks: ranks.update({next: 0}) ranks[next] = ranks[next] + 1 self.explore(source, next, visited,", "device + '/' + 'execution' + str( execution) + '/' + device +", "the License. # ============================================================================= __author__ = \"<NAME>\" __copyright__ = \"Copyright 2017, Quantum Information", "# for visiting in to_connect: if count <= 0: break for node in", "Italy. 
All Rights Reserved. # # Licensed under the Apache License, Version 2.0", "1 logger.debug('create() - connected:\\n%s', str(self.__connected)) self.place_h(circuit, self.__most_connected[0], quantum_r, x=x) self.place_cx(circuit, quantum_r, oracle=oracle) self.place_h(circuit,", "Git repo and use like a global. \"../qiskit-sdk-py\") from qiskit import QuantumProgram import", "used in the circuit def create_path(self, start, plain_map): self.__path.update({start: -1}) to_connect = [start]", "# Licensed under the Apache License, Version 2.0 (the \"License\"); # you may", "+ '\\t' + str(i[1]) + '\\n') out_f.close() # launch parity experiment on the", "self.create_path(self.__most_connected[0], plain_map=self.__plain_map) else: logger.critical('init() - Null argument: coupling_map') exit(1) def close(self): self.__ranks.clear() self.__inverse_coupling_map.clear()", "directory=directory) return quantum_r = Q_program.create_quantum_register(\"qr\", size) classical_r = Q_program.create_classical_register(\"cr\", size) circuit = Q_program.create_circuit(\"envariance\",", "n_qubits, x=True, oracle='11'): self.__n_qubits = n_qubits max_qubits = len(self.__path) logger.debug('create() - N qubits:", "counts = result.get_counts(\"envariance\") except Exception: logger.critical('Exception occurred, retrying\\nQubits %d - Execution %d -", "place_x(self, circuit, quantum_r): sorted_c = sorted(self.__connected.items(), key=operator.itemgetter(0)) logger.log(logging.VERBOSE, 'place_x() - sorted_c:\\n%s', str(sorted_c)) s_0", "logging import myLogger import operator import sys sys.path.append( # solve the relative dependencies", "quantum_r=quantum_r, classical_r=classical_r, n_qubits=n_qubits, oracle=oracle) QASM_source = Q_program.get_qasm('parity') logger.debug('launch_exp() - QASM:\\n%s', str(QASM_source)) while True:", "self.__connected = dict() self.__most_connected = [] if coupling_map: self.__coupling_map = coupling_map.copy() logger.log(logging.DEBUG, 'init()", "source, 
visiting, visited, ranks): for next in self.__coupling_map[visiting]: if next not in visited[source]:", "sorted(self.__connected.items(), key=operator.itemgetter(0)) connected = list(zip(*sorted_c))[0] logger.debug('envariance() - connected:\\n%s', str(connected)) self.__n_qubits = 0 self.__connected.clear()", "= n_qubits // 2 for i in sorted_c: reverse = i[0][::-1] logger.log(logging.VERBOSE, 'launch_exp()", "visiting in to_connect: if count <= 0: break for node in plain_map[to_connect[visiting]]: if", "quantum_r, initial=True, x=True): for qubit in self.__connected: if qubit != start: circuit.h(quantum_r[qubit]) else:", "def find_max(ranks): logger.debug('ranks:\\n%s', str(ranks)) most_connected = max(ranks.items(), key=operator.itemgetter(1))[0] found = [most_connected, ranks[most_connected]] logger.debug('max:", "sleep(900) logger.critical('API Exception occurred, retrying\\nQubits %d - Oracle %s - Execution %d -", "retrying...') sleep(900) continue except ValueError: logger.critical('Backend is not available, waiting...') sleep(900) continue break", "replenish...', n_qubits, execution, num_shots) while Q_program.get_api().get_my_credits()['remaining'] < 3: sleep(900) logger.critical('Credits replenished, resuming execution')", "place_cx(self, circuit, quantum_r, oracle='11'): if not oracle == '00': logger.log(logging.VERBOSE, 'place_cx() - oracle", "> 0: self.cx(circuit, quantum_r[qubit], quantum_r[self.__connected[qubit]], qubit, self.__connected[qubit]) stop -= 1 # place Hadamard", "!', device) exit(2) elif device == online_sim: if n_qubits <= 5: size =", "str(control), str(target)) exit(3) # place cnot gates based on the path created in", "# limitations under the License. 
# ============================================================================= __author__ = \"<NAME>\" __copyright__ = \"Copyright", "and API url except ConnectionError: sleep(900) logger.critical('API Exception occurred, retrying\\nQubits %d - Oracle", "%d - Queries %d ---- Waiting for credits to replenish...', n_qubits, oracle, execution,", "str(reverse)) sorted_v = [reverse[connected[0]]] logger.log(logging.VERBOSE, 'launch_exp() - connected[0] in 1st for loop: %s',", "reverse = i[0][::-1] sorted_v = [] for n in range(n_qubits - stop): sorted_v.append(reverse[connected[n", "initial=False) if x is True: self.place_x(circuit, quantum_r) self.measure(circuit, quantum_r, classical_r) def envariance(self, circuit,", "gates def place_x(self, circuit, quantum_r): sorted_c = sorted(self.__connected.items(), key=operator.itemgetter(0)) logger.log(logging.VERBOSE, 'place_x() - sorted_c:\\n%s',", "create a valid path that connect qubits used in the circuit def create_path(self,", "sorted_c: if count <= 0: break if i >= s_0: circuit.x(quantum_r[qubit[0]]) else: circuit.iden(quantum_r[qubit[0]])", "circuit.h(control_qubit) circuit.h(target_qubit) else: logger.critical('cx() - Cannot connect qubit %s to qubit %s', str(control),", "= dict() self.__n_qubits = 0 self.__ranks = dict() self.__connected = dict() self.__most_connected =", "logger.debug('max: %s', str(found)) return found # create a valid path that connect qubits", "ranks[next] = ranks[next] + 1 self.explore(source, next, visited, ranks) # TODO Try using", "try: Q_program.set_api(Qconfig.APItoken, Qconfig.config[\"url\"]) # set the APIToken and API url except ConnectionError: sleep(900)", "parity_exec(execution, device, utility, n_qubits, oracle='11', num_shots=1024, directory='Data_Parity/'): os.makedirs(os.path.dirname(directory), exist_ok=True) size = 0 results", "%s to qubit %s', str(control), str(target)) exit(3) # place cnot gates based on", "logger.critical('launch_exp() - Too much qubits for %s !', device) 
exit(2) elif device ==", "3: sleep(900) logger.critical('Credits replenished, resuming execution') try: result = Q_program.execute(['parity'], backend=device, wait=2, timeout=1000,", "Waiting for credits to replenish...', n_qubits, oracle, execution, num_shots) while Q_program.get_api().get_my_credits()['remaining'] < 3:", "if initial is True: if x is True: circuit.x(quantum_r[qubit]) else: circuit.h(quantum_r[qubit]) # place", "= 'ibmqx_qasm_simulator' else: logger.critical('launch_exp() - Too much qubits for %s !', device) exit(2)", "'_qubits_envariance.txt' os.makedirs(os.path.dirname(filename), exist_ok=True) out_f = open(filename, 'w') # store counts in txt file", "circuit.x(quantum_r[qubit[0]]) else: circuit.iden(quantum_r[qubit[0]]) i += 1 i = 0 for qubit in sorted_c:", "while Q_program.get_api().get_my_credits()['remaining'] < 3: sleep(900) logger.critical('Credits replenished, resuming execution') try: result = Q_program.execute(['parity'],", "the Git repo and use like a global. \"../qiskit-sdk-py\") from qiskit import QuantumProgram", "import operator import sys sys.path.append( # solve the relative dependencies if you clone", "Q_program.create_quantum_register(\"qr\", size) classical_r = Q_program.create_classical_register(\"cr\", size) circuit = Q_program.create_circuit('parity', [quantum_r], [classical_r]) connected =", "+ stop + 1) != n_qubits: sorted_v.append(reverse[connected[n + stop + 1]]) logger.log(logging.VERBOSE, 'launch_exp()", "out_f.close() # launch parity experiment on the given device def parity_exec(execution, device, utility,", "and use like a global. 
\"../qiskit-sdk-py\") from qiskit import QuantumProgram import Qconfig logger", "-= 1 logger.debug('create() - connected:\\n%s', str(self.__connected)) self.place_h(circuit, self.__most_connected[0], quantum_r, x=x) self.place_cx(circuit, quantum_r, oracle=oracle)", "if next not in visited[source]: visited[source].append(next) if next not in ranks: ranks.update({next: 0})", "logger.critical('launch_exp() - Unknown device.') exit(3) Q_program = QuantumProgram() try: Q_program.set_api(Qconfig.APItoken, Qconfig.config[\"url\"]) # set", "qubits: %s', str(max_qubits)) if max_qubits < self.__n_qubits: logger.critical('create() - Can use only up", "%d', n_qubits, oracle, execution, num_shots) parity_exec(execution, device, utility, n_qubits=n_qubits, oracle=oracle, num_shots=num_shots, directory=directory) return", "count -= 1 logger.debug('create_path() - path:\\n%s', str(self.__path)) if node not in to_connect: to_connect.append(node)", "- 1 for qubit in sorted_c: if count <= 0: break if i", "created in create_path method def place_cx(self, circuit, quantum_r, oracle='11'): if not oracle ==", "logger.log(logging.VERBOSE, 'place_cx() - oracle = 11') self.cx(circuit, quantum_r[qubit], quantum_r[self.__connected[qubit]], qubit, self.__connected[qubit]) elif oracle", "self.__coupling_map = coupling_map.copy() logger.log(logging.DEBUG, 'init() - coupling_map:\\n%s', str(self.__coupling_map)) self.invert_graph(coupling_map, self.__inverse_coupling_map) logger.log(logging.DEBUG, 'init() -", "+ 1]), str(sorted_v[n + 1])) if (n + stop + 1) != n_qubits:", "= 5 # device = 'ibmqx_qasm_simulator' else: logger.critical('launch_exp() - Too much qubits for", "graph: for start in graph[end]: if start not in inverse_graph: inverse_graph.update({start: [end]}) else:", "= Q_program.create_classical_register(\"cr\", size) circuit = Q_program.create_circuit('parity', [quantum_r], [classical_r]) connected = utility.parity(circuit=circuit, quantum_r=quantum_r, classical_r=classical_r,", 
"visited, ranks) # TODO Try using some sort of centrality algorithm def start_explore(self,", "algorithm def start_explore(self, graph, ranks): visited = dict() for source in graph: visited.update({source:", "in graph: for start in graph[end]: if start not in inverse_graph: inverse_graph.update({start: [end]})", "shots=num_shots, max_credits=5) except Exception: sleep(900) logger.critical('Exception occurred, retrying\\nQubits %d - Execution %d -", "logger.debug('create_path() - max:\\n%s', str(max)) count = max - 1 changed = True visiting", "= n_qubits max_qubits = len(self.__path) logger.debug('create() - N qubits: %s', str(self.__n_qubits)) logger.debug('create() -", "elif control in self.__coupling_map[target]: logger.log(logging.VERBOSE, 'cx() - inverse-cnot: (%s, %s)', str(control), str(target)) circuit.h(control_qubit)", "= False class Utility(object): def __init__(self, coupling_map): self.__coupling_map = dict() self.__inverse_coupling_map = dict()", "range(n_qubits - stop): sorted_v.append(reverse[connected[n + stop]]) for n in range(stop): sorted_v.append(reverse[connected[n]]) value =", "< self.__n_qubits: logger.critical('create() - Can use only up to %s qubits', str(max_qubits)) exit(2)", "size = 16 # device = 'ibmqx_qasm_simulator' else: logger.critical('launch_exp() - Too much qubits", "- counts:\\n%s', str(counts)) sorted_c = sorted(counts.items(), key=operator.itemgetter(1), reverse=True) filename = directory + device", "on an \"AS IS\" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,", "and backend_status['available'] is False) \\ or ('busy' in backend_status and backend_status['busy'] is True):", "- Queries %d ---- Waiting for credits to replenish...', n_qubits, oracle, execution, num_shots)", "for qubit in sorted_c: if count <= 0: break if i >= s_0:", "i = 0 for qubit in sorted_c: if i >= s_0: circuit.iden(quantum_r[qubit[0]]) else:", "logger.critical('Exception occurred, retrying\\nQubits %d - Oracle %s - Execution %d - Queries %d',", 
"if node not in inverse_graph: inverse_graph.update({node: []}) # find the most connected qubit", "= list(zip(*sorted_c))[0] logger.debug('envariance() - connected:\\n%s', str(connected)) self.__n_qubits = 0 self.__connected.clear() return connected def", "num_shots) + 'queries_' + oracle + '_' + str( n_qubits) + '_qubits_parity.txt' os.makedirs(os.path.dirname(filename),", "str(num_shots) + '_' + str( n_qubits) + '_qubits_envariance.txt' os.makedirs(os.path.dirname(filename), exist_ok=True) out_f = open(filename,", "circuit.h(quantum_r[qubit]) else: if initial is True: if x is True: circuit.x(quantum_r[qubit]) else: circuit.h(quantum_r[qubit])", "classical_r, n_qubits) sorted_c = sorted(self.__connected.items(), key=operator.itemgetter(0)) connected = list(zip(*sorted_c))[0] logger.debug('envariance() - connected:\\n%s', str(connected))", "oracle=oracle, num_shots=num_shots, directory=directory) return try: counts = result.get_counts('parity') except Exception: logger.critical('Exception occurred, retrying\\nQubits", "count > 0: logger.debug('create_path() - visiting:\\n%s - %s', str(visiting), str(to_connect[visiting])) # for visiting", "key=operator.itemgetter(1), reverse=True) filename = directory + device + '/' + oracle + '/'", "n_qubits, x=False, oracle=oracle) connected = list(self.__connected.keys()) logger.debug('parity() - connected:\\n%s', str(connected)) self.__n_qubits = 0", "+ stop + 1]]) logger.log(logging.VERBOSE, 'launch_exp() - connected[n+stop+1], sorted_v[n+2] in 2nd for loop:", "target_qubit) elif control in self.__coupling_map[target]: logger.log(logging.VERBOSE, 'cx() - inverse-cnot: (%s, %s)', str(control), str(target))", "%d - Execution %d - Shots %d', n_qubits, execution, num_shots) envariance_exec(execution, device, utility,", "- connected[n+1], sorted_v[n+1] in 2nd for loop: %s,%s', str(connected[n + 1]), str(sorted_v[n +", "See the License for the specific language governing permissions and # limitations under", "permissions and # 
limitations under the License. # ============================================================================= __author__ = \"<NAME>\" __copyright__", "in to_connect: if count <= 0: break for node in plain_map[to_connect[visiting]]: if count", "+ str(num_shots) + '_' + str( n_qubits) + '_qubits_envariance.txt' os.makedirs(os.path.dirname(filename), exist_ok=True) out_f =", "find_max(ranks): logger.debug('ranks:\\n%s', str(ranks)) most_connected = max(ranks.items(), key=operator.itemgetter(1))[0] found = [most_connected, ranks[most_connected]] logger.debug('max: %s',", "= max - 1 changed = True visiting = 0 while count >", "self.__inverse_coupling_map.clear() self.__coupling_map.clear() self.__path.clear() self.__most_connected.clear() def explore(self, source, visiting, visited, ranks): for next in", "# http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in", "x is True: self.place_x(circuit, quantum_r) self.measure(circuit, quantum_r, classical_r) def envariance(self, circuit, quantum_r, classical_r,", "return found # create a valid path that connect qubits used in the", "replenish...', n_qubits, oracle, execution, num_shots) while Q_program.get_api().get_my_credits()['remaining'] < 3: sleep(900) logger.critical('Credits replenished, resuming", "global. 
\"../qiskit-sdk-py\") from qiskit import QuantumProgram import Qconfig logger = logging.getLogger('utility') logger.addHandler(myLogger.MyHandler()) logger.setLevel(logging.CRITICAL)", "else: if initial is True: if x is True: circuit.x(quantum_r[qubit]) else: circuit.h(quantum_r[qubit]) #", "i[0][::-1] sorted_v = [] for n in range(n_qubits - stop): sorted_v.append(reverse[connected[n + stop]])", "self.start_explore(self.__coupling_map, self.__ranks) self.__most_connected = self.find_max(self.__ranks) self.create_path(self.__most_connected[0], plain_map=self.__plain_map) else: logger.critical('init() - Null argument: coupling_map')", "logger.log(logging.VERBOSE, 'cx() - cnot: (%s, %s)', str(control), str(target)) circuit.cx(control_qubit, target_qubit) elif control in", "in graph[end]: if start not in inverse_graph: inverse_graph.update({start: [end]}) else: inverse_graph[start].append(end) for node", "while Q_program.get_api().get_my_credits()['remaining'] < 3: sleep(900) logger.critical('Credits replenished, resuming execution') try: result = Q_program.execute([\"envariance\"],", "inverse_graph: inverse_graph.update({start: [end]}) else: inverse_graph[start].append(end) for node in graph: if node not in", "-1: if oracle == '11': logger.log(logging.VERBOSE, 'place_cx() - oracle = 11') self.cx(circuit, quantum_r[qubit],", "key=operator.itemgetter(0)) logger.log(logging.VERBOSE, 'place_x() - sorted_c:\\n%s', str(sorted_c)) s_0 = self.__n_qubits // 2 i =", "n_qubits, oracle, execution, num_shots) parity_exec(execution, device, utility, n_qubits=n_qubits, oracle=oracle, num_shots=num_shots, directory=directory) return quantum_r", "__version__ = \"2.0\" __email__ = \"<EMAIL>\" import os from time import sleep from", "device + '_' + str( num_shots) + 'queries_' + oracle + '_' +", "if start not in inverse_graph: inverse_graph.update({start: [end]}) else: inverse_graph[start].append(end) for node in graph:", "break if Q_program.get_api().get_my_credits()['remaining'] < 3: 
logger.critical('Qubits %d - Oracle %s - Execution %d", "= {} for end in graph: for start in graph[end]: if start not", "sorted_c = sorted(counts.items(), key=operator.itemgetter(1), reverse=True) filename = directory + device + '/' +", "# You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0", "import Qconfig logger = logging.getLogger('utility') logger.addHandler(myLogger.MyHandler()) logger.setLevel(logging.CRITICAL) logger.propagate = False class Utility(object): def", "for loop: %s', str(sorted_v)) for n in range(stop): sorted_v.append(reverse[connected[n + 1]]) logger.log(logging.VERBOSE, 'launch_exp()", "sorted_v) results.update({value: i[1]}) out_f.write(value + '\\t' + str(i[1]) + '\\n') out_f.close() # launch", "governing permissions and # limitations under the License. # ============================================================================= __author__ = \"<NAME>\"", "open(filename, 'w') # store counts in txt file and xlsx file out_f.write('VALUES\\t\\tCOUNTS\\n\\n') logger.debug('launch_exp()", "# create an inverted coupling-map for further use @staticmethod def invert_graph(graph, inverse_graph=None): if", "self.__n_qubits - 1 for qubit in sorted_c: if count <= 0: break if", "in txt file and xlsx file out_f.write('VALUES\\t\\tCOUNTS\\n\\n') stop = n_qubits // 2 for", "1 i = 0 for qubit in sorted_c: if i >= s_0: circuit.iden(quantum_r[qubit[0]])", "start, plain_map): self.__path.update({start: -1}) to_connect = [start] max = len(self.__coupling_map) logger.debug('create_path() - max:\\n%s',", "str(self.__n_qubits)) logger.debug('create() - Max qubits: %s', str(max_qubits)) if max_qubits < self.__n_qubits: logger.critical('create() -", "return connected # launch envariance experiment on the given device def envariance_exec(execution, device,", "the circuit def create(self, circuit, quantum_r, classical_r, n_qubits, x=True, oracle='11'): self.__n_qubits = n_qubits", "str(sorted_v[n + 1])) if (n + stop + 1) != 
n_qubits: sorted_v.append(reverse[connected[n +", "start not in inverse_graph: inverse_graph.update({start: [end]}) else: inverse_graph[start].append(end) for node in graph: if", "= dict() self.__inverse_coupling_map = dict() self.__plain_map = dict() self.__path = dict() self.__n_qubits =", "self.__path.update({start: -1}) to_connect = [start] max = len(self.__coupling_map) logger.debug('create_path() - max:\\n%s', str(max)) count", "in sorted_c: if i >= s_0: circuit.iden(quantum_r[qubit[0]]) else: circuit.x(quantum_r[qubit[0]]) i += 1 #", "+ stop + 1]), str(sorted_v[n + 2])) value = ''.join(str(v) for v in", "inverted coupling-map for further use @staticmethod def invert_graph(graph, inverse_graph=None): if inverse_graph is None:", "self.__most_connected = [] if coupling_map: self.__coupling_map = coupling_map.copy() logger.log(logging.DEBUG, 'init() - coupling_map:\\n%s', str(self.__coupling_map))", "Exception: logger.critical('Exception occurred, retrying\\nQubits %d - Oracle %s - Execution %d - Queries", "- 1 changed = True visiting = 0 while count > 0: logger.debug('create_path()", "cnot: (%s, %s)', str(control), str(target)) circuit.cx(control_qubit, target_qubit) elif control in self.__coupling_map[target]: logger.log(logging.VERBOSE, 'cx()", "def place_cx(self, circuit, quantum_r, oracle='11'): if not oracle == '00': logger.log(logging.VERBOSE, 'place_cx() -", "in self.__path: if count <= 0: break self.__connected.update({qubit: self.__path[qubit]}) count -= 1 logger.debug('create()", "__license__ = \"Apache\" __version__ = \"2.0\" __email__ = \"<EMAIL>\" import os from time", "qubit in self.__path: if count <= 0: break self.__connected.update({qubit: self.__path[qubit]}) count -= 1", "Exception occurred, retrying\\nQubits %d - Execution %d - Shots %d', n_qubits, execution, num_shots)", "utility, n_qubits=n_qubits, num_shots=num_shots, directory=directory) return logger.debug('launch_exp() - counts:\\n%s', str(counts)) sorted_c = sorted(counts.items(), 
key=operator.itemgetter(1),", "visited, ranks): for next in self.__coupling_map[visiting]: if next not in visited[source]: visited[source].append(next) if", "elif device == qx3 or device == qx5: if n_qubits <= 16: size", "1])) if (n + stop + 1) != n_qubits: sorted_v.append(reverse[connected[n + stop +", "ConnectionError: sleep(900) logger.critical('API Exception occurred, retrying\\nQubits %d - Execution %d - Shots %d',", "KIND, either express or implied. # See the License for the specific language", "(%s, %s)', str(control), str(target)) circuit.h(control_qubit) circuit.h(target_qubit) circuit.cx(target_qubit, control_qubit) circuit.h(control_qubit) circuit.h(target_qubit) else: logger.critical('cx() -", "path:\\n%s', str(self.__path)) if node not in to_connect: to_connect.append(node) visiting += 1 logger.debug('create_path() -", "- Queries %d', n_qubits, oracle, execution, num_shots) parity_exec(execution, device, utility, n_qubits=n_qubits, oracle=oracle, num_shots=num_shots,", "control in self.__coupling_map[target]: logger.log(logging.VERBOSE, 'cx() - inverse-cnot: (%s, %s)', str(control), str(target)) circuit.h(control_qubit) circuit.h(target_qubit)", "str(target)) circuit.cx(control_qubit, target_qubit) elif control in self.__coupling_map[target]: logger.log(logging.VERBOSE, 'cx() - inverse-cnot: (%s, %s)',", "logger.debug('create_path() - path:\\n%s', str(self.__path)) if node not in to_connect: to_connect.append(node) visiting += 1", "qubits for %s !', device) exit(2) elif device == online_sim: if n_qubits <=", "device.') exit(3) Q_program = QuantumProgram() try: Q_program.set_api(Qconfig.APItoken, Qconfig.config[\"url\"]) # set the APIToken and", "connected = list(self.__connected.keys()) logger.debug('parity() - connected:\\n%s', str(connected)) self.__n_qubits = 0 self.__connected.clear() return connected", "Parma, Italy. All Rights Reserved. 
# # Licensed under the Apache License, Version", "counts in txt file and xlsx file out_f.write('VALUES\\t\\tCOUNTS\\n\\n') stop = n_qubits // 2", "qubit in self.__connected: if qubit != start: circuit.h(quantum_r[qubit]) else: if initial is True:", "in self.__connected: if qubit != start: circuit.h(quantum_r[qubit]) else: if initial is True: if", "if not oracle == '00': logger.log(logging.VERBOSE, 'place_cx() - oracle != 00') stop =", "'execution' + str( execution) + '/' + device + '_' + str(num_shots) +", "'launch_exp() - reverse in for 1st loop: %s', str(reverse)) sorted_v = [reverse[connected[0]]] logger.log(logging.VERBOSE,", "sorted_v = [reverse[connected[0]]] logger.log(logging.VERBOSE, 'launch_exp() - connected[0] in 1st for loop: %s', str(connected[0]))", "for node in plain_map[to_connect[visiting]]: if count <= 0: break if node not in", "- Shots %d ---- Waiting for credits to replenish...', n_qubits, execution, num_shots) while", "import myLogger import operator import sys sys.path.append( # solve the relative dependencies if", "str(self.__plain_map)) self.start_explore(self.__coupling_map, self.__ranks) self.__most_connected = self.find_max(self.__ranks) self.create_path(self.__most_connected[0], plain_map=self.__plain_map) else: logger.critical('init() - Null argument:", "logger = logging.getLogger('utility') logger.addHandler(myLogger.MyHandler()) logger.setLevel(logging.CRITICAL) logger.propagate = False class Utility(object): def __init__(self, coupling_map):", "oracle, execution, num_shots) while Q_program.get_api().get_my_credits()['remaining'] < 3: sleep(900) logger.critical('Credits replenished, resuming execution') try:", "s_0: circuit.iden(quantum_r[qubit[0]]) else: circuit.x(quantum_r[qubit[0]]) i += 1 # final measure def measure(self, circuit,", "- Oracle %s - Execution %d - Queries %d ---- Waiting for credits", "= self.__n_qubits // 2 for qubit in self.__connected: if self.__connected[qubit] != -1: if", "directory='Data_Parity/'): 
os.makedirs(os.path.dirname(directory), exist_ok=True) size = 0 results = dict() if device == qx2", "+ '_qubits_parity.txt' os.makedirs(os.path.dirname(filename), exist_ok=True) out_f = open(filename, 'w') # store counts in txt", "logger.addHandler(myLogger.MyHandler()) logger.setLevel(logging.CRITICAL) logger.propagate = False class Utility(object): def __init__(self, coupling_map): self.__coupling_map = dict()", "qubit in sorted_c: if count <= 0: break if i >= s_0: circuit.x(quantum_r[qubit[0]])", "ANY KIND, either express or implied. # See the License for the specific", "self.__n_qubits = 0 self.__ranks = dict() self.__connected = dict() self.__most_connected = [] if", "backend_status and backend_status['available'] is False) \\ or ('busy' in backend_status and backend_status['busy'] is", "+ '\\n') out_f.close() # launch parity experiment on the given device def parity_exec(execution,", "WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See", "if node not in to_connect: to_connect.append(node) visiting += 1 logger.debug('create_path() - path:\\n%s', str(self.__path))", "from the Git repo and use like a global. 
\"../qiskit-sdk-py\") from qiskit import", "- Null argument: coupling_map') exit(1) def close(self): self.__ranks.clear() self.__inverse_coupling_map.clear() self.__coupling_map.clear() self.__path.clear() self.__most_connected.clear() def", "self.__n_qubits = n_qubits max_qubits = len(self.__path) logger.debug('create() - N qubits: %s', str(self.__n_qubits)) logger.debug('create()", "except Exception: logger.critical('Exception occurred, retrying\\nQubits %d - Oracle %s - Execution %d -", "wait=2, timeout=1000, shots=num_shots, max_credits=5) except Exception: sleep(900) logger.critical('Exception occurred, retrying\\nQubits %d - Oracle", "< 3: logger.critical('Qubits %d - Execution %d - Shots %d ---- Waiting for", "try: counts = result.get_counts(\"envariance\") except Exception: logger.critical('Exception occurred, retrying\\nQubits %d - Execution %d", "quantum_r): sorted_c = sorted(self.__connected.items(), key=operator.itemgetter(0)) logger.log(logging.VERBOSE, 'place_x() - sorted_c:\\n%s', str(sorted_c)) s_0 = self.__n_qubits", "+ coupling_map[i]}) logger.debug('init() - plain map:\\n%s', str(self.__plain_map)) self.start_explore(self.__coupling_map, self.__ranks) self.__most_connected = self.find_max(self.__ranks) self.create_path(self.__most_connected[0],", "xlsx file out_f.write('VALUES\\t\\tCOUNTS\\n\\n') logger.debug('launch_exp() - oredred_q:\\n%s', str(connected)) stop = n_qubits // 2 for", "self.__n_qubits: logger.critical('create() - Can use only up to %s qubits', str(max_qubits)) exit(2) count", "break if Q_program.get_api().get_my_credits()['remaining'] < 3: logger.critical('Qubits %d - Execution %d - Shots %d", "Shots %d', n_qubits, execution, num_shots) envariance_exec(execution, device, utility, n_qubits=n_qubits, num_shots=num_shots, directory=directory) return quantum_r", "to replenish...', n_qubits, oracle, execution, num_shots) while Q_program.get_api().get_my_credits()['remaining'] < 3: sleep(900) logger.critical('Credits replenished,", "+ 
'_' + str( num_shots) + 'queries_' + oracle + '_' + str(", "operator import sys sys.path.append( # solve the relative dependencies if you clone QISKit", "circuit, quantum_r, oracle='11'): if not oracle == '00': logger.log(logging.VERBOSE, 'place_cx() - oracle !=", "n_qubits=n_qubits, oracle=oracle, num_shots=num_shots, directory=directory) return try: counts = result.get_counts('parity') except Exception: logger.critical('Exception occurred,", "= result.get_counts('parity') except Exception: logger.critical('Exception occurred, retrying\\nQubits %d - Oracle %s - Execution", "size) circuit = Q_program.create_circuit(\"envariance\", [quantum_r], [classical_r]) connected = utility.envariance(circuit=circuit, quantum_r=quantum_r, classical_r=classical_r, n_qubits=n_qubits) QASM_source", "sleep(1800) logger.critical('%s is back online, resuming execution', device) except ConnectionError: logger.critical('Error getting backend", "- cnot: (%s, %s)', str(control), str(target)) circuit.cx(control_qubit, target_qubit) elif control in self.__coupling_map[target]: logger.log(logging.VERBOSE,", "utility, n_qubits=n_qubits, num_shots=num_shots, directory=directory) return try: counts = result.get_counts(\"envariance\") except Exception: logger.critical('Exception occurred,", "\"../qiskit-sdk-py\") from qiskit import QuantumProgram import Qconfig logger = logging.getLogger('utility') logger.addHandler(myLogger.MyHandler()) logger.setLevel(logging.CRITICAL) logger.propagate", "logger.critical('launch_exp() - Too much qubits for %s !', device) exit(1) elif device ==", "classical_r, n_qubits, x=True, oracle='11'): self.__n_qubits = n_qubits max_qubits = len(self.__path) logger.debug('create() - N", "the path created in create_path method def place_cx(self, circuit, quantum_r, oracle='11'): if not", "qubit in sorted_c: if i >= s_0: circuit.iden(quantum_r[qubit[0]]) else: circuit.x(quantum_r[qubit[0]]) i += 1", "[classical_r]) connected = utility.envariance(circuit=circuit, 
quantum_r=quantum_r, classical_r=classical_r, n_qubits=n_qubits) QASM_source = Q_program.get_qasm(\"envariance\") logger.debug('launch_exp() - QASM:\\n%s',", "= list(self.__connected.keys()) logger.debug('parity() - connected:\\n%s', str(connected)) self.__n_qubits = 0 self.__connected.clear() return connected #", "exit(3) Q_program = QuantumProgram() try: Q_program.set_api(Qconfig.APItoken, Qconfig.config[\"url\"]) # set the APIToken and API", "All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the", "occurred, retrying\\nQubits %d - Execution %d - Shots %d', n_qubits, execution, num_shots) envariance_exec(execution,", "file out_f.write('VALUES\\t\\tCOUNTS\\n\\n') logger.debug('launch_exp() - oredred_q:\\n%s', str(connected)) stop = n_qubits // 2 for i", "myLogger import operator import sys sys.path.append( # solve the relative dependencies if you", "[start] max = len(self.__coupling_map) logger.debug('create_path() - max:\\n%s', str(max)) count = max - 1", "size) classical_r = Q_program.create_classical_register(\"cr\", size) circuit = Q_program.create_circuit(\"envariance\", [quantum_r], [classical_r]) connected = utility.envariance(circuit=circuit,", "True: try: backend_status = Q_program.get_backend_status(device) if ('available' in backend_status and backend_status['available'] is False)", "timeout=1000, shots=num_shots, max_credits=5) except Exception: sleep(900) logger.critical('Exception occurred, retrying\\nQubits %d - Oracle %s", "%s', str(control), str(target)) exit(3) # place cnot gates based on the path created", "list(zip(*sorted_c))[0] logger.debug('envariance() - connected:\\n%s', str(connected)) self.__n_qubits = 0 self.__connected.clear() return connected def parity(self,", "__copyright__ = \"Copyright 2017, Quantum Information Science, University of Parma, Italy\" __license__ =", "return connected def parity(self, circuit, quantum_r, classical_r, n_qubits, oracle='11'): self.create(circuit, quantum_r, classical_r, 
n_qubits,", "on the path created in create_path method def place_cx(self, circuit, quantum_r, oracle='11'): if", "stop -= 1 # place Hadamard gates def place_h(self, circuit, start, quantum_r, initial=True,", "in range(n_qubits - stop): sorted_v.append(reverse[connected[n + stop]]) for n in range(stop): sorted_v.append(reverse[connected[n]]) value", "under the License is distributed on an \"AS IS\" BASIS, # WITHOUT WARRANTIES", "Queries %d', n_qubits, oracle, execution, num_shots) parity_exec(execution, device, utility, n_qubits=n_qubits, oracle=oracle, num_shots=num_shots, directory=directory)", "and xlsx file out_f.write('VALUES\\t\\tCOUNTS\\n\\n') logger.debug('launch_exp() - oredred_q:\\n%s', str(connected)) stop = n_qubits // 2", "if self.__connected[qubit] != -1: if oracle == '11': logger.log(logging.VERBOSE, 'place_cx() - oracle =", "\"AS IS\" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express", "loop: %s,%s', str(connected[n + 1]), str(sorted_v[n + 1])) if (n + stop +", "circuit.h(target_qubit) circuit.cx(target_qubit, control_qubit) circuit.h(control_qubit) circuit.h(target_qubit) else: logger.critical('cx() - Cannot connect qubit %s to", "in graph: if node not in inverse_graph: inverse_graph.update({node: []}) # find the most", "oracle=oracle, num_shots=num_shots, directory=directory) return quantum_r = Q_program.create_quantum_register(\"qr\", size) classical_r = Q_program.create_classical_register(\"cr\", size) circuit", "applicable law or agreed to in writing, software # distributed under the License", "quantum_r[qubit], quantum_r[self.__connected[qubit]], qubit, self.__connected[qubit]) elif oracle == '10': logger.log(logging.VERBOSE, 'place_cx() - oracle =", "0 self.__connected.clear() return connected def parity(self, circuit, quantum_r, classical_r, n_qubits, oracle='11'): self.create(circuit, quantum_r,", "'/' + device + '_' + str(num_shots) + '_' + str( n_qubits) +", "oracle = 10') if stop > 0: self.cx(circuit, quantum_r[qubit], 
quantum_r[self.__connected[qubit]], qubit, self.__connected[qubit]) stop", "- oracle = 10') if stop > 0: self.cx(circuit, quantum_r[qubit], quantum_r[self.__connected[qubit]], qubit, self.__connected[qubit])", "= utility.envariance(circuit=circuit, quantum_r=quantum_r, classical_r=classical_r, n_qubits=n_qubits) QASM_source = Q_program.get_qasm(\"envariance\") logger.debug('launch_exp() - QASM:\\n%s', str(QASM_source)) while", "circuit, start, quantum_r, initial=True, x=True): for qubit in self.__connected: if qubit != start:", "replenished, resuming execution') try: result = Q_program.execute(['parity'], backend=device, wait=2, timeout=1000, shots=num_shots, max_credits=5) except", "1 logger.debug('create_path() - path:\\n%s', str(self.__path)) if node not in to_connect: to_connect.append(node) visiting +=", "ranks) # create an inverted coupling-map for further use @staticmethod def invert_graph(graph, inverse_graph=None):", "CONDITIONS OF ANY KIND, either express or implied. # See the License for", "Max qubits: %s', str(max_qubits)) if max_qubits < self.__n_qubits: logger.critical('create() - Can use only", "logger.critical('Qubits %d - Execution %d - Shots %d ---- Waiting for credits to", "circuit.iden(quantum_r[qubit[0]]) i += 1 i = 0 for qubit in sorted_c: if i", "experiment on the given device def parity_exec(execution, device, utility, n_qubits, oracle='11', num_shots=1024, directory='Data_Parity/'):", "device) except ConnectionError: logger.critical('Error getting backend status, retrying...') sleep(900) continue except ValueError: logger.critical('Backend", "in to_connect: to_connect.append(node) visiting += 1 logger.debug('create_path() - path:\\n%s', str(self.__path)) def cx(self, circuit,", "break if node not in self.__path: self.__path.update({node: to_connect[visiting]}) count -= 1 logger.debug('create_path() -", "False class Utility(object): def __init__(self, coupling_map): self.__coupling_map = dict() self.__inverse_coupling_map = dict() 
self.__plain_map", "'cx() - cnot: (%s, %s)', str(control), str(target)) circuit.cx(control_qubit, target_qubit) elif control in self.__coupling_map[target]:", "or device == qx5: if n_qubits <= 16: size = 16 # device", "logger.critical('Exception occurred, retrying\\nQubits %d - Execution %d - Shots %d', n_qubits, execution, num_shots)", "circuit.h(quantum_r[qubit]) # place Pauli-X gates def place_x(self, circuit, quantum_r): sorted_c = sorted(self.__connected.items(), key=operator.itemgetter(0))", "= 16 else: logger.critical('launch_exp() - Unknown device.') exit(3) Q_program = QuantumProgram() try: Q_program.set_api(Qconfig.APItoken,", "%s - Execution %d - Queries %d', n_qubits, oracle, execution, num_shots) parity_exec(execution, device,", "writing, software # distributed under the License is distributed on an \"AS IS\"", "sorted(counts.items(), key=operator.itemgetter(1), reverse=True) filename = directory + device + '/' + oracle +", "n_qubits <= 16: size = 16 # device = 'ibmqx_qasm_simulator' else: logger.critical('launch_exp() -", "i = 0 count = self.__n_qubits - 1 for qubit in sorted_c: if", "os from time import sleep from devices import * import logging import myLogger", "initial=True, x=True): for qubit in self.__connected: if qubit != start: circuit.h(quantum_r[qubit]) else: if", "map:\\n%s', str(self.__inverse_coupling_map)) for i in coupling_map: self.__plain_map.update({i: self.__inverse_coupling_map[i] + coupling_map[i]}) logger.debug('init() - plain", "if x is True: self.place_x(circuit, quantum_r) self.measure(circuit, quantum_r, classical_r) def envariance(self, circuit, quantum_r,", "quantum_r, classical_r) def envariance(self, circuit, quantum_r, classical_r, n_qubits): self.create(circuit, quantum_r, classical_r, n_qubits) sorted_c", "the most connected qubit @staticmethod def find_max(ranks): logger.debug('ranks:\\n%s', str(ranks)) most_connected = max(ranks.items(), key=operator.itemgetter(1))[0]", "+ str( execution) + '/' + device + '_' + str( 
num_shots) +", "compliance with the License. # You may obtain a copy of the License", "%s,%s', str(connected[n + 1]), str(sorted_v[n + 1])) if (n + stop + 1)", "= 'ibmqx_qasm_simulator' else: logger.critical('launch_exp() - Too much qubits for %s !', device) exit(1)", "in graph: visited.update({source: []}) self.explore(source, source, visited, ranks) # create an inverted coupling-map", "== '00': logger.log(logging.VERBOSE, 'place_cx() - oracle != 00') stop = self.__n_qubits // 2", "import QuantumProgram import Qconfig logger = logging.getLogger('utility') logger.addHandler(myLogger.MyHandler()) logger.setLevel(logging.CRITICAL) logger.propagate = False class", "%s)', str(control), str(target)) circuit.cx(control_qubit, target_qubit) elif control in self.__coupling_map[target]: logger.log(logging.VERBOSE, 'cx() - inverse-cnot:", "---- Waiting for credits to replenish...', n_qubits, execution, num_shots) while Q_program.get_api().get_my_credits()['remaining'] < 3:", "str(connected[0])) logger.log(logging.VERBOSE, 'launch_exp() - sorted_v in 1st for loop: %s', str(sorted_v)) for n", "dependencies if you clone QISKit from the Git repo and use like a", "except Exception: sleep(900) logger.critical('Exception occurred, retrying\\nQubits %d - Execution %d - Shots %d',", "while True: try: backend_status = Q_program.get_backend_status(device) if ('available' in backend_status and backend_status['available'] is", "exist_ok=True) out_f = open(filename, 'w') # store counts in txt file and xlsx", "return try: counts = result.get_counts('parity') except Exception: logger.critical('Exception occurred, retrying\\nQubits %d - Oracle", "logger.log(logging.VERBOSE, 'launch_exp() - sorted_v in 1st for loop: %s', str(sorted_v)) for n in", "= Q_program.create_quantum_register(\"qr\", size) classical_r = Q_program.create_classical_register(\"cr\", size) circuit = Q_program.create_circuit(\"envariance\", [quantum_r], [classical_r]) connected", "circuit, quantum_r, classical_r, n_qubits, 
x=True, oracle='11'): self.__n_qubits = n_qubits max_qubits = len(self.__path) logger.debug('create()", "i in coupling_map: self.__plain_map.update({i: self.__inverse_coupling_map[i] + coupling_map[i]}) logger.debug('init() - plain map:\\n%s', str(self.__plain_map)) self.start_explore(self.__coupling_map,", "oracle == '10': logger.log(logging.VERBOSE, 'place_cx() - oracle = 10') if stop > 0:", "= self.__n_qubits - 1 for qubit in sorted_c: if count <= 0: break", "qubit in self.__connected: if self.__connected[qubit] != -1: if oracle == '11': logger.log(logging.VERBOSE, 'place_cx()", "classical_r = Q_program.create_classical_register(\"cr\", size) circuit = Q_program.create_circuit(\"envariance\", [quantum_r], [classical_r]) connected = utility.envariance(circuit=circuit, quantum_r=quantum_r,", "classical_r = Q_program.create_classical_register(\"cr\", size) circuit = Q_program.create_circuit('parity', [quantum_r], [classical_r]) connected = utility.parity(circuit=circuit, quantum_r=quantum_r,", "final measure def measure(self, circuit, quantum_r, classical_r): for qubit in self.__connected: circuit.measure(quantum_r[qubit], classical_r[qubit])", "oracle=oracle) QASM_source = Q_program.get_qasm('parity') logger.debug('launch_exp() - QASM:\\n%s', str(QASM_source)) while True: try: backend_status =", "n_qubits=n_qubits, num_shots=num_shots, directory=directory) return try: counts = result.get_counts(\"envariance\") except Exception: logger.critical('Exception occurred, retrying\\nQubits", "self.__path.clear() self.__most_connected.clear() def explore(self, source, visiting, visited, ranks): for next in self.__coupling_map[visiting]: if", "close(self): self.__ranks.clear() self.__inverse_coupling_map.clear() self.__coupling_map.clear() self.__path.clear() self.__most_connected.clear() def explore(self, source, visiting, visited, ranks): for", "- Shots %d', n_qubits, execution, num_shots) envariance_exec(execution, device, utility, n_qubits=n_qubits, num_shots=num_shots, 
directory=directory) return", "visited[source]: visited[source].append(next) if next not in ranks: ranks.update({next: 0}) ranks[next] = ranks[next] +", "device == qx4: if n_qubits <= 5: size = 5 # device =", "%d', n_qubits, execution, num_shots) envariance_exec(execution, device, utility, n_qubits=n_qubits, num_shots=num_shots, directory=directory) return quantum_r =", "for node in graph: if node not in inverse_graph: inverse_graph.update({node: []}) # find", "self.__most_connected.clear() def explore(self, source, visiting, visited, ranks): for next in self.__coupling_map[visiting]: if next", "device == qx5: if n_qubits <= 16: size = 16 # device =", "(the \"License\"); # you may not use this file except in compliance with", "# Unless required by applicable law or agreed to in writing, software #", "1st for loop: %s', str(sorted_v)) for n in range(stop): sorted_v.append(reverse[connected[n + 1]]) logger.log(logging.VERBOSE,", "the APIToken and API url except ConnectionError: sleep(900) logger.critical('API Exception occurred, retrying\\nQubits %d", "qubit in self.__connected: circuit.measure(quantum_r[qubit], classical_r[qubit]) # create the circuit def create(self, circuit, quantum_r,", "by applicable law or agreed to in writing, software # distributed under the", "device == online_sim: if n_qubits <= 5: size = 5 elif n_qubits <=", "self.create(circuit, quantum_r, classical_r, n_qubits) sorted_c = sorted(self.__connected.items(), key=operator.itemgetter(0)) connected = list(zip(*sorted_c))[0] logger.debug('envariance() -", "'execution' + str( execution) + '/' + device + '_' + str( num_shots)", "n_qubits) sorted_c = sorted(self.__connected.items(), key=operator.itemgetter(0)) connected = list(zip(*sorted_c))[0] logger.debug('envariance() - connected:\\n%s', str(connected)) self.__n_qubits", "stop = n_qubits // 2 for i in sorted_c: reverse = i[0][::-1] sorted_v", "available, waiting...') sleep(900) continue break if Q_program.get_api().get_my_credits()['remaining'] 
< 3: logger.critical('Qubits %d - Oracle", "-= 1 # place Hadamard gates def place_h(self, circuit, start, quantum_r, initial=True, x=True):", "file except in compliance with the License. # You may obtain a copy", "device, utility, n_qubits=n_qubits, num_shots=num_shots, directory=directory) return try: counts = result.get_counts(\"envariance\") except Exception: logger.critical('Exception", "some sort of centrality algorithm def start_explore(self, graph, ranks): visited = dict() for", "= Q_program.create_classical_register(\"cr\", size) circuit = Q_program.create_circuit(\"envariance\", [quantum_r], [classical_r]) connected = utility.envariance(circuit=circuit, quantum_r=quantum_r, classical_r=classical_r,", "in self.__coupling_map[control]: logger.log(logging.VERBOSE, 'cx() - cnot: (%s, %s)', str(control), str(target)) circuit.cx(control_qubit, target_qubit) elif", "- stop): sorted_v.append(reverse[connected[n + stop]]) for n in range(stop): sorted_v.append(reverse[connected[n]]) value = ''.join(str(v)", "== qx2 or device == qx4: if n_qubits <= 5: size = 5", "Utility(object): def __init__(self, coupling_map): self.__coupling_map = dict() self.__inverse_coupling_map = dict() self.__plain_map = dict()", "- Too much qubits for %s !', device) exit(1) elif device == qx3", "- inverse coupling map:\\n%s', str(self.__inverse_coupling_map)) for i in coupling_map: self.__plain_map.update({i: self.__inverse_coupling_map[i] + coupling_map[i]})", "device, utility, n_qubits=n_qubits, oracle=oracle, num_shots=num_shots, directory=directory) return quantum_r = Q_program.create_quantum_register(\"qr\", size) classical_r =", "- sorted_v in 1st for loop: %s', str(sorted_v)) for n in range(stop): sorted_v.append(reverse[connected[n", "sleep(900) continue break if Q_program.get_api().get_my_credits()['remaining'] < 3: logger.critical('Qubits %d - Execution %d -", "Too much qubits for %s !', device) exit(2) elif device == online_sim: if", "i[0][::-1] logger.log(logging.VERBOSE, 
'launch_exp() - reverse in for 1st loop: %s', str(reverse)) sorted_v =", "= dict() self.__plain_map = dict() self.__path = dict() self.__n_qubits = 0 self.__ranks =", "n_qubits) + '_qubits_envariance.txt' os.makedirs(os.path.dirname(filename), exist_ok=True) out_f = open(filename, 'w') # store counts in", "s_0 = self.__n_qubits // 2 i = 0 count = self.__n_qubits - 1", "create(self, circuit, quantum_r, classical_r, n_qubits, x=True, oracle='11'): self.__n_qubits = n_qubits max_qubits = len(self.__path)", "in 2nd for loop: %s,%s', str(connected[n + 1]), str(sorted_v[n + 1])) if (n", "- QASM:\\n%s', str(QASM_source)) while True: try: backend_status = Q_program.get_backend_status(device) if ('available' in backend_status", "logger.debug('ranks:\\n%s', str(ranks)) most_connected = max(ranks.items(), key=operator.itemgetter(1))[0] found = [most_connected, ranks[most_connected]] logger.debug('max: %s', str(found))", "visited[source].append(next) if next not in ranks: ranks.update({next: 0}) ranks[next] = ranks[next] + 1", "OR CONDITIONS OF ANY KIND, either express or implied. 
# See the License", "may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # #", "device) exit(1) elif device == qx3 or device == qx5: if n_qubits <=", "if coupling_map: self.__coupling_map = coupling_map.copy() logger.log(logging.DEBUG, 'init() - coupling_map:\\n%s', str(self.__coupling_map)) self.invert_graph(coupling_map, self.__inverse_coupling_map) logger.log(logging.DEBUG,", "%d - Shots %d ---- Waiting for credits to replenish...', n_qubits, execution, num_shots)", "self.__connected[qubit] != -1: if oracle == '11': logger.log(logging.VERBOSE, 'place_cx() - oracle = 11')", "oracle != 00') stop = self.__n_qubits // 2 for qubit in self.__connected: if", "the relative dependencies if you clone QISKit from the Git repo and use", "n_qubits=n_qubits, oracle=oracle) QASM_source = Q_program.get_qasm('parity') logger.debug('launch_exp() - QASM:\\n%s', str(QASM_source)) while True: try: backend_status", "dict() self.__path = dict() self.__n_qubits = 0 self.__ranks = dict() self.__connected = dict()", "qx3 or device == qx5: if n_qubits <= 16: size = 16 #", "given device def envariance_exec(execution, device, utility, n_qubits, num_shots=1024, directory='Data_Envariance/'): os.makedirs(os.path.dirname(directory), exist_ok=True) size =", "exit(3) # place cnot gates based on the path created in create_path method", "sorted_c: if i >= s_0: circuit.iden(quantum_r[qubit[0]]) else: circuit.x(quantum_r[qubit[0]]) i += 1 # final", "def close(self): self.__ranks.clear() self.__inverse_coupling_map.clear() self.__coupling_map.clear() self.__path.clear() self.__most_connected.clear() def explore(self, source, visiting, visited, ranks):", "str(control), str(target)) circuit.h(control_qubit) circuit.h(target_qubit) circuit.cx(target_qubit, control_qubit) circuit.h(control_qubit) circuit.h(target_qubit) else: logger.critical('cx() - Cannot connect", "is True: self.place_x(circuit, quantum_r) self.measure(circuit, quantum_r, classical_r) def envariance(self, 
circuit, quantum_r, classical_r, n_qubits):", "count = self.__n_qubits - 1 for qubit in sorted_c: if count <= 0:", "quantum_r, classical_r, n_qubits, x=False, oracle=oracle) connected = list(self.__connected.keys()) logger.debug('parity() - connected:\\n%s', str(connected)) self.__n_qubits", "= self.__n_qubits for qubit in self.__path: if count <= 0: break self.__connected.update({qubit: self.__path[qubit]})", "not oracle == '00': logger.log(logging.VERBOSE, 'place_cx() - oracle != 00') stop = self.__n_qubits", "<= 0: break if i >= s_0: circuit.x(quantum_r[qubit[0]]) else: circuit.iden(quantum_r[qubit[0]]) i += 1", "= QuantumProgram() try: Q_program.set_api(Qconfig.APItoken, Qconfig.config[\"url\"]) # set the APIToken and API url except", "self.__inverse_coupling_map = dict() self.__plain_map = dict() self.__path = dict() self.__n_qubits = 0 self.__ranks", "self.invert_graph(coupling_map, self.__inverse_coupling_map) logger.log(logging.DEBUG, 'init() - inverse coupling map:\\n%s', str(self.__inverse_coupling_map)) for i in coupling_map:", "'/' + device + '_' + str( num_shots) + 'queries_' + oracle +", "else: logger.critical('launch_exp() - Too much qubits for %s !', device) exit(2) elif device", "set the APIToken and API url except ConnectionError: sleep(900) logger.critical('API Exception occurred, retrying\\nQubits", "count <= 0: break if node not in self.__path: self.__path.update({node: to_connect[visiting]}) count -=", "2017, Quantum Information Science, University of Parma, Italy\" __license__ = \"Apache\" __version__ =", "n_qubits: sorted_v.append(reverse[connected[n + stop + 1]]) logger.log(logging.VERBOSE, 'launch_exp() - connected[n+stop+1], sorted_v[n+2] in 2nd", "str(connected[n + stop + 1]), str(sorted_v[n + 2])) value = ''.join(str(v) for v", "= 5 elif n_qubits <= 16: size = 16 else: logger.critical('launch_exp() - Unknown", "coupling_map: self.__coupling_map = coupling_map.copy() logger.log(logging.DEBUG, 'init() - coupling_map:\\n%s', 
str(self.__coupling_map)) self.invert_graph(coupling_map, self.__inverse_coupling_map) logger.log(logging.DEBUG, 'init()", "str(connected[n + 1]), str(sorted_v[n + 1])) if (n + stop + 1) !=", "+ 'queries_' + oracle + '_' + str( n_qubits) + '_qubits_parity.txt' os.makedirs(os.path.dirname(filename), exist_ok=True)", "0 self.__connected.clear() return connected # launch envariance experiment on the given device def", "logger.log(logging.VERBOSE, 'cx() - inverse-cnot: (%s, %s)', str(control), str(target)) circuit.h(control_qubit) circuit.h(target_qubit) circuit.cx(target_qubit, control_qubit) circuit.h(control_qubit)", "* import logging import myLogger import operator import sys sys.path.append( # solve the", "changed = True visiting = 0 while count > 0: logger.debug('create_path() - visiting:\\n%s", "try: counts = result.get_counts('parity') except Exception: logger.critical('Exception occurred, retrying\\nQubits %d - Oracle %s", "qubit @staticmethod def find_max(ranks): logger.debug('ranks:\\n%s', str(ranks)) most_connected = max(ranks.items(), key=operator.itemgetter(1))[0] found = [most_connected,", "- connected[0] in 1st for loop: %s', str(connected[0])) logger.log(logging.VERBOSE, 'launch_exp() - sorted_v in", "size) classical_r = Q_program.create_classical_register(\"cr\", size) circuit = Q_program.create_circuit('parity', [quantum_r], [classical_r]) connected = utility.parity(circuit=circuit,", "else: logger.critical('init() - Null argument: coupling_map') exit(1) def close(self): self.__ranks.clear() self.__inverse_coupling_map.clear() self.__coupling_map.clear() self.__path.clear()", "Information Science, University of Parma, Italy. All Rights Reserved. 
# # Licensed under", "create_path method def place_cx(self, circuit, quantum_r, oracle='11'): if not oracle == '00': logger.log(logging.VERBOSE,", "num_shots) parity_exec(execution, device, utility, n_qubits=n_qubits, oracle=oracle, num_shots=num_shots, directory=directory) return quantum_r = Q_program.create_quantum_register(\"qr\", size)", "connected = utility.envariance(circuit=circuit, quantum_r=quantum_r, classical_r=classical_r, n_qubits=n_qubits) QASM_source = Q_program.get_qasm(\"envariance\") logger.debug('launch_exp() - QASM:\\n%s', str(QASM_source))", "num_shots=num_shots, directory=directory) return try: counts = result.get_counts(\"envariance\") except Exception: logger.critical('Exception occurred, retrying\\nQubits %d", "'/' + oracle + '/' + 'execution' + str( execution) + '/' +", "+ '_' + str( n_qubits) + '_qubits_envariance.txt' os.makedirs(os.path.dirname(filename), exist_ok=True) out_f = open(filename, 'w')", "out_f.write('VALUES\\t\\tCOUNTS\\n\\n') logger.debug('launch_exp() - oredred_q:\\n%s', str(connected)) stop = n_qubits // 2 for i in", "in the circuit def create_path(self, start, plain_map): self.__path.update({start: -1}) to_connect = [start] max", "qubit %s to qubit %s', str(control), str(target)) exit(3) # place cnot gates based", "v in sorted_v) results.update({value: i[1]}) out_f.write(value + '\\t' + str(i[1]) + '\\n') out_f.close()", "path:\\n%s', str(self.__path)) def cx(self, circuit, control_qubit, target_qubit, control, target): if target in self.__coupling_map[control]:", "s_0: circuit.x(quantum_r[qubit[0]]) else: circuit.iden(quantum_r[qubit[0]]) i += 1 i = 0 for qubit in", "= self.__n_qubits // 2 i = 0 count = self.__n_qubits - 1 for", "Licensed under the Apache License, Version 2.0 (the \"License\"); # you may not", "< 3: logger.critical('Qubits %d - Oracle %s - Execution %d - Queries %d", "else: circuit.iden(quantum_r[qubit[0]]) i += 1 i = 0 for qubit in sorted_c: if", "circuit.cx(target_qubit, control_qubit) 
circuit.h(control_qubit) circuit.h(target_qubit) else: logger.critical('cx() - Cannot connect qubit %s to qubit", "\"<EMAIL>\" import os from time import sleep from devices import * import logging", "backend_status['available'] is False) \\ or ('busy' in backend_status and backend_status['busy'] is True): logger.critical('%s", "+ str( n_qubits) + '_qubits_parity.txt' os.makedirs(os.path.dirname(filename), exist_ok=True) out_f = open(filename, 'w') # store", "use only up to %s qubits', str(max_qubits)) exit(2) count = self.__n_qubits for qubit", "+ '_' + str(num_shots) + '_' + str( n_qubits) + '_qubits_envariance.txt' os.makedirs(os.path.dirname(filename), exist_ok=True)", "'launch_exp() - connected[n+stop+1], sorted_v[n+2] in 2nd for loop: %s%s', str(connected[n + stop +", "n_qubits, execution, num_shots) envariance_exec(execution, device, utility, n_qubits=n_qubits, num_shots=num_shots, directory=directory) return logger.debug('launch_exp() - counts:\\n%s',", "classical_r, n_qubits, oracle='11'): self.create(circuit, quantum_r, classical_r, n_qubits, x=False, oracle=oracle) connected = list(self.__connected.keys()) logger.debug('parity()", "is distributed on an \"AS IS\" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF", "= Q_program.execute([\"envariance\"], backend=device, wait=2, timeout=1000, shots=num_shots, max_credits=5) except Exception: sleep(900) logger.critical('Exception occurred, retrying\\nQubits", "[quantum_r], [classical_r]) connected = utility.parity(circuit=circuit, quantum_r=quantum_r, classical_r=classical_r, n_qubits=n_qubits, oracle=oracle) QASM_source = Q_program.get_qasm('parity') logger.debug('launch_exp()", "Science, University of Parma, Italy. All Rights Reserved. 
# # Licensed under the", "for %s !', device) exit(1) elif device == qx3 or device == qx5:", "stop]]) for n in range(stop): sorted_v.append(reverse[connected[n]]) value = ''.join(str(v) for v in sorted_v)", "back online, resuming execution', device) except ConnectionError: logger.critical('Error getting backend status, retrying...') sleep(900)", "online_sim: if n_qubits <= 5: size = 5 elif n_qubits <= 16: size", "while Q_program.get_backend_status(device)['available'] is False: sleep(1800) logger.critical('%s is back online, resuming execution', device) except", "is not available, waiting...') sleep(900) continue break if Q_program.get_api().get_my_credits()['remaining'] < 3: logger.critical('Qubits %d", "ConnectionError: logger.critical('Error getting backend status, retrying...') sleep(900) continue except ValueError: logger.critical('Backend is not", "from qiskit import QuantumProgram import Qconfig logger = logging.getLogger('utility') logger.addHandler(myLogger.MyHandler()) logger.setLevel(logging.CRITICAL) logger.propagate =", "resuming execution') try: result = Q_program.execute(['parity'], backend=device, wait=2, timeout=1000, shots=num_shots, max_credits=5) except Exception:", "target_qubit, control, target): if target in self.__coupling_map[control]: logger.log(logging.VERBOSE, 'cx() - cnot: (%s, %s)',", "import * import logging import myLogger import operator import sys sys.path.append( # solve", "self.__path = dict() self.__n_qubits = 0 self.__ranks = dict() self.__connected = dict() self.__most_connected", "Too much qubits for %s !', device) exit(1) elif device == qx3 or", "quantum_r, classical_r): for qubit in self.__connected: circuit.measure(quantum_r[qubit], classical_r[qubit]) # create the circuit def", "Q_program.get_qasm('parity') logger.debug('launch_exp() - QASM:\\n%s', str(QASM_source)) while True: try: backend_status = Q_program.get_backend_status(device) if ('available'", "%s', str(reverse)) sorted_v = [reverse[connected[0]]] 
logger.log(logging.VERBOSE, 'launch_exp() - connected[0] in 1st for loop:", "__author__ = \"<NAME>\" __copyright__ = \"Copyright 2017, Quantum Information Science, University of Parma,", "- Execution %d - Shots %d', n_qubits, execution, num_shots) envariance_exec(execution, device, utility, n_qubits=n_qubits,", "the License for the specific language governing permissions and # limitations under the", "'launch_exp() - connected[0] in 1st for loop: %s', str(connected[0])) logger.log(logging.VERBOSE, 'launch_exp() - sorted_v", "source in graph: visited.update({source: []}) self.explore(source, source, visited, ranks) # create an inverted", "self.place_x(circuit, quantum_r) self.measure(circuit, quantum_r, classical_r) def envariance(self, circuit, quantum_r, classical_r, n_qubits): self.create(circuit, quantum_r,", "self.__inverse_coupling_map[i] + coupling_map[i]}) logger.debug('init() - plain map:\\n%s', str(self.__plain_map)) self.start_explore(self.__coupling_map, self.__ranks) self.__most_connected = self.find_max(self.__ranks)", "circuit.iden(quantum_r[qubit[0]]) else: circuit.x(quantum_r[qubit[0]]) i += 1 # final measure def measure(self, circuit, quantum_r,", "inverse_graph = {} for end in graph: for start in graph[end]: if start", "a global. 
\"../qiskit-sdk-py\") from qiskit import QuantumProgram import Qconfig logger = logging.getLogger('utility') logger.addHandler(myLogger.MyHandler())", "logger.critical('cx() - Cannot connect qubit %s to qubit %s', str(control), str(target)) exit(3) #", "str(connected)) self.__n_qubits = 0 self.__connected.clear() return connected # launch envariance experiment on the", "Shots %d ---- Waiting for credits to replenish...', n_qubits, execution, num_shots) while Q_program.get_api().get_my_credits()['remaining']", "= n_qubits // 2 for i in sorted_c: reverse = i[0][::-1] sorted_v =", "place Pauli-X gates def place_x(self, circuit, quantum_r): sorted_c = sorted(self.__connected.items(), key=operator.itemgetter(0)) logger.log(logging.VERBOSE, 'place_x()", "replenished, resuming execution') try: result = Q_program.execute([\"envariance\"], backend=device, wait=2, timeout=1000, shots=num_shots, max_credits=5) except", "Q_program.set_api(Qconfig.APItoken, Qconfig.config[\"url\"]) # set the APIToken and API url except ConnectionError: sleep(900) logger.critical('API", "and xlsx file out_f.write('VALUES\\t\\tCOUNTS\\n\\n') stop = n_qubits // 2 for i in sorted_c:", "+ 1 self.explore(source, next, visited, ranks) # TODO Try using some sort of", "str( num_shots) + 'queries_' + oracle + '_' + str( n_qubits) + '_qubits_parity.txt'", "# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
#", "self.__coupling_map[control]: logger.log(logging.VERBOSE, 'cx() - cnot: (%s, %s)', str(control), str(target)) circuit.cx(control_qubit, target_qubit) elif control", "Q_program.get_api().get_my_credits()['remaining'] < 3: logger.critical('Qubits %d - Execution %d - Shots %d ---- Waiting", "self.__path.update({node: to_connect[visiting]}) count -= 1 logger.debug('create_path() - path:\\n%s', str(self.__path)) if node not in", "in backend_status and backend_status['busy'] is True): logger.critical('%s currently offline, waiting...', device) while Q_program.get_backend_status(device)['available']", "qubit %s', str(control), str(target)) exit(3) # place cnot gates based on the path", "= i[0][::-1] logger.log(logging.VERBOSE, 'launch_exp() - reverse in for 1st loop: %s', str(reverse)) sorted_v", "= 0 self.__ranks = dict() self.__connected = dict() self.__most_connected = [] if coupling_map:", "0: break if i >= s_0: circuit.x(quantum_r[qubit[0]]) else: circuit.iden(quantum_r[qubit[0]]) i += 1 i", "str(connected)) self.__n_qubits = 0 self.__connected.clear() return connected def parity(self, circuit, quantum_r, classical_r, n_qubits,", "Copyright 2017 Quantum Information Science, University of Parma, Italy. All Rights Reserved. 
#", "= True visiting = 0 while count > 0: logger.debug('create_path() - visiting:\\n%s -", "logger.critical('create() - Can use only up to %s qubits', str(max_qubits)) exit(2) count =", "[]}) self.explore(source, source, visited, ranks) # create an inverted coupling-map for further use", "node not in to_connect: to_connect.append(node) visiting += 1 logger.debug('create_path() - path:\\n%s', str(self.__path)) def", "max(ranks.items(), key=operator.itemgetter(1))[0] found = [most_connected, ranks[most_connected]] logger.debug('max: %s', str(found)) return found # create", "dict() self.__inverse_coupling_map = dict() self.__plain_map = dict() self.__path = dict() self.__n_qubits = 0", "str(connected)) stop = n_qubits // 2 for i in sorted_c: reverse = i[0][::-1]", "Qconfig.config[\"url\"]) # set the APIToken and API url except ConnectionError: sleep(900) logger.critical('API Exception", "== '11': logger.log(logging.VERBOSE, 'place_cx() - oracle = 11') self.cx(circuit, quantum_r[qubit], quantum_r[self.__connected[qubit]], qubit, self.__connected[qubit])", "not in inverse_graph: inverse_graph.update({node: []}) # find the most connected qubit @staticmethod def", "connected = list(zip(*sorted_c))[0] logger.debug('envariance() - connected:\\n%s', str(connected)) self.__n_qubits = 0 self.__connected.clear() return connected", "True visiting = 0 while count > 0: logger.debug('create_path() - visiting:\\n%s - %s',", "5 elif n_qubits <= 16: size = 16 else: logger.critical('launch_exp() - Unknown device.')", "os.makedirs(os.path.dirname(filename), exist_ok=True) out_f = open(filename, 'w') # store counts in txt file and", "else: circuit.h(quantum_r[qubit]) # place Pauli-X gates def place_x(self, circuit, quantum_r): sorted_c = sorted(self.__connected.items(),", "to_connect: to_connect.append(node) visiting += 1 logger.debug('create_path() - path:\\n%s', str(self.__path)) def cx(self, circuit, control_qubit,", "circuit.h(control_qubit) circuit.h(target_qubit) 
circuit.cx(target_qubit, control_qubit) circuit.h(control_qubit) circuit.h(target_qubit) else: logger.critical('cx() - Cannot connect qubit %s", "= \"<EMAIL>\" import os from time import sleep from devices import * import", "<= 16: size = 16 # device = 'ibmqx_qasm_simulator' else: logger.critical('launch_exp() - Too", "%d - Oracle %s - Execution %d - Queries %d ---- Waiting for", "logging.getLogger('utility') logger.addHandler(myLogger.MyHandler()) logger.setLevel(logging.CRITICAL) logger.propagate = False class Utility(object): def __init__(self, coupling_map): self.__coupling_map =", "self.__most_connected[0], quantum_r, initial=False) if x is True: self.place_x(circuit, quantum_r) self.measure(circuit, quantum_r, classical_r) def", "str( execution) + '/' + device + '_' + str(num_shots) + '_' +", "Version 2.0 (the \"License\"); # you may not use this file except in", "(n + stop + 1) != n_qubits: sorted_v.append(reverse[connected[n + stop + 1]]) logger.log(logging.VERBOSE,", "device + '_' + str(num_shots) + '_' + str( n_qubits) + '_qubits_envariance.txt' os.makedirs(os.path.dirname(filename),", "logger.setLevel(logging.CRITICAL) logger.propagate = False class Utility(object): def __init__(self, coupling_map): self.__coupling_map = dict() self.__inverse_coupling_map", "stop > 0: self.cx(circuit, quantum_r[qubit], quantum_r[self.__connected[qubit]], qubit, self.__connected[qubit]) stop -= 1 # place", "range(stop): sorted_v.append(reverse[connected[n + 1]]) logger.log(logging.VERBOSE, 'launch_exp() - connected[n+1], sorted_v[n+1] in 2nd for loop:", "quantum_r[self.__connected[qubit]], qubit, self.__connected[qubit]) elif oracle == '10': logger.log(logging.VERBOSE, 'place_cx() - oracle = 10')", "2 for qubit in self.__connected: if self.__connected[qubit] != -1: if oracle == '11':", "quantum_r) self.measure(circuit, quantum_r, classical_r) def envariance(self, circuit, quantum_r, classical_r, n_qubits): self.create(circuit, quantum_r, classical_r,", "found = 
[most_connected, ranks[most_connected]] logger.debug('max: %s', str(found)) return found # create a valid", "circuit def create_path(self, start, plain_map): self.__path.update({start: -1}) to_connect = [start] max = len(self.__coupling_map)", "path that connect qubits used in the circuit def create_path(self, start, plain_map): self.__path.update({start:", "next, visited, ranks) # TODO Try using some sort of centrality algorithm def", "directory=directory) return try: counts = result.get_counts('parity') except Exception: logger.critical('Exception occurred, retrying\\nQubits %d -", "n_qubits=n_qubits) QASM_source = Q_program.get_qasm(\"envariance\") logger.debug('launch_exp() - QASM:\\n%s', str(QASM_source)) while True: try: backend_status =", "if count <= 0: break self.__connected.update({qubit: self.__path[qubit]}) count -= 1 logger.debug('create() - connected:\\n%s',", "logger.debug('launch_exp() - QASM:\\n%s', str(QASM_source)) while True: try: backend_status = Q_program.get_backend_status(device) if ('available' in", "Q_program.create_circuit(\"envariance\", [quantum_r], [classical_r]) connected = utility.envariance(circuit=circuit, quantum_r=quantum_r, classical_r=classical_r, n_qubits=n_qubits) QASM_source = Q_program.get_qasm(\"envariance\") logger.debug('launch_exp()", "for source in graph: visited.update({source: []}) self.explore(source, source, visited, ranks) # create an", "self.__most_connected = self.find_max(self.__ranks) self.create_path(self.__most_connected[0], plain_map=self.__plain_map) else: logger.critical('init() - Null argument: coupling_map') exit(1) def", "def explore(self, source, visiting, visited, ranks): for next in self.__coupling_map[visiting]: if next not", "an inverted coupling-map for further use @staticmethod def invert_graph(graph, inverse_graph=None): if inverse_graph is", "+ 1) != n_qubits: sorted_v.append(reverse[connected[n + stop + 1]]) logger.log(logging.VERBOSE, 'launch_exp() - connected[n+stop+1],", "to_connect = 
[start] max = len(self.__coupling_map) logger.debug('create_path() - max:\\n%s', str(max)) count = max", "for qubit in self.__connected: circuit.measure(quantum_r[qubit], classical_r[qubit]) # create the circuit def create(self, circuit,", "ranks: ranks.update({next: 0}) ranks[next] = ranks[next] + 1 self.explore(source, next, visited, ranks) #", "if oracle == '11': logger.log(logging.VERBOSE, 'place_cx() - oracle = 11') self.cx(circuit, quantum_r[qubit], quantum_r[self.__connected[qubit]],", "try: result = Q_program.execute([\"envariance\"], backend=device, wait=2, timeout=1000, shots=num_shots, max_credits=5) except Exception: sleep(900) logger.critical('Exception", "try: result = Q_program.execute(['parity'], backend=device, wait=2, timeout=1000, shots=num_shots, max_credits=5) except Exception: sleep(900) logger.critical('Exception", "qubit, self.__connected[qubit]) elif oracle == '10': logger.log(logging.VERBOSE, 'place_cx() - oracle = 10') if", "'w') # store counts in txt file and xlsx file out_f.write('VALUES\\t\\tCOUNTS\\n\\n') stop =", "start, quantum_r, initial=True, x=True): for qubit in self.__connected: if qubit != start: circuit.h(quantum_r[qubit])", "return quantum_r = Q_program.create_quantum_register(\"qr\", size) classical_r = Q_program.create_classical_register(\"cr\", size) circuit = Q_program.create_circuit(\"envariance\", [quantum_r],", "self.measure(circuit, quantum_r, classical_r) def envariance(self, circuit, quantum_r, classical_r, n_qubits): self.create(circuit, quantum_r, classical_r, n_qubits)", "str(visiting), str(to_connect[visiting])) # for visiting in to_connect: if count <= 0: break for", "coupling_map[i]}) logger.debug('init() - plain map:\\n%s', str(self.__plain_map)) self.start_explore(self.__coupling_map, self.__ranks) self.__most_connected = self.find_max(self.__ranks) self.create_path(self.__most_connected[0], plain_map=self.__plain_map)", "License. 
# ============================================================================= __author__ = \"<NAME>\" __copyright__ = \"Copyright 2017, Quantum Information Science,", "3: logger.critical('Qubits %d - Oracle %s - Execution %d - Queries %d ----", "centrality algorithm def start_explore(self, graph, ranks): visited = dict() for source in graph:", "# find the most connected qubit @staticmethod def find_max(ranks): logger.debug('ranks:\\n%s', str(ranks)) most_connected =", "Oracle %s - Execution %d - Queries %d ---- Waiting for credits to", "graph: visited.update({source: []}) self.explore(source, source, visited, ranks) # create an inverted coupling-map for", "connected:\\n%s', str(self.__connected)) self.place_h(circuit, self.__most_connected[0], quantum_r, x=x) self.place_cx(circuit, quantum_r, oracle=oracle) self.place_h(circuit, self.__most_connected[0], quantum_r, initial=False)", "= utility.parity(circuit=circuit, quantum_r=quantum_r, classical_r=classical_r, n_qubits=n_qubits, oracle=oracle) QASM_source = Q_program.get_qasm('parity') logger.debug('launch_exp() - QASM:\\n%s', str(QASM_source))", "else: logger.critical('launch_exp() - Unknown device.') exit(3) Q_program = QuantumProgram() try: Q_program.set_api(Qconfig.APItoken, Qconfig.config[\"url\"]) #", "sleep(900) continue break if Q_program.get_api().get_my_credits()['remaining'] < 3: logger.critical('Qubits %d - Oracle %s -", "- reverse in for 1st loop: %s', str(reverse)) sorted_v = [reverse[connected[0]]] logger.log(logging.VERBOSE, 'launch_exp()", "quantum_r = Q_program.create_quantum_register(\"qr\", size) classical_r = Q_program.create_classical_register(\"cr\", size) circuit = Q_program.create_circuit(\"envariance\", [quantum_r], [classical_r])", "if (n + stop + 1) != n_qubits: sorted_v.append(reverse[connected[n + stop + 1]])", "envariance_exec(execution, device, utility, n_qubits, num_shots=1024, directory='Data_Envariance/'): os.makedirs(os.path.dirname(directory), exist_ok=True) size = 0 results 
=", "str(sorted_v)) for n in range(stop): sorted_v.append(reverse[connected[n + 1]]) logger.log(logging.VERBOSE, 'launch_exp() - connected[n+1], sorted_v[n+1]", "inverse_graph.update({start: [end]}) else: inverse_graph[start].append(end) for node in graph: if node not in inverse_graph:", "method def place_cx(self, circuit, quantum_r, oracle='11'): if not oracle == '00': logger.log(logging.VERBOSE, 'place_cx()", "device, utility, n_qubits, num_shots=1024, directory='Data_Envariance/'): os.makedirs(os.path.dirname(directory), exist_ok=True) size = 0 results = dict()", "%s)', str(control), str(target)) circuit.h(control_qubit) circuit.h(target_qubit) circuit.cx(target_qubit, control_qubit) circuit.h(control_qubit) circuit.h(target_qubit) else: logger.critical('cx() - Cannot", "utility, n_qubits=n_qubits, oracle=oracle, num_shots=num_shots, directory=directory) return try: counts = result.get_counts('parity') except Exception: logger.critical('Exception", "True: circuit.x(quantum_r[qubit]) else: circuit.h(quantum_r[qubit]) # place Pauli-X gates def place_x(self, circuit, quantum_r): sorted_c", "value = ''.join(str(v) for v in sorted_v) results.update({value: i[1]}) out_f.write(value + '\\t' +", "- Cannot connect qubit %s to qubit %s', str(control), str(target)) exit(3) # place", "counts in txt file and xlsx file out_f.write('VALUES\\t\\tCOUNTS\\n\\n') logger.debug('launch_exp() - oredred_q:\\n%s', str(connected)) stop", "// 2 for i in sorted_c: reverse = i[0][::-1] logger.log(logging.VERBOSE, 'launch_exp() - reverse", "execution, num_shots) envariance_exec(execution, device, utility, n_qubits=n_qubits, num_shots=num_shots, directory=directory) return quantum_r = Q_program.create_quantum_register(\"qr\", size)", "- Can use only up to %s qubits', str(max_qubits)) exit(2) count = self.__n_qubits", "retrying\\nQubits %d - Execution %d - Shots %d', n_qubits, execution, num_shots) envariance_exec(execution, device,", "self.__inverse_coupling_map) logger.log(logging.DEBUG, 
'init() - inverse coupling map:\\n%s', str(self.__inverse_coupling_map)) for i in coupling_map: self.__plain_map.update({i:", "'_' + str( num_shots) + 'queries_' + oracle + '_' + str( n_qubits)", "sleep(900) logger.critical('API Exception occurred, retrying\\nQubits %d - Execution %d - Shots %d', n_qubits,", "start: circuit.h(quantum_r[qubit]) else: if initial is True: if x is True: circuit.x(quantum_r[qubit]) else:", "= i[0][::-1] sorted_v = [] for n in range(n_qubits - stop): sorted_v.append(reverse[connected[n +", "in sorted_c: if count <= 0: break if i >= s_0: circuit.x(quantum_r[qubit[0]]) else:", "- Unknown device.') exit(3) Q_program = QuantumProgram() try: Q_program.set_api(Qconfig.APItoken, Qconfig.config[\"url\"]) # set the", "1]]) logger.log(logging.VERBOSE, 'launch_exp() - connected[n+stop+1], sorted_v[n+2] in 2nd for loop: %s%s', str(connected[n +", "Q_program.get_backend_status(device)['available'] is False: sleep(1800) logger.critical('%s is back online, resuming execution', device) except ConnectionError:", "= Q_program.get_qasm(\"envariance\") logger.debug('launch_exp() - QASM:\\n%s', str(QASM_source)) while True: try: backend_status = Q_program.get_backend_status(device) if", "qubits for %s !', device) exit(1) elif device == qx3 or device ==", "if n_qubits <= 5: size = 5 elif n_qubits <= 16: size =", "quantum_r, x=x) self.place_cx(circuit, quantum_r, oracle=oracle) self.place_h(circuit, self.__most_connected[0], quantum_r, initial=False) if x is True:", "for next in self.__coupling_map[visiting]: if next not in visited[source]: visited[source].append(next) if next not", "device == qx2 or device == qx4: if n_qubits <= 5: size =", "much qubits for %s !', device) exit(2) elif device == online_sim: if n_qubits", "coupling_map: self.__plain_map.update({i: self.__inverse_coupling_map[i] + coupling_map[i]}) logger.debug('init() - plain map:\\n%s', str(self.__plain_map)) self.start_explore(self.__coupling_map, self.__ranks) self.__most_connected", "!= 
start: circuit.h(quantum_r[qubit]) else: if initial is True: if x is True: circuit.x(quantum_r[qubit])", "for visiting in to_connect: if count <= 0: break for node in plain_map[to_connect[visiting]]:", "circuit.measure(quantum_r[qubit], classical_r[qubit]) # create the circuit def create(self, circuit, quantum_r, classical_r, n_qubits, x=True,", "file out_f.write('VALUES\\t\\tCOUNTS\\n\\n') stop = n_qubits // 2 for i in sorted_c: reverse =", "envariance_exec(execution, device, utility, n_qubits=n_qubits, num_shots=num_shots, directory=directory) return quantum_r = Q_program.create_quantum_register(\"qr\", size) classical_r =", "quantum_r, classical_r, n_qubits, oracle='11'): self.create(circuit, quantum_r, classical_r, n_qubits, x=False, oracle=oracle) connected = list(self.__connected.keys())", "# TODO Try using some sort of centrality algorithm def start_explore(self, graph, ranks):", "size = 5 elif n_qubits <= 16: size = 16 else: logger.critical('launch_exp() -", "self.__connected.update({qubit: self.__path[qubit]}) count -= 1 logger.debug('create() - connected:\\n%s', str(self.__connected)) self.place_h(circuit, self.__most_connected[0], quantum_r, x=x)", "ranks): visited = dict() for source in graph: visited.update({source: []}) self.explore(source, source, visited,", "3: sleep(900) logger.critical('Credits replenished, resuming execution') try: result = Q_program.execute([\"envariance\"], backend=device, wait=2, timeout=1000,", "max_credits=5) except Exception: sleep(900) logger.critical('Exception occurred, retrying\\nQubits %d - Execution %d - Shots", "OF ANY KIND, either express or implied. # See the License for the", "coupling-map for further use @staticmethod def invert_graph(graph, inverse_graph=None): if inverse_graph is None: inverse_graph", "University of Parma, Italy. All Rights Reserved. 
# # Licensed under the Apache", "= logging.getLogger('utility') logger.addHandler(myLogger.MyHandler()) logger.setLevel(logging.CRITICAL) logger.propagate = False class Utility(object): def __init__(self, coupling_map): self.__coupling_map", "n_qubits, execution, num_shots) envariance_exec(execution, device, utility, n_qubits=n_qubits, num_shots=num_shots, directory=directory) return try: counts =", "'place_cx() - oracle = 10') if stop > 0: self.cx(circuit, quantum_r[qubit], quantum_r[self.__connected[qubit]], qubit,", "credits to replenish...', n_qubits, oracle, execution, num_shots) while Q_program.get_api().get_my_credits()['remaining'] < 3: sleep(900) logger.critical('Credits", "QISKit from the Git repo and use like a global. \"../qiskit-sdk-py\") from qiskit", "1 changed = True visiting = 0 while count > 0: logger.debug('create_path() -", "{} for end in graph: for start in graph[end]: if start not in", "except Exception: sleep(900) logger.critical('Exception occurred, retrying\\nQubits %d - Oracle %s - Execution %d", "- oracle = 11') self.cx(circuit, quantum_r[qubit], quantum_r[self.__connected[qubit]], qubit, self.__connected[qubit]) elif oracle == '10':", "node in plain_map[to_connect[visiting]]: if count <= 0: break if node not in self.__path:", "16: size = 16 # device = 'ibmqx_qasm_simulator' else: logger.critical('launch_exp() - Too much", "ranks.update({next: 0}) ranks[next] = ranks[next] + 1 self.explore(source, next, visited, ranks) # TODO", "is False) \\ or ('busy' in backend_status and backend_status['busy'] is True): logger.critical('%s currently", "== online_sim: if n_qubits <= 5: size = 5 elif n_qubits <= 16:", "num_shots) while Q_program.get_api().get_my_credits()['remaining'] < 3: sleep(900) logger.critical('Credits replenished, resuming execution') try: result =", "else: inverse_graph[start].append(end) for node in graph: if node not in inverse_graph: inverse_graph.update({node: []})", "circuit.x(quantum_r[qubit[0]]) i += 1 # final measure def 
measure(self, circuit, quantum_r, classical_r): for", "to_connect.append(node) visiting += 1 logger.debug('create_path() - path:\\n%s', str(self.__path)) def cx(self, circuit, control_qubit, target_qubit,", "0 count = self.__n_qubits - 1 for qubit in sorted_c: if count <=", "is None: inverse_graph = {} for end in graph: for start in graph[end]:", "logger.critical('Backend is not available, waiting...') sleep(900) continue break if Q_program.get_api().get_my_credits()['remaining'] < 3: logger.critical('Qubits", "circuit, quantum_r, classical_r, n_qubits): self.create(circuit, quantum_r, classical_r, n_qubits) sorted_c = sorted(self.__connected.items(), key=operator.itemgetter(0)) connected", "measure(self, circuit, quantum_r, classical_r): for qubit in self.__connected: circuit.measure(quantum_r[qubit], classical_r[qubit]) # create the", "coupling_map:\\n%s', str(self.__coupling_map)) self.invert_graph(coupling_map, self.__inverse_coupling_map) logger.log(logging.DEBUG, 'init() - inverse coupling map:\\n%s', str(self.__inverse_coupling_map)) for i", "num_shots=num_shots, directory=directory) return quantum_r = Q_program.create_quantum_register(\"qr\", size) classical_r = Q_program.create_classical_register(\"cr\", size) circuit =", "on the given device def envariance_exec(execution, device, utility, n_qubits, num_shots=1024, directory='Data_Envariance/'): os.makedirs(os.path.dirname(directory), exist_ok=True)", "of Parma, Italy\" __license__ = \"Apache\" __version__ = \"2.0\" __email__ = \"<EMAIL>\" import", "TODO Try using some sort of centrality algorithm def start_explore(self, graph, ranks): visited", "self.__path: if count <= 0: break self.__connected.update({qubit: self.__path[qubit]}) count -= 1 logger.debug('create() -", "specific language governing permissions and # limitations under the License. 
# ============================================================================= __author__", "is False: sleep(1800) logger.critical('%s is back online, resuming execution', device) except ConnectionError: logger.critical('Error", "in 1st for loop: %s', str(sorted_v)) for n in range(stop): sorted_v.append(reverse[connected[n + 1]])", "quantum_r[qubit], quantum_r[self.__connected[qubit]], qubit, self.__connected[qubit]) stop -= 1 # place Hadamard gates def place_h(self,", "i >= s_0: circuit.iden(quantum_r[qubit[0]]) else: circuit.x(quantum_r[qubit[0]]) i += 1 # final measure def", "# solve the relative dependencies if you clone QISKit from the Git repo", "str(self.__coupling_map)) self.invert_graph(coupling_map, self.__inverse_coupling_map) logger.log(logging.DEBUG, 'init() - inverse coupling map:\\n%s', str(self.__inverse_coupling_map)) for i in", "counts:\\n%s', str(counts)) sorted_c = sorted(counts.items(), key=operator.itemgetter(1), reverse=True) filename = directory + device +", "for qubit in sorted_c: if i >= s_0: circuit.iden(quantum_r[qubit[0]]) else: circuit.x(quantum_r[qubit[0]]) i +=", "%s', str(visiting), str(to_connect[visiting])) # for visiting in to_connect: if count <= 0: break", "count = max - 1 changed = True visiting = 0 while count", "for qubit in self.__connected: if self.__connected[qubit] != -1: if oracle == '11': logger.log(logging.VERBOSE,", "in inverse_graph: inverse_graph.update({start: [end]}) else: inverse_graph[start].append(end) for node in graph: if node not", "2 i = 0 count = self.__n_qubits - 1 for qubit in sorted_c:", "elif n_qubits <= 16: size = 16 else: logger.critical('launch_exp() - Unknown device.') exit(3)", "logger.debug('parity() - connected:\\n%s', str(connected)) self.__n_qubits = 0 self.__connected.clear() return connected # launch envariance", "import os from time import sleep from devices import * import logging import", "%d', n_qubits, execution, num_shots) envariance_exec(execution, device, utility, 
n_qubits=n_qubits, num_shots=num_shots, directory=directory) return try: counts", "+= 1 logger.debug('create_path() - path:\\n%s', str(self.__path)) def cx(self, circuit, control_qubit, target_qubit, control, target):", "create_path(self, start, plain_map): self.__path.update({start: -1}) to_connect = [start] max = len(self.__coupling_map) logger.debug('create_path() -", "or agreed to in writing, software # distributed under the License is distributed", "quantum_r, oracle=oracle) self.place_h(circuit, self.__most_connected[0], quantum_r, initial=False) if x is True: self.place_x(circuit, quantum_r) self.measure(circuit,", "is True: if x is True: circuit.x(quantum_r[qubit]) else: circuit.h(quantum_r[qubit]) # place Pauli-X gates", "'queries_' + oracle + '_' + str( n_qubits) + '_qubits_parity.txt' os.makedirs(os.path.dirname(filename), exist_ok=True) out_f", "execution, num_shots) parity_exec(execution, device, utility, n_qubits=n_qubits, oracle=oracle, num_shots=num_shots, directory=directory) return logger.debug('launch_exp() - counts:\\n%s',", "waiting...', device) while Q_program.get_backend_status(device)['available'] is False: sleep(1800) logger.critical('%s is back online, resuming execution',", "= dict() for source in graph: visited.update({source: []}) self.explore(source, source, visited, ranks) #", "= 10') if stop > 0: self.cx(circuit, quantum_r[qubit], quantum_r[self.__connected[qubit]], qubit, self.__connected[qubit]) stop -=", "n_qubits <= 5: size = 5 elif n_qubits <= 16: size = 16", "inverse_graph[start].append(end) for node in graph: if node not in inverse_graph: inverse_graph.update({node: []}) #", "+ 1])) if (n + stop + 1) != n_qubits: sorted_v.append(reverse[connected[n + stop", "- path:\\n%s', str(self.__path)) def cx(self, circuit, control_qubit, target_qubit, control, target): if target in", "str(i[1]) + '\\n') out_f.close() # launch parity experiment on the given device def", "[end]}) else: inverse_graph[start].append(end) for node in graph: if 
node not in inverse_graph: inverse_graph.update({node:", "= ranks[next] + 1 self.explore(source, next, visited, ranks) # TODO Try using some", "sys sys.path.append( # solve the relative dependencies if you clone QISKit from the", "under the Apache License, Version 2.0 (the \"License\"); # you may not use", "qiskit import QuantumProgram import Qconfig logger = logging.getLogger('utility') logger.addHandler(myLogger.MyHandler()) logger.setLevel(logging.CRITICAL) logger.propagate = False", "control_qubit) circuit.h(control_qubit) circuit.h(target_qubit) else: logger.critical('cx() - Cannot connect qubit %s to qubit %s',", "'\\t' + str(i[1]) + '\\n') out_f.close() # launch parity experiment on the given", "logger.log(logging.VERBOSE, 'launch_exp() - connected[0] in 1st for loop: %s', str(connected[0])) logger.log(logging.VERBOSE, 'launch_exp() -", "License. # You may obtain a copy of the License at # #", "key=operator.itemgetter(1))[0] found = [most_connected, ranks[most_connected]] logger.debug('max: %s', str(found)) return found # create a", "5: size = 5 # device = 'ibmqx_qasm_simulator' else: logger.critical('launch_exp() - Too much", "and # limitations under the License. 
# ============================================================================= __author__ = \"<NAME>\" __copyright__ =", "in self.__connected: circuit.measure(quantum_r[qubit], classical_r[qubit]) # create the circuit def create(self, circuit, quantum_r, classical_r,", "n_qubits, oracle, execution, num_shots) parity_exec(execution, device, utility, n_qubits=n_qubits, oracle=oracle, num_shots=num_shots, directory=directory) return try:", "device def parity_exec(execution, device, utility, n_qubits, oracle='11', num_shots=1024, directory='Data_Parity/'): os.makedirs(os.path.dirname(directory), exist_ok=True) size =", "else: logger.critical('cx() - Cannot connect qubit %s to qubit %s', str(control), str(target)) exit(3)", "logger.log(logging.VERBOSE, 'launch_exp() - connected[n+1], sorted_v[n+1] in 2nd for loop: %s,%s', str(connected[n + 1]),", "if next not in ranks: ranks.update({next: 0}) ranks[next] = ranks[next] + 1 self.explore(source,", "QASM_source = Q_program.get_qasm('parity') logger.debug('launch_exp() - QASM:\\n%s', str(QASM_source)) while True: try: backend_status = Q_program.get_backend_status(device)", "sorted_v.append(reverse[connected[n + stop]]) for n in range(stop): sorted_v.append(reverse[connected[n]]) value = ''.join(str(v) for v", "of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable", "quantum_r, classical_r, n_qubits): self.create(circuit, quantum_r, classical_r, n_qubits) sorted_c = sorted(self.__connected.items(), key=operator.itemgetter(0)) connected =", "- Execution %d - Queries %d', n_qubits, oracle, execution, num_shots) parity_exec(execution, device, utility,", "Q_program.create_classical_register(\"cr\", size) circuit = Q_program.create_circuit('parity', [quantum_r], [classical_r]) connected = utility.parity(circuit=circuit, quantum_r=quantum_r, classical_r=classical_r, n_qubits=n_qubits,", "# store counts in txt file and xlsx file out_f.write('VALUES\\t\\tCOUNTS\\n\\n') 
logger.debug('launch_exp() - oredred_q:\\n%s',", "utility.parity(circuit=circuit, quantum_r=quantum_r, classical_r=classical_r, n_qubits=n_qubits, oracle=oracle) QASM_source = Q_program.get_qasm('parity') logger.debug('launch_exp() - QASM:\\n%s', str(QASM_source)) while", "given device def parity_exec(execution, device, utility, n_qubits, oracle='11', num_shots=1024, directory='Data_Parity/'): os.makedirs(os.path.dirname(directory), exist_ok=True) size", "%s%s', str(connected[n + stop + 1]), str(sorted_v[n + 2])) value = ''.join(str(v) for", "connected # launch envariance experiment on the given device def envariance_exec(execution, device, utility,", "oracle='11'): self.__n_qubits = n_qubits max_qubits = len(self.__path) logger.debug('create() - N qubits: %s', str(self.__n_qubits))", "explore(self, source, visiting, visited, ranks): for next in self.__coupling_map[visiting]: if next not in", "- connected:\\n%s', str(connected)) self.__n_qubits = 0 self.__connected.clear() return connected # launch envariance experiment", "self.__ranks) self.__most_connected = self.find_max(self.__ranks) self.create_path(self.__most_connected[0], plain_map=self.__plain_map) else: logger.critical('init() - Null argument: coupling_map') exit(1)", "stop + 1]), str(sorted_v[n + 2])) value = ''.join(str(v) for v in sorted_v)", "currently offline, waiting...', device) while Q_program.get_backend_status(device)['available'] is False: sleep(1800) logger.critical('%s is back online,", "oracle == '00': logger.log(logging.VERBOSE, 'place_cx() - oracle != 00') stop = self.__n_qubits //", "solve the relative dependencies if you clone QISKit from the Git repo and", "visited.update({source: []}) self.explore(source, source, visited, ranks) # create an inverted coupling-map for further", "n_qubits, oracle, execution, num_shots) while Q_program.get_api().get_my_credits()['remaining'] < 3: sleep(900) logger.critical('Credits replenished, resuming execution')", "str(max_qubits)) exit(2) count = 
self.__n_qubits for qubit in self.__path: if count <= 0:", "device def envariance_exec(execution, device, utility, n_qubits, num_shots=1024, directory='Data_Envariance/'): os.makedirs(os.path.dirname(directory), exist_ok=True) size = 0", "License, Version 2.0 (the \"License\"); # you may not use this file except", "language governing permissions and # limitations under the License. # ============================================================================= __author__ =", "is True): logger.critical('%s currently offline, waiting...', device) while Q_program.get_backend_status(device)['available'] is False: sleep(1800) logger.critical('%s", "2nd for loop: %s,%s', str(connected[n + 1]), str(sorted_v[n + 1])) if (n +", "University of Parma, Italy\" __license__ = \"Apache\" __version__ = \"2.0\" __email__ = \"<EMAIL>\"", "= Q_program.get_qasm('parity') logger.debug('launch_exp() - QASM:\\n%s', str(QASM_source)) while True: try: backend_status = Q_program.get_backend_status(device) if", "2 for i in sorted_c: reverse = i[0][::-1] logger.log(logging.VERBOSE, 'launch_exp() - reverse in", "# launch parity experiment on the given device def parity_exec(execution, device, utility, n_qubits,", "count = self.__n_qubits for qubit in self.__path: if count <= 0: break self.__connected.update({qubit:", "0: break self.__connected.update({qubit: self.__path[qubit]}) count -= 1 logger.debug('create() - connected:\\n%s', str(self.__connected)) self.place_h(circuit, self.__most_connected[0],", "self.__connected: circuit.measure(quantum_r[qubit], classical_r[qubit]) # create the circuit def create(self, circuit, quantum_r, classical_r, n_qubits,", "dict() for source in graph: visited.update({source: []}) self.explore(source, source, visited, ranks) # create", "backend=device, wait=2, timeout=1000, shots=num_shots, max_credits=5) except Exception: sleep(900) logger.critical('Exception occurred, retrying\\nQubits %d -", "for i in sorted_c: reverse = i[0][::-1] 
logger.log(logging.VERBOSE, 'launch_exp() - reverse in for", "QASM_source = Q_program.get_qasm(\"envariance\") logger.debug('launch_exp() - QASM:\\n%s', str(QASM_source)) while True: try: backend_status = Q_program.get_backend_status(device)", "return quantum_r = Q_program.create_quantum_register(\"qr\", size) classical_r = Q_program.create_classical_register(\"cr\", size) circuit = Q_program.create_circuit('parity', [quantum_r],", "len(self.__path) logger.debug('create() - N qubits: %s', str(self.__n_qubits)) logger.debug('create() - Max qubits: %s', str(max_qubits))", "n in range(n_qubits - stop): sorted_v.append(reverse[connected[n + stop]]) for n in range(stop): sorted_v.append(reverse[connected[n]])", "- Max qubits: %s', str(max_qubits)) if max_qubits < self.__n_qubits: logger.critical('create() - Can use", "= [reverse[connected[0]]] logger.log(logging.VERBOSE, 'launch_exp() - connected[0] in 1st for loop: %s', str(connected[0])) logger.log(logging.VERBOSE,", "Q_program.create_classical_register(\"cr\", size) circuit = Q_program.create_circuit(\"envariance\", [quantum_r], [classical_r]) connected = utility.envariance(circuit=circuit, quantum_r=quantum_r, classical_r=classical_r, n_qubits=n_qubits)", "copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by", "1st loop: %s', str(reverse)) sorted_v = [reverse[connected[0]]] logger.log(logging.VERBOSE, 'launch_exp() - connected[0] in 1st", "str(sorted_c)) s_0 = self.__n_qubits // 2 i = 0 count = self.__n_qubits -", "0 self.__ranks = dict() self.__connected = dict() self.__most_connected = [] if coupling_map: self.__coupling_map", "you clone QISKit from the Git repo and use like a global. 
\"../qiskit-sdk-py\")", "qx4: if n_qubits <= 5: size = 5 # device = 'ibmqx_qasm_simulator' else:", "quantum_r, classical_r, n_qubits) sorted_c = sorted(self.__connected.items(), key=operator.itemgetter(0)) connected = list(zip(*sorted_c))[0] logger.debug('envariance() - connected:\\n%s',", "= 0 for qubit in sorted_c: if i >= s_0: circuit.iden(quantum_r[qubit[0]]) else: circuit.x(quantum_r[qubit[0]])", "# final measure def measure(self, circuit, quantum_r, classical_r): for qubit in self.__connected: circuit.measure(quantum_r[qubit],", "coupling map:\\n%s', str(self.__inverse_coupling_map)) for i in coupling_map: self.__plain_map.update({i: self.__inverse_coupling_map[i] + coupling_map[i]}) logger.debug('init() -", "source, visited, ranks) # create an inverted coupling-map for further use @staticmethod def", "+ '/' + device + '_' + str(num_shots) + '_' + str( n_qubits)", "n_qubits, execution, num_shots) while Q_program.get_api().get_my_credits()['remaining'] < 3: sleep(900) logger.critical('Credits replenished, resuming execution') try:", "%s - Execution %d - Queries %d ---- Waiting for credits to replenish...',", "if x is True: circuit.x(quantum_r[qubit]) else: circuit.h(quantum_r[qubit]) # place Pauli-X gates def place_x(self,", "in 1st for loop: %s', str(connected[0])) logger.log(logging.VERBOSE, 'launch_exp() - sorted_v in 1st for", "result = Q_program.execute(['parity'], backend=device, wait=2, timeout=1000, shots=num_shots, max_credits=5) except Exception: sleep(900) logger.critical('Exception occurred,", "elif device == online_sim: if n_qubits <= 5: size = 5 elif n_qubits", "logger.debug('create_path() - visiting:\\n%s - %s', str(visiting), str(to_connect[visiting])) # for visiting in to_connect: if", "def place_x(self, circuit, quantum_r): sorted_c = sorted(self.__connected.items(), key=operator.itemgetter(0)) logger.log(logging.VERBOSE, 'place_x() - sorted_c:\\n%s', str(sorted_c))", "%s qubits', str(max_qubits)) exit(2) count = self.__n_qubits for qubit in 
self.__path: if count", "def __init__(self, coupling_map): self.__coupling_map = dict() self.__inverse_coupling_map = dict() self.__plain_map = dict() self.__path", "+ device + '_' + str( num_shots) + 'queries_' + oracle + '_'", "exit(1) elif device == qx3 or device == qx5: if n_qubits <= 16:", "max_qubits < self.__n_qubits: logger.critical('create() - Can use only up to %s qubits', str(max_qubits))", "+ str( execution) + '/' + device + '_' + str(num_shots) + '_'", "= 11') self.cx(circuit, quantum_r[qubit], quantum_r[self.__connected[qubit]], qubit, self.__connected[qubit]) elif oracle == '10': logger.log(logging.VERBOSE, 'place_cx()", "or implied. # See the License for the specific language governing permissions and", "sorted_c:\\n%s', str(sorted_c)) s_0 = self.__n_qubits // 2 i = 0 count = self.__n_qubits", "self.__coupling_map = dict() self.__inverse_coupling_map = dict() self.__plain_map = dict() self.__path = dict() self.__n_qubits", "# create the circuit def create(self, circuit, quantum_r, classical_r, n_qubits, x=True, oracle='11'): self.__n_qubits", "or device == qx4: if n_qubits <= 5: size = 5 # device", "circuit.x(quantum_r[qubit]) else: circuit.h(quantum_r[qubit]) # place Pauli-X gates def place_x(self, circuit, quantum_r): sorted_c =", "Shots %d', n_qubits, execution, num_shots) envariance_exec(execution, device, utility, n_qubits=n_qubits, num_shots=num_shots, directory=directory) return try:", "Exception: logger.critical('Exception occurred, retrying\\nQubits %d - Execution %d - Shots %d', n_qubits, execution,", "'place_x() - sorted_c:\\n%s', str(sorted_c)) s_0 = self.__n_qubits // 2 i = 0 count", "open(filename, 'w') # store counts in txt file and xlsx file out_f.write('VALUES\\t\\tCOUNTS\\n\\n') stop", "n_qubits, execution, num_shots) envariance_exec(execution, device, utility, n_qubits=n_qubits, num_shots=num_shots, directory=directory) return quantum_r = Q_program.create_quantum_register(\"qr\",", "logger.debug('create_path() - path:\\n%s', 
str(self.__path)) def cx(self, circuit, control_qubit, target_qubit, control, target): if target", "<= 5: size = 5 elif n_qubits <= 16: size = 16 else:", "%s', str(max_qubits)) if max_qubits < self.__n_qubits: logger.critical('create() - Can use only up to", "filename = directory + device + '/' + 'execution' + str( execution) +", "'11': logger.log(logging.VERBOSE, 'place_cx() - oracle = 11') self.cx(circuit, quantum_r[qubit], quantum_r[self.__connected[qubit]], qubit, self.__connected[qubit]) elif", "sorted_v = [] for n in range(n_qubits - stop): sorted_v.append(reverse[connected[n + stop]]) for", "dict() self.__plain_map = dict() self.__path = dict() self.__n_qubits = 0 self.__ranks = dict()", "except ValueError: logger.critical('Backend is not available, waiting...') sleep(900) continue break if Q_program.get_api().get_my_credits()['remaining'] <", "ranks[most_connected]] logger.debug('max: %s', str(found)) return found # create a valid path that connect", "logger.debug('launch_exp() - oredred_q:\\n%s', str(connected)) stop = n_qubits // 2 for i in sorted_c:", "Cannot connect qubit %s to qubit %s', str(control), str(target)) exit(3) # place cnot", "str(counts)) sorted_c = sorted(counts.items(), key=operator.itemgetter(1), reverse=True) filename = directory + device + '/'", "like a global. 
\"../qiskit-sdk-py\") from qiskit import QuantumProgram import Qconfig logger = logging.getLogger('utility')", "0}) ranks[next] = ranks[next] + 1 self.explore(source, next, visited, ranks) # TODO Try", "continue except ValueError: logger.critical('Backend is not available, waiting...') sleep(900) continue break if Q_program.get_api().get_my_credits()['remaining']", "break for node in plain_map[to_connect[visiting]]: if count <= 0: break if node not", "= 0 self.__connected.clear() return connected # launch envariance experiment on the given device", "qubit, self.__connected[qubit]) stop -= 1 # place Hadamard gates def place_h(self, circuit, start,", "logger.critical('%s currently offline, waiting...', device) while Q_program.get_backend_status(device)['available'] is False: sleep(1800) logger.critical('%s is back", "use this file except in compliance with the License. # You may obtain", "str(target)) exit(3) # place cnot gates based on the path created in create_path", "loop: %s%s', str(connected[n + stop + 1]), str(sorted_v[n + 2])) value = ''.join(str(v)", "num_shots=num_shots, directory=directory) return try: counts = result.get_counts('parity') except Exception: logger.critical('Exception occurred, retrying\\nQubits %d", "str(self.__path)) if node not in to_connect: to_connect.append(node) visiting += 1 logger.debug('create_path() - path:\\n%s',", "+ '_qubits_envariance.txt' os.makedirs(os.path.dirname(filename), exist_ok=True) out_f = open(filename, 'w') # store counts in txt", "num_shots) envariance_exec(execution, device, utility, n_qubits=n_qubits, num_shots=num_shots, directory=directory) return quantum_r = Q_program.create_quantum_register(\"qr\", size) classical_r", "if ('available' in backend_status and backend_status['available'] is False) \\ or ('busy' in backend_status", "create an inverted coupling-map for further use @staticmethod def invert_graph(graph, inverse_graph=None): if inverse_graph", "further use @staticmethod def invert_graph(graph, 
inverse_graph=None): if inverse_graph is None: inverse_graph = {}", "file and xlsx file out_f.write('VALUES\\t\\tCOUNTS\\n\\n') logger.debug('launch_exp() - oredred_q:\\n%s', str(connected)) stop = n_qubits //", "2 for i in sorted_c: reverse = i[0][::-1] sorted_v = [] for n", "str(self.__path)) def cx(self, circuit, control_qubit, target_qubit, control, target): if target in self.__coupling_map[control]: logger.log(logging.VERBOSE,", "utility.envariance(circuit=circuit, quantum_r=quantum_r, classical_r=classical_r, n_qubits=n_qubits) QASM_source = Q_program.get_qasm(\"envariance\") logger.debug('launch_exp() - QASM:\\n%s', str(QASM_source)) while True:", "oracle='11', num_shots=1024, directory='Data_Parity/'): os.makedirs(os.path.dirname(directory), exist_ok=True) size = 0 results = dict() if device", "for loop: %s,%s', str(connected[n + 1]), str(sorted_v[n + 1])) if (n + stop", "for the specific language governing permissions and # limitations under the License. #", "num_shots=1024, directory='Data_Parity/'): os.makedirs(os.path.dirname(directory), exist_ok=True) size = 0 results = dict() if device ==", "cx(self, circuit, control_qubit, target_qubit, control, target): if target in self.__coupling_map[control]: logger.log(logging.VERBOSE, 'cx() -", "[] for n in range(n_qubits - stop): sorted_v.append(reverse[connected[n + stop]]) for n in", "utility, n_qubits, oracle='11', num_shots=1024, directory='Data_Parity/'): os.makedirs(os.path.dirname(directory), exist_ok=True) size = 0 results = dict()", "- Oracle %s - Execution %d - Queries %d', n_qubits, oracle, execution, num_shots)", "-= 1 logger.debug('create_path() - path:\\n%s', str(self.__path)) if node not in to_connect: to_connect.append(node) visiting", "circuit def create(self, circuit, quantum_r, classical_r, n_qubits, x=True, oracle='11'): self.__n_qubits = n_qubits max_qubits", "API url except ConnectionError: sleep(900) logger.critical('API Exception occurred, retrying\\nQubits %d - Execution %d", "online, 
resuming execution', device) except ConnectionError: logger.critical('Error getting backend status, retrying...') sleep(900) continue", "n in range(stop): sorted_v.append(reverse[connected[n + 1]]) logger.log(logging.VERBOSE, 'launch_exp() - connected[n+1], sorted_v[n+1] in 2nd", "%d ---- Waiting for credits to replenish...', n_qubits, execution, num_shots) while Q_program.get_api().get_my_credits()['remaining'] <", "except ConnectionError: logger.critical('Error getting backend status, retrying...') sleep(900) continue except ValueError: logger.critical('Backend is", "// 2 i = 0 count = self.__n_qubits - 1 for qubit in", "'ibmqx_qasm_simulator' else: logger.critical('launch_exp() - Too much qubits for %s !', device) exit(1) elif", "place Hadamard gates def place_h(self, circuit, start, quantum_r, initial=True, x=True): for qubit in", "directory=directory) return try: counts = result.get_counts(\"envariance\") except Exception: logger.critical('Exception occurred, retrying\\nQubits %d -", "an \"AS IS\" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either", "1 # place Hadamard gates def place_h(self, circuit, start, quantum_r, initial=True, x=True): for", "reverse in for 1st loop: %s', str(reverse)) sorted_v = [reverse[connected[0]]] logger.log(logging.VERBOSE, 'launch_exp() -", "execution, num_shots) envariance_exec(execution, device, utility, n_qubits=n_qubits, num_shots=num_shots, directory=directory) return logger.debug('launch_exp() - counts:\\n%s', str(counts))", "connected def parity(self, circuit, quantum_r, classical_r, n_qubits, oracle='11'): self.create(circuit, quantum_r, classical_r, n_qubits, x=False,", "None: inverse_graph = {} for end in graph: for start in graph[end]: if", "for qubit in self.__path: if count <= 0: break self.__connected.update({qubit: self.__path[qubit]}) count -=", "connected:\\n%s', str(connected)) self.__n_qubits = 0 self.__connected.clear() return connected def parity(self, circuit, quantum_r, classical_r,", "for qubit 
in self.__connected: if qubit != start: circuit.h(quantum_r[qubit]) else: if initial is", "reverse=True) filename = directory + device + '/' + oracle + '/' +", "break self.__connected.update({qubit: self.__path[qubit]}) count -= 1 logger.debug('create() - connected:\\n%s', str(self.__connected)) self.place_h(circuit, self.__most_connected[0], quantum_r,", "utility, n_qubits, num_shots=1024, directory='Data_Envariance/'): os.makedirs(os.path.dirname(directory), exist_ok=True) size = 0 results = dict() if", "# distributed under the License is distributed on an \"AS IS\" BASIS, #", "if count <= 0: break if node not in self.__path: self.__path.update({node: to_connect[visiting]}) count", "# store counts in txt file and xlsx file out_f.write('VALUES\\t\\tCOUNTS\\n\\n') stop = n_qubits", "oracle == '11': logger.log(logging.VERBOSE, 'place_cx() - oracle = 11') self.cx(circuit, quantum_r[qubit], quantum_r[self.__connected[qubit]], qubit,", "parity_exec(execution, device, utility, n_qubits=n_qubits, oracle=oracle, num_shots=num_shots, directory=directory) return quantum_r = Q_program.create_quantum_register(\"qr\", size) classical_r", "-1}) to_connect = [start] max = len(self.__coupling_map) logger.debug('create_path() - max:\\n%s', str(max)) count =", "!= -1: if oracle == '11': logger.log(logging.VERBOSE, 'place_cx() - oracle = 11') self.cx(circuit,", "the circuit def create_path(self, start, plain_map): self.__path.update({start: -1}) to_connect = [start] max =", "'\\n') out_f.close() # launch parity experiment on the given device def parity_exec(execution, device,", "Q_program.get_api().get_my_credits()['remaining'] < 3: sleep(900) logger.critical('Credits replenished, resuming execution') try: result = Q_program.execute(['parity'], backend=device,", "Shots %d', n_qubits, execution, num_shots) envariance_exec(execution, device, utility, n_qubits=n_qubits, num_shots=num_shots, directory=directory) return logger.debug('launch_exp()", "i >= s_0: 
circuit.x(quantum_r[qubit[0]]) else: circuit.iden(quantum_r[qubit[0]]) i += 1 i = 0 for", "Q_program.execute([\"envariance\"], backend=device, wait=2, timeout=1000, shots=num_shots, max_credits=5) except Exception: sleep(900) logger.critical('Exception occurred, retrying\\nQubits %d", "('available' in backend_status and backend_status['available'] is False) \\ or ('busy' in backend_status and", "- path:\\n%s', str(self.__path)) if node not in to_connect: to_connect.append(node) visiting += 1 logger.debug('create_path()", "shots=num_shots, max_credits=5) except Exception: sleep(900) logger.critical('Exception occurred, retrying\\nQubits %d - Oracle %s -", "with the License. # You may obtain a copy of the License at", "self.__connected.clear() return connected def parity(self, circuit, quantum_r, classical_r, n_qubits, oracle='11'): self.create(circuit, quantum_r, classical_r,", "sorted_v in 1st for loop: %s', str(sorted_v)) for n in range(stop): sorted_v.append(reverse[connected[n +", "oracle='11'): self.create(circuit, quantum_r, classical_r, n_qubits, x=False, oracle=oracle) connected = list(self.__connected.keys()) logger.debug('parity() - connected:\\n%s',", "visiting, visited, ranks): for next in self.__coupling_map[visiting]: if next not in visited[source]: visited[source].append(next)", "sorted_v.append(reverse[connected[n + 1]]) logger.log(logging.VERBOSE, 'launch_exp() - connected[n+1], sorted_v[n+1] in 2nd for loop: %s,%s',", "execution, num_shots) envariance_exec(execution, device, utility, n_qubits=n_qubits, num_shots=num_shots, directory=directory) return try: counts = result.get_counts(\"envariance\")", "= [most_connected, ranks[most_connected]] logger.debug('max: %s', str(found)) return found # create a valid path", "url except ConnectionError: sleep(900) logger.critical('API Exception occurred, retrying\\nQubits %d - Oracle %s -", "law or agreed to in writing, software # distributed under the License is", "Qconfig logger = 
logging.getLogger('utility') logger.addHandler(myLogger.MyHandler()) logger.setLevel(logging.CRITICAL) logger.propagate = False class Utility(object): def __init__(self,", "Q_program.create_circuit('parity', [quantum_r], [classical_r]) connected = utility.parity(circuit=circuit, quantum_r=quantum_r, classical_r=classical_r, n_qubits=n_qubits, oracle=oracle) QASM_source = Q_program.get_qasm('parity')", "directory=directory) return quantum_r = Q_program.create_quantum_register(\"qr\", size) classical_r = Q_program.create_classical_register(\"cr\", size) circuit = Q_program.create_circuit('parity',", "str(max)) count = max - 1 changed = True visiting = 0 while", "Exception: sleep(900) logger.critical('Exception occurred, retrying\\nQubits %d - Oracle %s - Execution %d -", "# place Hadamard gates def place_h(self, circuit, start, quantum_r, initial=True, x=True): for qubit", "# place Pauli-X gates def place_x(self, circuit, quantum_r): sorted_c = sorted(self.__connected.items(), key=operator.itemgetter(0)) logger.log(logging.VERBOSE,", "logger.propagate = False class Utility(object): def __init__(self, coupling_map): self.__coupling_map = dict() self.__inverse_coupling_map =", "offline, waiting...', device) while Q_program.get_backend_status(device)['available'] is False: sleep(1800) logger.critical('%s is back online, resuming", "self.__coupling_map[target]: logger.log(logging.VERBOSE, 'cx() - inverse-cnot: (%s, %s)', str(control), str(target)) circuit.h(control_qubit) circuit.h(target_qubit) circuit.cx(target_qubit, control_qubit)", "+ 1]), str(sorted_v[n + 2])) value = ''.join(str(v) for v in sorted_v) results.update({value:", "16: size = 16 else: logger.critical('launch_exp() - Unknown device.') exit(3) Q_program = QuantumProgram()", "self.__plain_map.update({i: self.__inverse_coupling_map[i] + coupling_map[i]}) logger.debug('init() - plain map:\\n%s', str(self.__plain_map)) self.start_explore(self.__coupling_map, self.__ranks) self.__most_connected =", "sleep(900) 
logger.critical('Exception occurred, retrying\\nQubits %d - Oracle %s - Execution %d - Queries", "Unknown device.') exit(3) Q_program = QuantumProgram() try: Q_program.set_api(Qconfig.APItoken, Qconfig.config[\"url\"]) # set the APIToken", "in compliance with the License. # You may obtain a copy of the", "logger.critical('API Exception occurred, retrying\\nQubits %d - Execution %d - Shots %d', n_qubits, execution,", "1 for qubit in sorted_c: if count <= 0: break if i >=", "License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or", "size) circuit = Q_program.create_circuit('parity', [quantum_r], [classical_r]) connected = utility.parity(circuit=circuit, quantum_r=quantum_r, classical_r=classical_r, n_qubits=n_qubits, oracle=oracle)", "[] if coupling_map: self.__coupling_map = coupling_map.copy() logger.log(logging.DEBUG, 'init() - coupling_map:\\n%s', str(self.__coupling_map)) self.invert_graph(coupling_map, self.__inverse_coupling_map)", "time import sleep from devices import * import logging import myLogger import operator", "qubit != start: circuit.h(quantum_r[qubit]) else: if initial is True: if x is True:", "BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.", "+ oracle + '/' + 'execution' + str( execution) + '/' + device", "<= 5: size = 5 # device = 'ibmqx_qasm_simulator' else: logger.critical('launch_exp() - Too", "a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required", "next not in ranks: ranks.update({next: 0}) ranks[next] = ranks[next] + 1 self.explore(source, next,", "oracle, execution, num_shots) parity_exec(execution, device, utility, n_qubits=n_qubits, oracle=oracle, num_shots=num_shots, directory=directory) return logger.debug('launch_exp() -", "''.join(str(v) for v in sorted_v) results.update({value: i[1]}) out_f.write(value + '\\t' + str(i[1]) +", "most_connected = max(ranks.items(), key=operator.itemgetter(1))[0] found = [most_connected, 
ranks[most_connected]] logger.debug('max: %s', str(found)) return found", "= 0 self.__connected.clear() return connected def parity(self, circuit, quantum_r, classical_r, n_qubits, oracle='11'): self.create(circuit,", "logger.log(logging.DEBUG, 'init() - coupling_map:\\n%s', str(self.__coupling_map)) self.invert_graph(coupling_map, self.__inverse_coupling_map) logger.log(logging.DEBUG, 'init() - inverse coupling map:\\n%s',", "self.__n_qubits = 0 self.__connected.clear() return connected # launch envariance experiment on the given", "'00': logger.log(logging.VERBOSE, 'place_cx() - oracle != 00') stop = self.__n_qubits // 2 for", "oracle, execution, num_shots) parity_exec(execution, device, utility, n_qubits=n_qubits, oracle=oracle, num_shots=num_shots, directory=directory) return quantum_r =", "= 0 count = self.__n_qubits - 1 for qubit in sorted_c: if count", "self.place_cx(circuit, quantum_r, oracle=oracle) self.place_h(circuit, self.__most_connected[0], quantum_r, initial=False) if x is True: self.place_x(circuit, quantum_r)", "repo and use like a global. 
\"../qiskit-sdk-py\") from qiskit import QuantumProgram import Qconfig", "execution', device) except ConnectionError: logger.critical('Error getting backend status, retrying...') sleep(900) continue except ValueError:", "graph: if node not in inverse_graph: inverse_graph.update({node: []}) # find the most connected", "not available, waiting...') sleep(900) continue break if Q_program.get_api().get_my_credits()['remaining'] < 3: logger.critical('Qubits %d -", "count <= 0: break self.__connected.update({qubit: self.__path[qubit]}) count -= 1 logger.debug('create() - connected:\\n%s', str(self.__connected))", "count -= 1 logger.debug('create() - connected:\\n%s', str(self.__connected)) self.place_h(circuit, self.__most_connected[0], quantum_r, x=x) self.place_cx(circuit, quantum_r,", "Q_program.get_api().get_my_credits()['remaining'] < 3: logger.critical('Qubits %d - Oracle %s - Execution %d - Queries", "and backend_status['busy'] is True): logger.critical('%s currently offline, waiting...', device) while Q_program.get_backend_status(device)['available'] is False:", "n_qubits, oracle='11'): self.create(circuit, quantum_r, classical_r, n_qubits, x=False, oracle=oracle) connected = list(self.__connected.keys()) logger.debug('parity() -", "logger.debug('init() - plain map:\\n%s', str(self.__plain_map)) self.start_explore(self.__coupling_map, self.__ranks) self.__most_connected = self.find_max(self.__ranks) self.create_path(self.__most_connected[0], plain_map=self.__plain_map) else:", "connected[n+1], sorted_v[n+1] in 2nd for loop: %s,%s', str(connected[n + 1]), str(sorted_v[n + 1]))", "+ 1]]) logger.log(logging.VERBOSE, 'launch_exp() - connected[n+stop+1], sorted_v[n+2] in 2nd for loop: %s%s', str(connected[n", "oracle + '_' + str( n_qubits) + '_qubits_parity.txt' os.makedirs(os.path.dirname(filename), exist_ok=True) out_f = open(filename,", "QuantumProgram import Qconfig logger = logging.getLogger('utility') logger.addHandler(myLogger.MyHandler()) 
logger.setLevel(logging.CRITICAL) logger.propagate = False class Utility(object):", "sleep from devices import * import logging import myLogger import operator import sys", "Queries %d ---- Waiting for credits to replenish...', n_qubits, oracle, execution, num_shots) while", "+ 2])) value = ''.join(str(v) for v in sorted_v) results.update({value: i[1]}) out_f.write(value +", "sleep(900) logger.critical('Exception occurred, retrying\\nQubits %d - Execution %d - Shots %d', n_qubits, execution,", "num_shots=num_shots, directory=directory) return logger.debug('launch_exp() - counts:\\n%s', str(counts)) sorted_c = sorted(counts.items(), key=operator.itemgetter(1), reverse=True) filename", "11') self.cx(circuit, quantum_r[qubit], quantum_r[self.__connected[qubit]], qubit, self.__connected[qubit]) elif oracle == '10': logger.log(logging.VERBOSE, 'place_cx() -", "i in sorted_c: reverse = i[0][::-1] logger.log(logging.VERBOSE, 'launch_exp() - reverse in for 1st", "+ oracle + '_' + str( n_qubits) + '_qubits_parity.txt' os.makedirs(os.path.dirname(filename), exist_ok=True) out_f =", "create the circuit def create(self, circuit, quantum_r, classical_r, n_qubits, x=True, oracle='11'): self.__n_qubits =", "1st for loop: %s', str(connected[0])) logger.log(logging.VERBOSE, 'launch_exp() - sorted_v in 1st for loop:", "if i >= s_0: circuit.iden(quantum_r[qubit[0]]) else: circuit.x(quantum_r[qubit[0]]) i += 1 # final measure", "logger.critical('Credits replenished, resuming execution') try: result = Q_program.execute([\"envariance\"], backend=device, wait=2, timeout=1000, shots=num_shots, max_credits=5)", "x is True: circuit.x(quantum_r[qubit]) else: circuit.h(quantum_r[qubit]) # place Pauli-X gates def place_x(self, circuit,", "in backend_status and backend_status['available'] is False) \\ or ('busy' in backend_status and backend_status['busy']", "n in range(stop): sorted_v.append(reverse[connected[n]]) value = ''.join(str(v) for v in sorted_v) results.update({value: i[1]})", 
"oracle='11'): if not oracle == '00': logger.log(logging.VERBOSE, 'place_cx() - oracle != 00') stop", "target in self.__coupling_map[control]: logger.log(logging.VERBOSE, 'cx() - cnot: (%s, %s)', str(control), str(target)) circuit.cx(control_qubit, target_qubit)", "backend_status and backend_status['busy'] is True): logger.critical('%s currently offline, waiting...', device) while Q_program.get_backend_status(device)['available'] is", "str(to_connect[visiting])) # for visiting in to_connect: if count <= 0: break for node", "node in graph: if node not in inverse_graph: inverse_graph.update({node: []}) # find the", "i += 1 i = 0 for qubit in sorted_c: if i >=", "True: if x is True: circuit.x(quantum_r[qubit]) else: circuit.h(quantum_r[qubit]) # place Pauli-X gates def", "inverse_graph=None): if inverse_graph is None: inverse_graph = {} for end in graph: for", "'cx() - inverse-cnot: (%s, %s)', str(control), str(target)) circuit.h(control_qubit) circuit.h(target_qubit) circuit.cx(target_qubit, control_qubit) circuit.h(control_qubit) circuit.h(target_qubit)", "self.__connected[qubit]) elif oracle == '10': logger.log(logging.VERBOSE, 'place_cx() - oracle = 10') if stop", "much qubits for %s !', device) exit(1) elif device == qx3 or device", "x=x) self.place_cx(circuit, quantum_r, oracle=oracle) self.place_h(circuit, self.__most_connected[0], quantum_r, initial=False) if x is True: self.place_x(circuit,", "dict() if device == qx2 or device == qx4: if n_qubits <= 5:", "n_qubits): self.create(circuit, quantum_r, classical_r, n_qubits) sorted_c = sorted(self.__connected.items(), key=operator.itemgetter(0)) connected = list(zip(*sorted_c))[0] logger.debug('envariance()", "launch parity experiment on the given device def parity_exec(execution, device, utility, n_qubits, oracle='11',", "= self.find_max(self.__ranks) self.create_path(self.__most_connected[0], plain_map=self.__plain_map) else: logger.critical('init() - Null argument: coupling_map') exit(1) def close(self):", 
"%d', n_qubits, execution, num_shots) envariance_exec(execution, device, utility, n_qubits=n_qubits, num_shots=num_shots, directory=directory) return logger.debug('launch_exp() -", "'init() - inverse coupling map:\\n%s', str(self.__inverse_coupling_map)) for i in coupling_map: self.__plain_map.update({i: self.__inverse_coupling_map[i] +", "logger.log(logging.VERBOSE, 'place_cx() - oracle = 10') if stop > 0: self.cx(circuit, quantum_r[qubit], quantum_r[self.__connected[qubit]],", "except ConnectionError: sleep(900) logger.critical('API Exception occurred, retrying\\nQubits %d - Execution %d - Shots", "n_qubits, oracle='11', num_shots=1024, directory='Data_Parity/'): os.makedirs(os.path.dirname(directory), exist_ok=True) size = 0 results = dict() if", "retrying\\nQubits %d - Oracle %s - Execution %d - Queries %d', n_qubits, oracle,", "n_qubits max_qubits = len(self.__path) logger.debug('create() - N qubits: %s', str(self.__n_qubits)) logger.debug('create() - Max", "i += 1 # final measure def measure(self, circuit, quantum_r, classical_r): for qubit", "distributed under the License is distributed on an \"AS IS\" BASIS, # WITHOUT", "occurred, retrying\\nQubits %d - Oracle %s - Execution %d - Queries %d', n_qubits,", "size = 0 results = dict() if device == qx2 or device ==", "str(max_qubits)) if max_qubits < self.__n_qubits: logger.critical('create() - Can use only up to %s", "= [] if coupling_map: self.__coupling_map = coupling_map.copy() logger.log(logging.DEBUG, 'init() - coupling_map:\\n%s', str(self.__coupling_map)) self.invert_graph(coupling_map,", "Execution %d - Queries %d ---- Waiting for credits to replenish...', n_qubits, oracle,", "dict() self.__connected = dict() self.__most_connected = [] if coupling_map: self.__coupling_map = coupling_map.copy() logger.log(logging.DEBUG,", "0: self.cx(circuit, quantum_r[qubit], quantum_r[self.__connected[qubit]], qubit, self.__connected[qubit]) stop -= 1 # place Hadamard gates", "if 
Q_program.get_api().get_my_credits()['remaining'] < 3: logger.critical('Qubits %d - Execution %d - Shots %d ----", "len(self.__coupling_map) logger.debug('create_path() - max:\\n%s', str(max)) count = max - 1 changed = True", "Italy\" __license__ = \"Apache\" __version__ = \"2.0\" __email__ = \"<EMAIL>\" import os from", "dict() self.__n_qubits = 0 self.__ranks = dict() self.__connected = dict() self.__most_connected = []", "visiting = 0 while count > 0: logger.debug('create_path() - visiting:\\n%s - %s', str(visiting),", "circuit, quantum_r, classical_r, n_qubits, oracle='11'): self.create(circuit, quantum_r, classical_r, n_qubits, x=False, oracle=oracle) connected =", "visited, ranks) # create an inverted coupling-map for further use @staticmethod def invert_graph(graph,", "logger.log(logging.DEBUG, 'init() - inverse coupling map:\\n%s', str(self.__inverse_coupling_map)) for i in coupling_map: self.__plain_map.update({i: self.__inverse_coupling_map[i]", "num_shots) parity_exec(execution, device, utility, n_qubits=n_qubits, oracle=oracle, num_shots=num_shots, directory=directory) return try: counts = result.get_counts('parity')", "self.explore(source, source, visited, ranks) # create an inverted coupling-map for further use @staticmethod", "this file except in compliance with the License. 
# You may obtain a", "using some sort of centrality algorithm def start_explore(self, graph, ranks): visited = dict()", "except Exception: logger.critical('Exception occurred, retrying\\nQubits %d - Execution %d - Shots %d', n_qubits,", "qubits: %s', str(self.__n_qubits)) logger.debug('create() - Max qubits: %s', str(max_qubits)) if max_qubits < self.__n_qubits:", "while count > 0: logger.debug('create_path() - visiting:\\n%s - %s', str(visiting), str(to_connect[visiting])) # for", "coupling_map') exit(1) def close(self): self.__ranks.clear() self.__inverse_coupling_map.clear() self.__coupling_map.clear() self.__path.clear() self.__most_connected.clear() def explore(self, source, visiting,", "ValueError: logger.critical('Backend is not available, waiting...') sleep(900) continue break if Q_program.get_api().get_my_credits()['remaining'] < 3:", "- visiting:\\n%s - %s', str(visiting), str(to_connect[visiting])) # for visiting in to_connect: if count", "txt file and xlsx file out_f.write('VALUES\\t\\tCOUNTS\\n\\n') stop = n_qubits // 2 for i", "logger.log(logging.VERBOSE, 'launch_exp() - reverse in for 1st loop: %s', str(reverse)) sorted_v = [reverse[connected[0]]]", "under the License. 
# ============================================================================= __author__ = \"<NAME>\" __copyright__ = \"Copyright 2017, Quantum", "execution, num_shots) parity_exec(execution, device, utility, n_qubits=n_qubits, oracle=oracle, num_shots=num_shots, directory=directory) return try: counts =", "== qx5: if n_qubits <= 16: size = 16 # device = 'ibmqx_qasm_simulator'", "return try: counts = result.get_counts(\"envariance\") except Exception: logger.critical('Exception occurred, retrying\\nQubits %d - Execution", "+ '_' + str( n_qubits) + '_qubits_parity.txt' os.makedirs(os.path.dirname(filename), exist_ok=True) out_f = open(filename, 'w')", "if inverse_graph is None: inverse_graph = {} for end in graph: for start", "'place_cx() - oracle = 11') self.cx(circuit, quantum_r[qubit], quantum_r[self.__connected[qubit]], qubit, self.__connected[qubit]) elif oracle ==", "sleep(900) logger.critical('Credits replenished, resuming execution') try: result = Q_program.execute([\"envariance\"], backend=device, wait=2, timeout=1000, shots=num_shots,", "'ibmqx_qasm_simulator' else: logger.critical('launch_exp() - Too much qubits for %s !', device) exit(2) elif", "sorted(self.__connected.items(), key=operator.itemgetter(0)) logger.log(logging.VERBOSE, 'place_x() - sorted_c:\\n%s', str(sorted_c)) s_0 = self.__n_qubits // 2 i", "execution) + '/' + device + '_' + str( num_shots) + 'queries_' +", "import logging import myLogger import operator import sys sys.path.append( # solve the relative", "Hadamard gates def place_h(self, circuit, start, quantum_r, initial=True, x=True): for qubit in self.__connected:", "of centrality algorithm def start_explore(self, graph, ranks): visited = dict() for source in", "plain_map[to_connect[visiting]]: if count <= 0: break if node not in self.__path: self.__path.update({node: to_connect[visiting]})", "in self.__path: self.__path.update({node: to_connect[visiting]}) count -= 1 logger.debug('create_path() - path:\\n%s', str(self.__path)) if 
node", "except ConnectionError: sleep(900) logger.critical('API Exception occurred, retrying\\nQubits %d - Oracle %s - Execution", "to_connect[visiting]}) count -= 1 logger.debug('create_path() - path:\\n%s', str(self.__path)) if node not in to_connect:", "if Q_program.get_api().get_my_credits()['remaining'] < 3: logger.critical('Qubits %d - Oracle %s - Execution %d -", "clone QISKit from the Git repo and use like a global. \"../qiskit-sdk-py\") from", "def place_h(self, circuit, start, quantum_r, initial=True, x=True): for qubit in self.__connected: if qubit", "envariance experiment on the given device def envariance_exec(execution, device, utility, n_qubits, num_shots=1024, directory='Data_Envariance/'):", "sorted(counts.items(), key=operator.itemgetter(1), reverse=True) filename = directory + device + '/' + 'execution' +", "- connected:\\n%s', str(self.__connected)) self.place_h(circuit, self.__most_connected[0], quantum_r, x=x) self.place_cx(circuit, quantum_r, oracle=oracle) self.place_h(circuit, self.__most_connected[0], quantum_r,", "False) \\ or ('busy' in backend_status and backend_status['busy'] is True): logger.critical('%s currently offline,", "in create_path method def place_cx(self, circuit, quantum_r, oracle='11'): if not oracle == '00':", "import sleep from devices import * import logging import myLogger import operator import", "self.__most_connected[0], quantum_r, x=x) self.place_cx(circuit, quantum_r, oracle=oracle) self.place_h(circuit, self.__most_connected[0], quantum_r, initial=False) if x is", "classical_r=classical_r, n_qubits=n_qubits, oracle=oracle) QASM_source = Q_program.get_qasm('parity') logger.debug('launch_exp() - QASM:\\n%s', str(QASM_source)) while True: try:", "quantum_r, oracle='11'): if not oracle == '00': logger.log(logging.VERBOSE, 'place_cx() - oracle != 00')", "'launch_exp() - sorted_v in 1st for loop: %s', str(sorted_v)) for n in range(stop):", "True: self.place_x(circuit, quantum_r) self.measure(circuit, quantum_r, 
classical_r) def envariance(self, circuit, quantum_r, classical_r, n_qubits): self.create(circuit,", "of Parma, Italy. All Rights Reserved. # # Licensed under the Apache License,", "out_f = open(filename, 'w') # store counts in txt file and xlsx file", "if n_qubits <= 5: size = 5 # device = 'ibmqx_qasm_simulator' else: logger.critical('launch_exp()", "out_f.write(value + '\\t' + str(i[1]) + '\\n') out_f.close() # launch parity experiment on", "if qubit != start: circuit.h(quantum_r[qubit]) else: if initial is True: if x is", "i[1]}) out_f.write(value + '\\t' + str(i[1]) + '\\n') out_f.close() # launch parity experiment", "quantum_r[self.__connected[qubit]], qubit, self.__connected[qubit]) stop -= 1 # place Hadamard gates def place_h(self, circuit,", "circuit, quantum_r): sorted_c = sorted(self.__connected.items(), key=operator.itemgetter(0)) logger.log(logging.VERBOSE, 'place_x() - sorted_c:\\n%s', str(sorted_c)) s_0 =", "self.__n_qubits // 2 for qubit in self.__connected: if self.__connected[qubit] != -1: if oracle", "# # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to", "+ 'execution' + str( execution) + '/' + device + '_' + str(", "execution') try: result = Q_program.execute([\"envariance\"], backend=device, wait=2, timeout=1000, shots=num_shots, max_credits=5) except Exception: sleep(900)", "# ============================================================================= __author__ = \"<NAME>\" __copyright__ = \"Copyright 2017, Quantum Information Science, University", "exist_ok=True) size = 0 results = dict() if device == qx2 or device", "x=True, oracle='11'): self.__n_qubits = n_qubits max_qubits = len(self.__path) logger.debug('create() - N qubits: %s',", "try: backend_status = Q_program.get_backend_status(device) if ('available' in backend_status and backend_status['available'] is False) \\", "= Q_program.execute(['parity'], backend=device, wait=2, timeout=1000, shots=num_shots, max_credits=5) except Exception: 
sleep(900) logger.critical('Exception occurred, retrying\\nQubits", "+ '/' + device + '_' + str( num_shots) + 'queries_' + oracle", "Quantum Information Science, University of Parma, Italy\" __license__ = \"Apache\" __version__ = \"2.0\"", "logger.debug('create() - Max qubits: %s', str(max_qubits)) if max_qubits < self.__n_qubits: logger.critical('create() - Can", "based on the path created in create_path method def place_cx(self, circuit, quantum_r, oracle='11'):", "ranks): for next in self.__coupling_map[visiting]: if next not in visited[source]: visited[source].append(next) if next", "n_qubits // 2 for i in sorted_c: reverse = i[0][::-1] sorted_v = []", "gates based on the path created in create_path method def place_cx(self, circuit, quantum_r,", "if target in self.__coupling_map[control]: logger.log(logging.VERBOSE, 'cx() - cnot: (%s, %s)', str(control), str(target)) circuit.cx(control_qubit,", "True): logger.critical('%s currently offline, waiting...', device) while Q_program.get_backend_status(device)['available'] is False: sleep(1800) logger.critical('%s is", "required by applicable law or agreed to in writing, software # distributed under", "os.makedirs(os.path.dirname(directory), exist_ok=True) size = 0 results = dict() if device == qx2 or", "in visited[source]: visited[source].append(next) if next not in ranks: ranks.update({next: 0}) ranks[next] = ranks[next]", "from time import sleep from devices import * import logging import myLogger import", "import sys sys.path.append( # solve the relative dependencies if you clone QISKit from", "to qubit %s', str(control), str(target)) exit(3) # place cnot gates based on the", "num_shots=1024, directory='Data_Envariance/'): os.makedirs(os.path.dirname(directory), exist_ok=True) size = 0 results = dict() if device ==", "control_qubit, target_qubit, control, target): if target in self.__coupling_map[control]: logger.log(logging.VERBOSE, 'cx() - cnot: (%s,", "circuit.h(target_qubit) else: logger.critical('cx() - 
Cannot connect qubit %s to qubit %s', str(control), str(target))", "in coupling_map: self.__plain_map.update({i: self.__inverse_coupling_map[i] + coupling_map[i]}) logger.debug('init() - plain map:\\n%s', str(self.__plain_map)) self.start_explore(self.__coupling_map, self.__ranks)", "num_shots) envariance_exec(execution, device, utility, n_qubits=n_qubits, num_shots=num_shots, directory=directory) return logger.debug('launch_exp() - counts:\\n%s', str(counts)) sorted_c", "'_' + str(num_shots) + '_' + str( n_qubits) + '_qubits_envariance.txt' os.makedirs(os.path.dirname(filename), exist_ok=True) out_f", "# device = 'ibmqx_qasm_simulator' else: logger.critical('launch_exp() - Too much qubits for %s !',", "class Utility(object): def __init__(self, coupling_map): self.__coupling_map = dict() self.__inverse_coupling_map = dict() self.__plain_map =", "self.create(circuit, quantum_r, classical_r, n_qubits, x=False, oracle=oracle) connected = list(self.__connected.keys()) logger.debug('parity() - connected:\\n%s', str(connected))", "def parity_exec(execution, device, utility, n_qubits, oracle='11', num_shots=1024, directory='Data_Parity/'): os.makedirs(os.path.dirname(directory), exist_ok=True) size = 0", "'_qubits_parity.txt' os.makedirs(os.path.dirname(filename), exist_ok=True) out_f = open(filename, 'w') # store counts in txt file", "%s', str(sorted_v)) for n in range(stop): sorted_v.append(reverse[connected[n + 1]]) logger.log(logging.VERBOSE, 'launch_exp() - connected[n+1],", "for loop: %s', str(connected[0])) logger.log(logging.VERBOSE, 'launch_exp() - sorted_v in 1st for loop: %s',", "0 for qubit in sorted_c: if i >= s_0: circuit.iden(quantum_r[qubit[0]]) else: circuit.x(quantum_r[qubit[0]]) i", "\"2.0\" __email__ = \"<EMAIL>\" import os from time import sleep from devices import", "= directory + device + '/' + oracle + '/' + 'execution' +", "logger.log(logging.VERBOSE, 'place_cx() - oracle != 00') stop = self.__n_qubits // 2 for qubit", "You may obtain a copy of 
the License at # # http://www.apache.org/licenses/LICENSE-2.0 #", "not in visited[source]: visited[source].append(next) if next not in ranks: ranks.update({next: 0}) ranks[next] =", "logger.log(logging.VERBOSE, 'place_x() - sorted_c:\\n%s', str(sorted_c)) s_0 = self.__n_qubits // 2 i = 0", "# set the APIToken and API url except ConnectionError: sleep(900) logger.critical('API Exception occurred,", "self.find_max(self.__ranks) self.create_path(self.__most_connected[0], plain_map=self.__plain_map) else: logger.critical('init() - Null argument: coupling_map') exit(1) def close(self): self.__ranks.clear()", "if node not in self.__path: self.__path.update({node: to_connect[visiting]}) count -= 1 logger.debug('create_path() - path:\\n%s',", "self.__connected.clear() return connected # launch envariance experiment on the given device def envariance_exec(execution,", "= max(ranks.items(), key=operator.itemgetter(1))[0] found = [most_connected, ranks[most_connected]] logger.debug('max: %s', str(found)) return found #", "%s', str(connected[0])) logger.log(logging.VERBOSE, 'launch_exp() - sorted_v in 1st for loop: %s', str(sorted_v)) for", "device) while Q_program.get_backend_status(device)['available'] is False: sleep(1800) logger.critical('%s is back online, resuming execution', device)", "if stop > 0: self.cx(circuit, quantum_r[qubit], quantum_r[self.__connected[qubit]], qubit, self.__connected[qubit]) stop -= 1 #", "credits to replenish...', n_qubits, execution, num_shots) while Q_program.get_api().get_my_credits()['remaining'] < 3: sleep(900) logger.critical('Credits replenished,", "= open(filename, 'w') # store counts in txt file and xlsx file out_f.write('VALUES\\t\\tCOUNTS\\n\\n')", "that connect qubits used in the circuit def create_path(self, start, plain_map): self.__path.update({start: -1})", "Science, University of Parma, Italy\" __license__ = \"Apache\" __version__ = \"2.0\" __email__ =", "device, utility, n_qubits=n_qubits, oracle=oracle, num_shots=num_shots, 
directory=directory) return logger.debug('launch_exp() - counts:\\n%s', str(counts)) sorted_c =", "quantum_r = Q_program.create_quantum_register(\"qr\", size) classical_r = Q_program.create_classical_register(\"cr\", size) circuit = Q_program.create_circuit('parity', [quantum_r], [classical_r])", "most connected qubit @staticmethod def find_max(ranks): logger.debug('ranks:\\n%s', str(ranks)) most_connected = max(ranks.items(), key=operator.itemgetter(1))[0] found", "3: logger.critical('Qubits %d - Execution %d - Shots %d ---- Waiting for credits", "sleep(900) continue except ValueError: logger.critical('Backend is not available, waiting...') sleep(900) continue break if", "'place_cx() - oracle != 00') stop = self.__n_qubits // 2 for qubit in", "exit(2) elif device == online_sim: if n_qubits <= 5: size = 5 elif", "- Too much qubits for %s !', device) exit(2) elif device == online_sim:", "1]]) logger.log(logging.VERBOSE, 'launch_exp() - connected[n+1], sorted_v[n+1] in 2nd for loop: %s,%s', str(connected[n +", "classical_r, n_qubits): self.create(circuit, quantum_r, classical_r, n_qubits) sorted_c = sorted(self.__connected.items(), key=operator.itemgetter(0)) connected = list(zip(*sorted_c))[0]", "in self.__coupling_map[target]: logger.log(logging.VERBOSE, 'cx() - inverse-cnot: (%s, %s)', str(control), str(target)) circuit.h(control_qubit) circuit.h(target_qubit) circuit.cx(target_qubit,", "oracle=oracle) self.place_h(circuit, self.__most_connected[0], quantum_r, initial=False) if x is True: self.place_x(circuit, quantum_r) self.measure(circuit, quantum_r,", "('busy' in backend_status and backend_status['busy'] is True): logger.critical('%s currently offline, waiting...', device) while", "key=operator.itemgetter(1), reverse=True) filename = directory + device + '/' + 'execution' + str(", "+ device + '/' + 'execution' + str( execution) + '/' + device", "graph, ranks): visited = dict() for source in graph: visited.update({source: []}) self.explore(source, source,", "in 
sorted_v) results.update({value: i[1]}) out_f.write(value + '\\t' + str(i[1]) + '\\n') out_f.close() #", "return logger.debug('launch_exp() - counts:\\n%s', str(counts)) sorted_c = sorted(counts.items(), key=operator.itemgetter(1), reverse=True) filename = directory", "- plain map:\\n%s', str(self.__plain_map)) self.start_explore(self.__coupling_map, self.__ranks) self.__most_connected = self.find_max(self.__ranks) self.create_path(self.__most_connected[0], plain_map=self.__plain_map) else: logger.critical('init()", "Quantum Information Science, University of Parma, Italy. All Rights Reserved. # # Licensed", "sorted_v.append(reverse[connected[n + stop + 1]]) logger.log(logging.VERBOSE, 'launch_exp() - connected[n+stop+1], sorted_v[n+2] in 2nd for", "sleep(900) logger.critical('Credits replenished, resuming execution') try: result = Q_program.execute(['parity'], backend=device, wait=2, timeout=1000, shots=num_shots,", "max - 1 changed = True visiting = 0 while count > 0:", "inverse_graph.update({node: []}) # find the most connected qubit @staticmethod def find_max(ranks): logger.debug('ranks:\\n%s', str(ranks))", "+= 1 i = 0 for qubit in sorted_c: if i >= s_0:", "self.explore(source, next, visited, ranks) # TODO Try using some sort of centrality algorithm", "0: break for node in plain_map[to_connect[visiting]]: if count <= 0: break if node", "str(self.__connected)) self.place_h(circuit, self.__most_connected[0], quantum_r, x=x) self.place_cx(circuit, quantum_r, oracle=oracle) self.place_h(circuit, self.__most_connected[0], quantum_r, initial=False) if", "1]), str(sorted_v[n + 2])) value = ''.join(str(v) for v in sorted_v) results.update({value: i[1]})", "logger.debug('envariance() - connected:\\n%s', str(connected)) self.__n_qubits = 0 self.__connected.clear() return connected def parity(self, circuit,", "visiting:\\n%s - %s', str(visiting), str(to_connect[visiting])) # for visiting in to_connect: if count <=", "# you may not use this file except in compliance with 
the License.", "qubits used in the circuit def create_path(self, start, plain_map): self.__path.update({start: -1}) to_connect =", "sorted_c = sorted(self.__connected.items(), key=operator.itemgetter(0)) connected = list(zip(*sorted_c))[0] logger.debug('envariance() - connected:\\n%s', str(connected)) self.__n_qubits =", "from devices import * import logging import myLogger import operator import sys sys.path.append(", "str( n_qubits) + '_qubits_parity.txt' os.makedirs(os.path.dirname(filename), exist_ok=True) out_f = open(filename, 'w') # store counts", "__init__(self, coupling_map): self.__coupling_map = dict() self.__inverse_coupling_map = dict() self.__plain_map = dict() self.__path =", "in range(stop): sorted_v.append(reverse[connected[n]]) value = ''.join(str(v) for v in sorted_v) results.update({value: i[1]}) out_f.write(value", "= 0 while count > 0: logger.debug('create_path() - visiting:\\n%s - %s', str(visiting), str(to_connect[visiting]))", "self.place_h(circuit, self.__most_connected[0], quantum_r, x=x) self.place_cx(circuit, quantum_r, oracle=oracle) self.place_h(circuit, self.__most_connected[0], quantum_r, initial=False) if x", "sorted_c: reverse = i[0][::-1] logger.log(logging.VERBOSE, 'launch_exp() - reverse in for 1st loop: %s',", "= \"Apache\" __version__ = \"2.0\" __email__ = \"<EMAIL>\" import os from time import", "not in inverse_graph: inverse_graph.update({start: [end]}) else: inverse_graph[start].append(end) for node in graph: if node", "%d - Shots %d', n_qubits, execution, num_shots) envariance_exec(execution, device, utility, n_qubits=n_qubits, num_shots=num_shots, directory=directory)", "circuit = Q_program.create_circuit(\"envariance\", [quantum_r], [classical_r]) connected = utility.envariance(circuit=circuit, quantum_r=quantum_r, classical_r=classical_r, n_qubits=n_qubits) QASM_source =", "10') if stop > 0: self.cx(circuit, quantum_r[qubit], quantum_r[self.__connected[qubit]], qubit, self.__connected[qubit]) stop -= 1", "execution) + 
'/' + device + '_' + str(num_shots) + '_' + str(", "n_qubits // 2 for i in sorted_c: reverse = i[0][::-1] logger.log(logging.VERBOSE, 'launch_exp() -", "the specific language governing permissions and # limitations under the License. # =============================================================================", "result.get_counts(\"envariance\") except Exception: logger.critical('Exception occurred, retrying\\nQubits %d - Execution %d - Shots %d',", "- oredred_q:\\n%s', str(connected)) stop = n_qubits // 2 for i in sorted_c: reverse", "for %s !', device) exit(2) elif device == online_sim: if n_qubits <= 5:", "in sorted_c: reverse = i[0][::-1] sorted_v = [] for n in range(n_qubits -", "+ stop]]) for n in range(stop): sorted_v.append(reverse[connected[n]]) value = ''.join(str(v) for v in", "@staticmethod def find_max(ranks): logger.debug('ranks:\\n%s', str(ranks)) most_connected = max(ranks.items(), key=operator.itemgetter(1))[0] found = [most_connected, ranks[most_connected]]", "device, utility, n_qubits=n_qubits, oracle=oracle, num_shots=num_shots, directory=directory) return try: counts = result.get_counts('parity') except Exception:", "APIToken and API url except ConnectionError: sleep(900) logger.critical('API Exception occurred, retrying\\nQubits %d -", "size = 5 # device = 'ibmqx_qasm_simulator' else: logger.critical('launch_exp() - Too much qubits", "a valid path that connect qubits used in the circuit def create_path(self, start,", "for end in graph: for start in graph[end]: if start not in inverse_graph:", "n_qubits, oracle, execution, num_shots) parity_exec(execution, device, utility, n_qubits=n_qubits, oracle=oracle, num_shots=num_shots, directory=directory) return logger.debug('launch_exp()", "Parma, Italy\" __license__ = \"Apache\" __version__ = \"2.0\" __email__ = \"<EMAIL>\" import os", "License for the specific language governing permissions and # limitations under the License.", "graph[end]: if start not in inverse_graph: 
inverse_graph.update({start: [end]}) else: inverse_graph[start].append(end) for node in", "== qx3 or device == qx5: if n_qubits <= 16: size = 16", "\"License\"); # you may not use this file except in compliance with the", "logger.log(logging.VERBOSE, 'launch_exp() - connected[n+stop+1], sorted_v[n+2] in 2nd for loop: %s%s', str(connected[n + stop", "oracle + '/' + 'execution' + str( execution) + '/' + device +", "obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless", "+ 1]]) logger.log(logging.VERBOSE, 'launch_exp() - connected[n+1], sorted_v[n+1] in 2nd for loop: %s,%s', str(connected[n", "ranks) # TODO Try using some sort of centrality algorithm def start_explore(self, graph,", "%s', str(found)) return found # create a valid path that connect qubits used", "\\ or ('busy' in backend_status and backend_status['busy'] is True): logger.critical('%s currently offline, waiting...',", "max_qubits = len(self.__path) logger.debug('create() - N qubits: %s', str(self.__n_qubits)) logger.debug('create() - Max qubits:", "= result.get_counts(\"envariance\") except Exception: logger.critical('Exception occurred, retrying\\nQubits %d - Execution %d - Shots", "- coupling_map:\\n%s', str(self.__coupling_map)) self.invert_graph(coupling_map, self.__inverse_coupling_map) logger.log(logging.DEBUG, 'init() - inverse coupling map:\\n%s', str(self.__inverse_coupling_map)) for", "n_qubits=n_qubits, oracle=oracle, num_shots=num_shots, directory=directory) return logger.debug('launch_exp() - counts:\\n%s', str(counts)) sorted_c = sorted(counts.items(), key=operator.itemgetter(1),", "inverse coupling map:\\n%s', str(self.__inverse_coupling_map)) for i in coupling_map: self.__plain_map.update({i: self.__inverse_coupling_map[i] + coupling_map[i]}) logger.debug('init()", "'w') # store counts in txt file and xlsx file out_f.write('VALUES\\t\\tCOUNTS\\n\\n') logger.debug('launch_exp() -", "Exception occurred, retrying\\nQubits %d - Oracle %s - Execution %d - 
Queries %d',", "the given device def envariance_exec(execution, device, utility, n_qubits, num_shots=1024, directory='Data_Envariance/'): os.makedirs(os.path.dirname(directory), exist_ok=True) size", "getting backend status, retrying...') sleep(900) continue except ValueError: logger.critical('Backend is not available, waiting...')", "Pauli-X gates def place_x(self, circuit, quantum_r): sorted_c = sorted(self.__connected.items(), key=operator.itemgetter(0)) logger.log(logging.VERBOSE, 'place_x() -", "stop + 1) != n_qubits: sorted_v.append(reverse[connected[n + stop + 1]]) logger.log(logging.VERBOSE, 'launch_exp() -", "stop): sorted_v.append(reverse[connected[n + stop]]) for n in range(stop): sorted_v.append(reverse[connected[n]]) value = ''.join(str(v) for", "device + '/' + oracle + '/' + 'execution' + str( execution) +", "oracle = 11') self.cx(circuit, quantum_r[qubit], quantum_r[self.__connected[qubit]], qubit, self.__connected[qubit]) elif oracle == '10': logger.log(logging.VERBOSE,", "http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing,", "'/' + 'execution' + str( execution) + '/' + device + '_' +", "in writing, software # distributed under the License is distributed on an \"AS", "# launch envariance experiment on the given device def envariance_exec(execution, device, utility, n_qubits,", "out_f.write('VALUES\\t\\tCOUNTS\\n\\n') stop = n_qubits // 2 for i in sorted_c: reverse = i[0][::-1]", "<= 16: size = 16 else: logger.critical('launch_exp() - Unknown device.') exit(3) Q_program =", "<= 0: break self.__connected.update({qubit: self.__path[qubit]}) count -= 1 logger.debug('create() - connected:\\n%s', str(self.__connected)) self.place_h(circuit,", "status, retrying...') sleep(900) continue except ValueError: logger.critical('Backend is not available, waiting...') sleep(900) continue", "> 0: logger.debug('create_path() - visiting:\\n%s - %s', str(visiting), str(to_connect[visiting])) # for visiting in", 
"str(found)) return found # create a valid path that connect qubits used in", "self.place_h(circuit, self.__most_connected[0], quantum_r, initial=False) if x is True: self.place_x(circuit, quantum_r) self.measure(circuit, quantum_r, classical_r)", "visiting += 1 logger.debug('create_path() - path:\\n%s', str(self.__path)) def cx(self, circuit, control_qubit, target_qubit, control,", "sorted_c = sorted(self.__connected.items(), key=operator.itemgetter(0)) logger.log(logging.VERBOSE, 'place_x() - sorted_c:\\n%s', str(sorted_c)) s_0 = self.__n_qubits //", "in range(stop): sorted_v.append(reverse[connected[n + 1]]) logger.log(logging.VERBOSE, 'launch_exp() - connected[n+1], sorted_v[n+1] in 2nd for", "Q_program.get_qasm(\"envariance\") logger.debug('launch_exp() - QASM:\\n%s', str(QASM_source)) while True: try: backend_status = Q_program.get_backend_status(device) if ('available'", "only up to %s qubits', str(max_qubits)) exit(2) count = self.__n_qubits for qubit in", "connected = utility.parity(circuit=circuit, quantum_r=quantum_r, classical_r=classical_r, n_qubits=n_qubits, oracle=oracle) QASM_source = Q_program.get_qasm('parity') logger.debug('launch_exp() - QASM:\\n%s',", "[reverse[connected[0]]] logger.log(logging.VERBOSE, 'launch_exp() - connected[0] in 1st for loop: %s', str(connected[0])) logger.log(logging.VERBOSE, 'launch_exp()", "+ 'execution' + str( execution) + '/' + device + '_' + str(num_shots)", "0: logger.debug('create_path() - visiting:\\n%s - %s', str(visiting), str(to_connect[visiting])) # for visiting in to_connect:", "count <= 0: break if i >= s_0: circuit.x(quantum_r[qubit[0]]) else: circuit.iden(quantum_r[qubit[0]]) i +=", "execution, num_shots) parity_exec(execution, device, utility, n_qubits=n_qubits, oracle=oracle, num_shots=num_shots, directory=directory) return quantum_r = Q_program.create_quantum_register(\"qr\",", "or ('busy' in backend_status and backend_status['busy'] is True): logger.critical('%s currently offline, waiting...', 
device)", "utility, n_qubits=n_qubits, oracle=oracle, num_shots=num_shots, directory=directory) return logger.debug('launch_exp() - counts:\\n%s', str(counts)) sorted_c = sorted(counts.items(),", "= dict() if device == qx2 or device == qx4: if n_qubits <=", "in sorted_c: reverse = i[0][::-1] logger.log(logging.VERBOSE, 'launch_exp() - reverse in for 1st loop:", "xlsx file out_f.write('VALUES\\t\\tCOUNTS\\n\\n') stop = n_qubits // 2 for i in sorted_c: reverse", "for n in range(stop): sorted_v.append(reverse[connected[n]]) value = ''.join(str(v) for v in sorted_v) results.update({value:", "'launch_exp() - connected[n+1], sorted_v[n+1] in 2nd for loop: %s,%s', str(connected[n + 1]), str(sorted_v[n", "= \"Copyright 2017, Quantum Information Science, University of Parma, Italy\" __license__ = \"Apache\"", "directory=directory) return logger.debug('launch_exp() - counts:\\n%s', str(counts)) sorted_c = sorted(counts.items(), key=operator.itemgetter(1), reverse=True) filename =", "inverse_graph: inverse_graph.update({node: []}) # find the most connected qubit @staticmethod def find_max(ranks): logger.debug('ranks:\\n%s',", "sorted_v[n+1] in 2nd for loop: %s,%s', str(connected[n + 1]), str(sorted_v[n + 1])) if", "sorted_v[n+2] in 2nd for loop: %s%s', str(connected[n + stop + 1]), str(sorted_v[n +", "loop: %s', str(sorted_v)) for n in range(stop): sorted_v.append(reverse[connected[n + 1]]) logger.log(logging.VERBOSE, 'launch_exp() -", "self.cx(circuit, quantum_r[qubit], quantum_r[self.__connected[qubit]], qubit, self.__connected[qubit]) elif oracle == '10': logger.log(logging.VERBOSE, 'place_cx() - oracle", "inverse_graph is None: inverse_graph = {} for end in graph: for start in", "Try using some sort of centrality algorithm def start_explore(self, graph, ranks): visited =", "// 2 for qubit in self.__connected: if self.__connected[qubit] != -1: if oracle ==", "n_qubits <= 5: size = 5 # device = 'ibmqx_qasm_simulator' else: logger.critical('launch_exp() -", 
"envariance_exec(execution, device, utility, n_qubits=n_qubits, num_shots=num_shots, directory=directory) return logger.debug('launch_exp() - counts:\\n%s', str(counts)) sorted_c =", "oracle, execution, num_shots) parity_exec(execution, device, utility, n_qubits=n_qubits, oracle=oracle, num_shots=num_shots, directory=directory) return try: counts", "'init() - coupling_map:\\n%s', str(self.__coupling_map)) self.invert_graph(coupling_map, self.__inverse_coupling_map) logger.log(logging.DEBUG, 'init() - inverse coupling map:\\n%s', str(self.__inverse_coupling_map))", "quantum_r=quantum_r, classical_r=classical_r, n_qubits=n_qubits) QASM_source = Q_program.get_qasm(\"envariance\") logger.debug('launch_exp() - QASM:\\n%s', str(QASM_source)) while True: try:", "def start_explore(self, graph, ranks): visited = dict() for source in graph: visited.update({source: []})", "= directory + device + '/' + 'execution' + str( execution) + '/'", "logger.critical('Qubits %d - Oracle %s - Execution %d - Queries %d ---- Waiting", "in self.__connected: if self.__connected[qubit] != -1: if oracle == '11': logger.log(logging.VERBOSE, 'place_cx() -", "2.0 (the \"License\"); # you may not use this file except in compliance", "str(self.__inverse_coupling_map)) for i in coupling_map: self.__plain_map.update({i: self.__inverse_coupling_map[i] + coupling_map[i]}) logger.debug('init() - plain map:\\n%s',", "== '10': logger.log(logging.VERBOSE, 'place_cx() - oracle = 10') if stop > 0: self.cx(circuit,", "next in self.__coupling_map[visiting]: if next not in visited[source]: visited[source].append(next) if next not in", "+ device + '/' + oracle + '/' + 'execution' + str( execution)", "device, utility, n_qubits, oracle='11', num_shots=1024, directory='Data_Parity/'): os.makedirs(os.path.dirname(directory), exist_ok=True) size = 0 results =", "is True: circuit.x(quantum_r[qubit]) else: circuit.h(quantum_r[qubit]) # place Pauli-X gates def place_x(self, circuit, quantum_r):", "classical_r[qubit]) # 
create the circuit def create(self, circuit, quantum_r, classical_r, n_qubits, x=True, oracle='11'):", "circuit.cx(control_qubit, target_qubit) elif control in self.__coupling_map[target]: logger.log(logging.VERBOSE, 'cx() - inverse-cnot: (%s, %s)', str(control),", "WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the", "is back online, resuming execution', device) except ConnectionError: logger.critical('Error getting backend status, retrying...')", "argument: coupling_map') exit(1) def close(self): self.__ranks.clear() self.__inverse_coupling_map.clear() self.__coupling_map.clear() self.__path.clear() self.__most_connected.clear() def explore(self, source,", "utility, n_qubits=n_qubits, oracle=oracle, num_shots=num_shots, directory=directory) return quantum_r = Q_program.create_quantum_register(\"qr\", size) classical_r = Q_program.create_classical_register(\"cr\",", "1 # final measure def measure(self, circuit, quantum_r, classical_r): for qubit in self.__connected:", "# # Unless required by applicable law or agreed to in writing, software", "Q_program = QuantumProgram() try: Q_program.set_api(Qconfig.APItoken, Qconfig.config[\"url\"]) # set the APIToken and API url", "express or implied. # See the License for the specific language governing permissions", "= 0 results = dict() if device == qx2 or device == qx4:", "either express or implied. 
# See the License for the specific language governing", "5: size = 5 elif n_qubits <= 16: size = 16 else: logger.critical('launch_exp()", "0: break if node not in self.__path: self.__path.update({node: to_connect[visiting]}) count -= 1 logger.debug('create_path()", "count <= 0: break for node in plain_map[to_connect[visiting]]: if count <= 0: break", "Execution %d - Shots %d ---- Waiting for credits to replenish...', n_qubits, execution,", "if i >= s_0: circuit.x(quantum_r[qubit[0]]) else: circuit.iden(quantum_r[qubit[0]]) i += 1 i = 0", "classical_r) def envariance(self, circuit, quantum_r, classical_r, n_qubits): self.create(circuit, quantum_r, classical_r, n_qubits) sorted_c =", "+ device + '_' + str(num_shots) + '_' + str( n_qubits) + '_qubits_envariance.txt'", "quantum_r, initial=False) if x is True: self.place_x(circuit, quantum_r) self.measure(circuit, quantum_r, classical_r) def envariance(self,", "str(ranks)) most_connected = max(ranks.items(), key=operator.itemgetter(1))[0] found = [most_connected, ranks[most_connected]] logger.debug('max: %s', str(found)) return", "- connected[n+stop+1], sorted_v[n+2] in 2nd for loop: %s%s', str(connected[n + stop + 1]),", "backend_status = Q_program.get_backend_status(device) if ('available' in backend_status and backend_status['available'] is False) \\ or", "the License. 
# You may obtain a copy of the License at #", "= sorted(counts.items(), key=operator.itemgetter(1), reverse=True) filename = directory + device + '/' + oracle", "%d - Queries %d', n_qubits, oracle, execution, num_shots) parity_exec(execution, device, utility, n_qubits=n_qubits, oracle=oracle,", "- connected:\\n%s', str(connected)) self.__n_qubits = 0 self.__connected.clear() return connected def parity(self, circuit, quantum_r,", "logger.critical('Error getting backend status, retrying...') sleep(900) continue except ValueError: logger.critical('Backend is not available,", "Q_program.get_api().get_my_credits()['remaining'] < 3: sleep(900) logger.critical('Credits replenished, resuming execution') try: result = Q_program.execute([\"envariance\"], backend=device,", "plain_map): self.__path.update({start: -1}) to_connect = [start] max = len(self.__coupling_map) logger.debug('create_path() - max:\\n%s', str(max))", "loop: %s', str(connected[0])) logger.log(logging.VERBOSE, 'launch_exp() - sorted_v in 1st for loop: %s', str(sorted_v))", "'_' + str( n_qubits) + '_qubits_envariance.txt' os.makedirs(os.path.dirname(filename), exist_ok=True) out_f = open(filename, 'w') #", "in ranks: ranks.update({next: 0}) ranks[next] = ranks[next] + 1 self.explore(source, next, visited, ranks)", "cnot gates based on the path created in create_path method def place_cx(self, circuit,", "self.__n_qubits for qubit in self.__path: if count <= 0: break self.__connected.update({qubit: self.__path[qubit]}) count", "classical_r=classical_r, n_qubits=n_qubits) QASM_source = Q_program.get_qasm(\"envariance\") logger.debug('launch_exp() - QASM:\\n%s', str(QASM_source)) while True: try: backend_status", "= Q_program.create_circuit('parity', [quantum_r], [classical_r]) connected = utility.parity(circuit=circuit, quantum_r=quantum_r, classical_r=classical_r, n_qubits=n_qubits, oracle=oracle) QASM_source =", "device = 'ibmqx_qasm_simulator' else: logger.critical('launch_exp() - Too much qubits for %s !', 
device)", "filename = directory + device + '/' + oracle + '/' + 'execution'", "%s !', device) exit(2) elif device == online_sim: if n_qubits <= 5: size", "max = len(self.__coupling_map) logger.debug('create_path() - max:\\n%s', str(max)) count = max - 1 changed", "directory + device + '/' + oracle + '/' + 'execution' + str(", "device == qx3 or device == qx5: if n_qubits <= 16: size =", "connected:\\n%s', str(connected)) self.__n_qubits = 0 self.__connected.clear() return connected # launch envariance experiment on", "============================================================================= __author__ = \"<NAME>\" __copyright__ = \"Copyright 2017, Quantum Information Science, University of", "# # Licensed under the Apache License, Version 2.0 (the \"License\"); # you", "elif oracle == '10': logger.log(logging.VERBOSE, 'place_cx() - oracle = 10') if stop >", "qx5: if n_qubits <= 16: size = 16 # device = 'ibmqx_qasm_simulator' else:", "== qx4: if n_qubits <= 5: size = 5 # device = 'ibmqx_qasm_simulator'", "initial is True: if x is True: circuit.x(quantum_r[qubit]) else: circuit.h(quantum_r[qubit]) # place Pauli-X", "+ str( n_qubits) + '_qubits_envariance.txt' os.makedirs(os.path.dirname(filename), exist_ok=True) out_f = open(filename, 'w') # store", "---- Waiting for credits to replenish...', n_qubits, oracle, execution, num_shots) while Q_program.get_api().get_my_credits()['remaining'] <", "oredred_q:\\n%s', str(connected)) stop = n_qubits // 2 for i in sorted_c: reverse =", "device, utility, n_qubits=n_qubits, num_shots=num_shots, directory=directory) return logger.debug('launch_exp() - counts:\\n%s', str(counts)) sorted_c = sorted(counts.items(),", "reverse = i[0][::-1] logger.log(logging.VERBOSE, 'launch_exp() - reverse in for 1st loop: %s', str(reverse))", "Reserved. 
# # Licensed under the Apache License, Version 2.0 (the \"License\"); #", "key=operator.itemgetter(0)) connected = list(zip(*sorted_c))[0] logger.debug('envariance() - connected:\\n%s', str(connected)) self.__n_qubits = 0 self.__connected.clear() return", "use like a global. \"../qiskit-sdk-py\") from qiskit import QuantumProgram import Qconfig logger =", "# create a valid path that connect qubits used in the circuit def", "%d - Execution %d - Shots %d ---- Waiting for credits to replenish...',", "= dict() self.__path = dict() self.__n_qubits = 0 self.__ranks = dict() self.__connected =", "num_shots) parity_exec(execution, device, utility, n_qubits=n_qubits, oracle=oracle, num_shots=num_shots, directory=directory) return logger.debug('launch_exp() - counts:\\n%s', str(counts))", "at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed", "max:\\n%s', str(max)) count = max - 1 changed = True visiting = 0", "= Q_program.create_circuit(\"envariance\", [quantum_r], [classical_r]) connected = utility.envariance(circuit=circuit, quantum_r=quantum_r, classical_r=classical_r, n_qubits=n_qubits) QASM_source = Q_program.get_qasm(\"envariance\")", "not in self.__path: self.__path.update({node: to_connect[visiting]}) count -= 1 logger.debug('create_path() - path:\\n%s', str(self.__path)) if", "waiting...') sleep(900) continue break if Q_program.get_api().get_my_credits()['remaining'] < 3: logger.critical('Qubits %d - Oracle %s", "def envariance(self, circuit, quantum_r, classical_r, n_qubits): self.create(circuit, quantum_r, classical_r, n_qubits) sorted_c = sorted(self.__connected.items(),", "place_h(self, circuit, start, quantum_r, initial=True, x=True): for qubit in self.__connected: if qubit !=", "devices import * import logging import myLogger import operator import sys sys.path.append( #", "qubits', str(max_qubits)) exit(2) count = self.__n_qubits for qubit in self.__path: if count <=", "to replenish...', n_qubits, execution, 
num_shots) while Q_program.get_api().get_my_credits()['remaining'] < 3: sleep(900) logger.critical('Credits replenished, resuming", "ranks[next] + 1 self.explore(source, next, visited, ranks) # TODO Try using some sort", "self.__connected[qubit]) stop -= 1 # place Hadamard gates def place_h(self, circuit, start, quantum_r,", "inverse-cnot: (%s, %s)', str(control), str(target)) circuit.h(control_qubit) circuit.h(target_qubit) circuit.cx(target_qubit, control_qubit) circuit.h(control_qubit) circuit.h(target_qubit) else: logger.critical('cx()", "1 logger.debug('create_path() - path:\\n%s', str(self.__path)) def cx(self, circuit, control_qubit, target_qubit, control, target): if", "next not in visited[source]: visited[source].append(next) if next not in ranks: ranks.update({next: 0}) ranks[next]", "Oracle %s - Execution %d - Queries %d', n_qubits, oracle, execution, num_shots) parity_exec(execution,", "2nd for loop: %s%s', str(connected[n + stop + 1]), str(sorted_v[n + 2])) value", "except in compliance with the License. # You may obtain a copy of", "continue break if Q_program.get_api().get_my_credits()['remaining'] < 3: logger.critical('Qubits %d - Oracle %s - Execution", "= dict() self.__most_connected = [] if coupling_map: self.__coupling_map = coupling_map.copy() logger.log(logging.DEBUG, 'init() -", "for v in sorted_v) results.update({value: i[1]}) out_f.write(value + '\\t' + str(i[1]) + '\\n')", "= 16 # device = 'ibmqx_qasm_simulator' else: logger.critical('launch_exp() - Too much qubits for", "gates def place_h(self, circuit, start, quantum_r, initial=True, x=True): for qubit in self.__connected: if", "and API url except ConnectionError: sleep(900) logger.critical('API Exception occurred, retrying\\nQubits %d - Execution", "may not use this file except in compliance with the License. 
# You", "License is distributed on an \"AS IS\" BASIS, # WITHOUT WARRANTIES OR CONDITIONS", "for i in sorted_c: reverse = i[0][::-1] sorted_v = [] for n in", "in self.__coupling_map[visiting]: if next not in visited[source]: visited[source].append(next) if next not in ranks:", "backend_status['busy'] is True): logger.critical('%s currently offline, waiting...', device) while Q_program.get_backend_status(device)['available'] is False: sleep(1800)", "if n_qubits <= 16: size = 16 # device = 'ibmqx_qasm_simulator' else: logger.critical('launch_exp()", "'10': logger.log(logging.VERBOSE, 'place_cx() - oracle = 10') if stop > 0: self.cx(circuit, quantum_r[qubit],", "5 # device = 'ibmqx_qasm_simulator' else: logger.critical('launch_exp() - Too much qubits for %s", "the given device def parity_exec(execution, device, utility, n_qubits, oracle='11', num_shots=1024, directory='Data_Parity/'): os.makedirs(os.path.dirname(directory), exist_ok=True)", "def envariance_exec(execution, device, utility, n_qubits, num_shots=1024, directory='Data_Envariance/'): os.makedirs(os.path.dirname(directory), exist_ok=True) size = 0 results", "relative dependencies if you clone QISKit from the Git repo and use like", "coupling_map.copy() logger.log(logging.DEBUG, 'init() - coupling_map:\\n%s', str(self.__coupling_map)) self.invert_graph(coupling_map, self.__inverse_coupling_map) logger.log(logging.DEBUG, 'init() - inverse coupling", "= \"2.0\" __email__ = \"<EMAIL>\" import os from time import sleep from devices", "start_explore(self, graph, ranks): visited = dict() for source in graph: visited.update({source: []}) self.explore(source,", "\"Copyright 2017, Quantum Information Science, University of Parma, Italy\" __license__ = \"Apache\" __version__", "1) != n_qubits: sorted_v.append(reverse[connected[n + stop + 1]]) logger.log(logging.VERBOSE, 'launch_exp() - connected[n+stop+1], sorted_v[n+2]", "up to %s qubits', str(max_qubits)) exit(2) count = self.__n_qubits for qubit in self.__path:", 
"timeout=1000, shots=num_shots, max_credits=5) except Exception: sleep(900) logger.critical('Exception occurred, retrying\\nQubits %d - Execution %d", "connect qubits used in the circuit def create_path(self, start, plain_map): self.__path.update({start: -1}) to_connect", "list(self.__connected.keys()) logger.debug('parity() - connected:\\n%s', str(connected)) self.__n_qubits = 0 self.__connected.clear() return connected # launch", "num_shots) envariance_exec(execution, device, utility, n_qubits=n_qubits, num_shots=num_shots, directory=directory) return try: counts = result.get_counts(\"envariance\") except", "x=True): for qubit in self.__connected: if qubit != start: circuit.h(quantum_r[qubit]) else: if initial", "self.__path: self.__path.update({node: to_connect[visiting]}) count -= 1 logger.debug('create_path() - path:\\n%s', str(self.__path)) if node not", "parity_exec(execution, device, utility, n_qubits=n_qubits, oracle=oracle, num_shots=num_shots, directory=directory) return logger.debug('launch_exp() - counts:\\n%s', str(counts)) sorted_c", "QuantumProgram() try: Q_program.set_api(Qconfig.APItoken, Qconfig.config[\"url\"]) # set the APIToken and API url except ConnectionError:", "- Execution %d - Shots %d ---- Waiting for credits to replenish...', n_qubits,", "self.__path[qubit]}) count -= 1 logger.debug('create() - connected:\\n%s', str(self.__connected)) self.place_h(circuit, self.__most_connected[0], quantum_r, x=x) self.place_cx(circuit,", "- max:\\n%s', str(max)) count = max - 1 changed = True visiting =", "<= 0: break for node in plain_map[to_connect[visiting]]: if count <= 0: break if", "<= 0: break if node not in self.__path: self.__path.update({node: to_connect[visiting]}) count -= 1", "= [] for n in range(n_qubits - stop): sorted_v.append(reverse[connected[n + stop]]) for n", "size = 16 else: logger.critical('launch_exp() - Unknown device.') exit(3) Q_program = QuantumProgram() try:", "max_credits=5) except Exception: sleep(900) 
logger.critical('Exception occurred, retrying\\nQubits %d - Oracle %s - Execution", "dict() self.__most_connected = [] if coupling_map: self.__coupling_map = coupling_map.copy() logger.log(logging.DEBUG, 'init() - coupling_map:\\n%s',", "[classical_r]) connected = utility.parity(circuit=circuit, quantum_r=quantum_r, classical_r=classical_r, n_qubits=n_qubits, oracle=oracle) QASM_source = Q_program.get_qasm('parity') logger.debug('launch_exp() -", "control, target): if target in self.__coupling_map[control]: logger.log(logging.VERBOSE, 'cx() - cnot: (%s, %s)', str(control),", "for further use @staticmethod def invert_graph(graph, inverse_graph=None): if inverse_graph is None: inverse_graph =", "qx2 or device == qx4: if n_qubits <= 5: size = 5 #", "0 while count > 0: logger.debug('create_path() - visiting:\\n%s - %s', str(visiting), str(to_connect[visiting])) #", "counts = result.get_counts('parity') except Exception: logger.critical('Exception occurred, retrying\\nQubits %d - Oracle %s -", "- N qubits: %s', str(self.__n_qubits)) logger.debug('create() - Max qubits: %s', str(max_qubits)) if max_qubits" ]
[ "migrations, models import ckeditor.fields class Migration(migrations.Migration): dependencies = [ ('survey', '0008_survey_image'), ] operations", "-*- coding: utf-8 -*- from __future__ import unicode_literals from django.db import migrations, models", "import migrations, models import ckeditor.fields class Migration(migrations.Migration): dependencies = [ ('survey', '0008_survey_image'), ]", "models import ckeditor.fields class Migration(migrations.Migration): dependencies = [ ('survey', '0008_survey_image'), ] operations =", "dependencies = [ ('survey', '0008_survey_image'), ] operations = [ migrations.AlterField( model_name='survey', name='description', field=ckeditor.fields.RichTextField(help_text='This", "class Migration(migrations.Migration): dependencies = [ ('survey', '0008_survey_image'), ] operations = [ migrations.AlterField( model_name='survey',", "coding: utf-8 -*- from __future__ import unicode_literals from django.db import migrations, models import", "= [ ('survey', '0008_survey_image'), ] operations = [ migrations.AlterField( model_name='survey', name='description', field=ckeditor.fields.RichTextField(help_text='This description", "unicode_literals from django.db import migrations, models import ckeditor.fields class Migration(migrations.Migration): dependencies = [", "import unicode_literals from django.db import migrations, models import ckeditor.fields class Migration(migrations.Migration): dependencies =", "[ ('survey', '0008_survey_image'), ] operations = [ migrations.AlterField( model_name='survey', name='description', field=ckeditor.fields.RichTextField(help_text='This description is", "ckeditor.fields class Migration(migrations.Migration): dependencies = [ ('survey', '0008_survey_image'), ] operations = [ migrations.AlterField(", "migrations.AlterField( model_name='survey', name='description', field=ckeditor.fields.RichTextField(help_text='This description is show to the contributors.', verbose_name='description', blank=True), ),", "-*- 
from __future__ import unicode_literals from django.db import migrations, models import ckeditor.fields class", "model_name='survey', name='description', field=ckeditor.fields.RichTextField(help_text='This description is show to the contributors.', verbose_name='description', blank=True), ), ]", "django.db import migrations, models import ckeditor.fields class Migration(migrations.Migration): dependencies = [ ('survey', '0008_survey_image'),", "[ migrations.AlterField( model_name='survey', name='description', field=ckeditor.fields.RichTextField(help_text='This description is show to the contributors.', verbose_name='description', blank=True),", "import ckeditor.fields class Migration(migrations.Migration): dependencies = [ ('survey', '0008_survey_image'), ] operations = [", "= [ migrations.AlterField( model_name='survey', name='description', field=ckeditor.fields.RichTextField(help_text='This description is show to the contributors.', verbose_name='description',", "__future__ import unicode_literals from django.db import migrations, models import ckeditor.fields class Migration(migrations.Migration): dependencies", "from django.db import migrations, models import ckeditor.fields class Migration(migrations.Migration): dependencies = [ ('survey',", "from __future__ import unicode_literals from django.db import migrations, models import ckeditor.fields class Migration(migrations.Migration):", "# -*- coding: utf-8 -*- from __future__ import unicode_literals from django.db import migrations,", "'0008_survey_image'), ] operations = [ migrations.AlterField( model_name='survey', name='description', field=ckeditor.fields.RichTextField(help_text='This description is show to", "operations = [ migrations.AlterField( model_name='survey', name='description', field=ckeditor.fields.RichTextField(help_text='This description is show to the contributors.',", "Migration(migrations.Migration): dependencies = [ ('survey', '0008_survey_image'), ] operations = [ migrations.AlterField( 
model_name='survey', name='description',", "utf-8 -*- from __future__ import unicode_literals from django.db import migrations, models import ckeditor.fields", "('survey', '0008_survey_image'), ] operations = [ migrations.AlterField( model_name='survey', name='description', field=ckeditor.fields.RichTextField(help_text='This description is show", "] operations = [ migrations.AlterField( model_name='survey', name='description', field=ckeditor.fields.RichTextField(help_text='This description is show to the" ]
[ "username, groupname, user_pass) else: unique_id = str(time.time()).split('.')[0] copied_file = dest + '/%s_%s' %", "return '{\"success\": 0, \"msg\": [\"%s\"]}' % e def delete(src, files, user_pass): try: file_not_exists", "in file_list: file_to_copy = src + '/%s' % file if not os.path.exists(file_to_copy): file_not_exists.append(file_to_copy)", "if not os.path.exists(dest): os.makedirs(dest) for file in file_list: file_to_copy = src + '/%s'", "% (user_pass, file_path)) if file_not_exists: return '{\"success\": 0, \"msg\": [\"%s\", file does not", "dest.endswith('/'): dest = dest[:-1] if not os.path.exists(dest): os.makedirs(dest) for file in file_list: file_to_copy", "dest[:-1] if not os.path.exists(dest): os.makedirs(dest) for file in file_list: file_to_copy = src +", "copy(src, file_list, dest, user_pass, *args): log.info(\"\\nCopying\\n\") try: file_not_exists = [] if src.endswith('/'): src", "% (user_pass, file_to_copy, copied_file)) chown(copied_file, username, groupname, user_pass) if file_not_exists: return '{\"success\": 0,", "groupname, user_pass) else: unique_id = str(time.time()).split('.')[0] copied_file = dest + '/%s_%s' % (file,", "(user_pass, file_to_copy, copied_file)) chown(copied_file, username, groupname, user_pass) if file_not_exists: return '{\"success\": 0, \"msg\":", "log.error(e) return '{\"success\": 0, \"msg\": [\"%s\"]}' % e def move_rename(src, file_list, dest, user_pass):", "time import run_services from Basic_linux_commands.chown_chmod import chown from bigdata_logs.logger import getLoggingInstance log =", "dest.endswith('/'): dest = dest[:-1] for file in file_list: file_to_move = src+ '/%s' %", "from Basic_linux_commands.chown_chmod import chown from bigdata_logs.logger import getLoggingInstance log = getLoggingInstance() username =", "mv %s %s\" % (user_pass, file_to_move, dest)) if file_not_exists: return '{\"success\": 0, \"msg\":", "not exists!!!]}' % file_not_exists elif existing_file_list: return '{\"success\": 0, \"msg\": 
[\"%s\", file already", "'/%s_%s' % (file, unique_id) run_services.run_basic_services(\"echo %s | sudo -S cp -r %s %s\"", "[\"%s\", file does not exists!!!]}' % file_not_exists elif existing_file_list: return '{\"success\": 0, \"msg\":", "return '{\"success\": 0, \"msg\": [\"%s\"]}' % e def move_rename(src, file_list, dest, user_pass): try:", "user_pass): try: file_not_exists = [] existing_file_list = [] if src.endswith('/'): src = src[:-1]", "os.path.exists(file_to_copy): file_not_exists.append(file_to_copy) else: if os.path.isdir(file_to_copy): unique_id = str(time.time()).split('.')[0] copied_file = dest + '/%s_%s'", "file_list, dest, user_pass, *args): log.info(\"\\nCopying\\n\") try: file_not_exists = [] if src.endswith('/'): src =", "%s %s\" % (user_pass, file_to_copy, copied_file)) chown(copied_file, username, groupname, user_pass) else: unique_id =", "sudo -S mv %s %s\" % (user_pass, file_to_move, dest)) if file_not_exists: return '{\"success\":", "file dest_path = dest + '/%s' % file if os.path.exists(dest_path): existing_file_list.append(dest_path) continue if", "'{\"success\": 0, \"msg\": [\"%s\"]}' % e def delete(src, files, user_pass): try: file_not_exists =", "dest + '/%s' % file if os.path.exists(dest_path): existing_file_list.append(dest_path) continue if not os.path.exists(file_to_move): file_not_exists.append(file_to_move)", "1}' except Exception as e: log.error(\"Exception in copy_move_delete ==> copy()\") log.error(e) return '{\"success\":", "dest + '/%s_%s' % (file, unique_id) run_services.run_basic_services(\"echo %s | sudo -S cp -r", "os.path.isdir(file_to_copy): unique_id = str(time.time()).split('.')[0] copied_file = dest + '/%s_%s' % (file, unique_id) run_services.run_basic_services(\"echo", "e: log.error(\"Exception in copy_move_delete ==> move()\") log.error(e) return '{\"success\": 0, \"msg\": [\"%s\"]}' %", "file_list: file_to_move = src+ '/%s' % file dest_path = dest + '/%s' %", "file does not exists!!!]}' % file_not_exists elif 
existing_file_list: return '{\"success\": 0, \"msg\": [\"%s\",", "import time import run_services from Basic_linux_commands.chown_chmod import chown from bigdata_logs.logger import getLoggingInstance log", "groupname = username def copy(src, file_list, dest, user_pass, *args): log.info(\"\\nCopying\\n\") try: file_not_exists =", "dest, user_pass, *args): log.info(\"\\nCopying\\n\") try: file_not_exists = [] if src.endswith('/'): src = src[:-1]", "0, \"msg\": [\"%s\", file does not exists!!!]}' % file_not_exists elif existing_file_list: return '{\"success\":", "0, \"msg\": [\"%s\", file already exists!!!]}' % existing_file_list return '{\"success\": 1}' except Exception", "copy_move_delete ==> move()\") log.error(e) return '{\"success\": 0, \"msg\": [\"%s\"]}' % e def delete(src,", "file in files: file_path = src + '/%s' % file if not os.path.exists(file_path):", "dest[:-1] for file in file_list: file_to_move = src+ '/%s' % file dest_path =", "src = src[:-1] if dest.endswith('/'): dest = dest[:-1] for file in file_list: file_to_move", "copied_file)) chown(copied_file, username, groupname, user_pass) else: unique_id = str(time.time()).split('.')[0] copied_file = dest +", "= getLoggingInstance() username = os.getenv(\"user\") groupname = username def copy(src, file_list, dest, user_pass,", "file_list, dest, user_pass): try: file_not_exists = [] existing_file_list = [] if src.endswith('/'): src", "os.path.exists(file_to_move): file_not_exists.append(file_to_move) else: run_services.run_basic_services(\"echo %s | sudo -S mv %s %s\" % (user_pass,", "os.makedirs(dest) for file in file_list: file_to_copy = src + '/%s' % file if", "file_path = src + '/%s' % file if not os.path.exists(file_path): file_not_exists.append(file_path) run_services.run_basic_services(\"echo %s", "if not os.path.exists(file_to_copy): file_not_exists.append(file_to_copy) else: if os.path.isdir(file_to_copy): unique_id = str(time.time()).split('.')[0] copied_file = dest", "user_pass, *args): 
log.info(\"\\nCopying\\n\") try: file_not_exists = [] if src.endswith('/'): src = src[:-1] if", "\"msg\": [\"%s\"]}' % e def delete(src, files, user_pass): try: file_not_exists = [] if", "import chown from bigdata_logs.logger import getLoggingInstance log = getLoggingInstance() username = os.getenv(\"user\") groupname", "def copy(src, file_list, dest, user_pass, *args): log.info(\"\\nCopying\\n\") try: file_not_exists = [] if src.endswith('/'):", "else: run_services.run_basic_services(\"echo %s | sudo -S mv %s %s\" % (user_pass, file_to_move, dest))", "log.info(\"\\nCopying\\n\") try: file_not_exists = [] if src.endswith('/'): src = src[:-1] if dest.endswith('/'): dest", "\"msg\": [\"%s\", file does not exists!!!]}' % file_not_exists elif existing_file_list: return '{\"success\": 0,", "-S cp %s %s\" % (user_pass, file_to_copy, copied_file)) chown(copied_file, username, groupname, user_pass) if", "'/%s_%s' % (file, unique_id) run_services.run_basic_services(\"echo %s | sudo -S cp %s %s\" %", "==> move()\") log.error(e) return '{\"success\": 0, \"msg\": [\"%s\"]}' % e def delete(src, files,", "file_not_exists = [] if src.endswith('/'): src = src[:-1] if dest.endswith('/'): dest = dest[:-1]", "if file_not_exists: return '{\"success\": 0, \"msg\": [\"%s\", file does not exists!!!]}' % file_not_exists", "os.path.exists(dest): os.makedirs(dest) for file in file_list: file_to_copy = src + '/%s' % file", "move()\") log.error(e) return '{\"success\": 0, \"msg\": [\"%s\"]}' % e def delete(src, files, user_pass):", "\"msg\": [\"%s\", file does not exists!!!]}' % file_not_exists return '{\"success\": 1}' except Exception", "1}' except Exception as e: log.error(\"Exception in copy_move_delete ==> move()\") log.error(e) return '{\"success\":", "'/%s' % file if not os.path.exists(file_to_copy): file_not_exists.append(file_to_copy) else: if os.path.isdir(file_to_copy): unique_id = str(time.time()).split('.')[0]", "return '{\"success\": 0, \"msg\": [\"%s\", file does not 
exists!!!]}' % file_not_exists elif existing_file_list:", "Basic_linux_commands.chown_chmod import chown from bigdata_logs.logger import getLoggingInstance log = getLoggingInstance() username = os.getenv(\"user\")", "src = src[:-1] if dest.endswith('/'): dest = dest[:-1] if not os.path.exists(dest): os.makedirs(dest) for", "%s | sudo -S mv %s %s\" % (user_pass, file_to_move, dest)) if file_not_exists:", "copied_file)) chown(copied_file, username, groupname, user_pass) if file_not_exists: return '{\"success\": 0, \"msg\": [\"%s\", file", "file already exists!!!]}' % existing_file_list return '{\"success\": 1}' except Exception as e: log.error(\"Exception", "os.getenv(\"user\") groupname = username def copy(src, file_list, dest, user_pass, *args): log.info(\"\\nCopying\\n\") try: file_not_exists", "log.error(\"Exception in copy_move_delete ==> move()\") log.error(e) return '{\"success\": 0, \"msg\": [\"%s\"]}' % e", "= src[:-1] for file in files: file_path = src + '/%s' % file", "-S cp -r %s %s\" % (user_pass, file_to_copy, copied_file)) chown(copied_file, username, groupname, user_pass)", "cp %s %s\" % (user_pass, file_to_copy, copied_file)) chown(copied_file, username, groupname, user_pass) if file_not_exists:", "'{\"success\": 0, \"msg\": [\"%s\", file already exists!!!]}' % existing_file_list return '{\"success\": 1}' except", "username, groupname, user_pass) if file_not_exists: return '{\"success\": 0, \"msg\": [\"%s\", file does not", "\"msg\": [\"%s\", file already exists!!!]}' % existing_file_list return '{\"success\": 1}' except Exception as", "src[:-1] for file in files: file_path = src + '/%s' % file if", "%s\" % (user_pass, file_path)) if file_not_exists: return '{\"success\": 0, \"msg\": [\"%s\", file does", "file_not_exists: return '{\"success\": 0, \"msg\": [\"%s\", file does not exists!!!]}' % file_not_exists return", "'{\"success\": 0, \"msg\": [\"%s\"]}' % e def move_rename(src, file_list, dest, user_pass): try: file_not_exists", "unique_id) 
run_services.run_basic_services(\"echo %s | sudo -S cp %s %s\" % (user_pass, file_to_copy, copied_file))", "if src.endswith('/'): src = src[:-1] if dest.endswith('/'): dest = dest[:-1] for file in", "in files: file_path = src + '/%s' % file if not os.path.exists(file_path): file_not_exists.append(file_path)", "+ '/%s_%s' % (file, unique_id) run_services.run_basic_services(\"echo %s | sudo -S cp %s %s\"", "dest + '/%s_%s' % (file, unique_id) run_services.run_basic_services(\"echo %s | sudo -S cp %s", "'{\"success\": 0, \"msg\": [\"%s\", file does not exists!!!]}' % file_not_exists return '{\"success\": 1}'", "| sudo -S cp %s %s\" % (user_pass, file_to_copy, copied_file)) chown(copied_file, username, groupname,", "copy_move_delete ==> copy()\") log.error(e) return '{\"success\": 0, \"msg\": [\"%s\"]}' % e def move_rename(src,", "e def delete(src, files, user_pass): try: file_not_exists = [] if src.endswith('/'): src =", "= src[:-1] if dest.endswith('/'): dest = dest[:-1] for file in file_list: file_to_move =", "[\"%s\"]}' % e def delete(src, files, user_pass): try: file_not_exists = [] if src.endswith('/'):", "file in file_list: file_to_move = src+ '/%s' % file dest_path = dest +", "= [] if src.endswith('/'): src = src[:-1] if dest.endswith('/'): dest = dest[:-1] for", "src.endswith('/'): src = src[:-1] for file in files: file_path = src + '/%s'", "cp -r %s %s\" % (user_pass, file_to_copy, copied_file)) chown(copied_file, username, groupname, user_pass) else:", "src[:-1] if dest.endswith('/'): dest = dest[:-1] for file in file_list: file_to_move = src+", "% file if not os.path.exists(file_to_copy): file_not_exists.append(file_to_copy) else: if os.path.isdir(file_to_copy): unique_id = str(time.time()).split('.')[0] copied_file", "except Exception as e: log.error(\"Exception in copy_move_delete ==> move()\") log.error(e) return '{\"success\": 0,", "if not os.path.exists(file_to_move): file_not_exists.append(file_to_move) else: run_services.run_basic_services(\"echo %s 
| sudo -S mv %s %s\"", "bigdata_logs.logger import getLoggingInstance log = getLoggingInstance() username = os.getenv(\"user\") groupname = username def", "username def copy(src, file_list, dest, user_pass, *args): log.info(\"\\nCopying\\n\") try: file_not_exists = [] if", "= dest[:-1] if not os.path.exists(dest): os.makedirs(dest) for file in file_list: file_to_copy = src", "[\"%s\", file does not exists!!!]}' % file_not_exists return '{\"success\": 1}' except Exception as", "move_rename(src, file_list, dest, user_pass): try: file_not_exists = [] existing_file_list = [] if src.endswith('/'):", "'{\"success\": 1}' except Exception as e: log.error(\"Exception in copy_move_delete ==> copy()\") log.error(e) return", "\"msg\": [\"%s\"]}' % e def move_rename(src, file_list, dest, user_pass): try: file_not_exists = []", "0, \"msg\": [\"%s\", file does not exists!!!]}' % file_not_exists return '{\"success\": 1}' except", "-rf %s\" % (user_pass, file_path)) if file_not_exists: return '{\"success\": 0, \"msg\": [\"%s\", file", "from bigdata_logs.logger import getLoggingInstance log = getLoggingInstance() username = os.getenv(\"user\") groupname = username", "chown from bigdata_logs.logger import getLoggingInstance log = getLoggingInstance() username = os.getenv(\"user\") groupname =", "-r %s %s\" % (user_pass, file_to_copy, copied_file)) chown(copied_file, username, groupname, user_pass) else: unique_id", "run_services.run_basic_services(\"echo %s | sudo -S cp %s %s\" % (user_pass, file_to_copy, copied_file)) chown(copied_file,", "src.endswith('/'): src = src[:-1] if dest.endswith('/'): dest = dest[:-1] for file in file_list:", "in copy_move_delete ==> move()\") log.error(e) return '{\"success\": 0, \"msg\": [\"%s\"]}' % e def", "%s %s\" % (user_pass, file_to_copy, copied_file)) chown(copied_file, username, groupname, user_pass) if file_not_exists: return", "[\"%s\", file already exists!!!]}' % existing_file_list return '{\"success\": 1}' except Exception as e:", 
"file_to_copy = src + '/%s' % file if not os.path.exists(file_to_copy): file_not_exists.append(file_to_copy) else: if", "'{\"success\": 1}' except Exception as e: log.error(\"Exception in copy_move_delete ==> move()\") log.error(e) return", "e def move_rename(src, file_list, dest, user_pass): try: file_not_exists = [] existing_file_list = []", "file_list: file_to_copy = src + '/%s' % file if not os.path.exists(file_to_copy): file_not_exists.append(file_to_copy) else:", "file_not_exists: return '{\"success\": 0, \"msg\": [\"%s\", file does not exists!!!]}' % file_not_exists elif", "for file in file_list: file_to_copy = src + '/%s' % file if not", "copied_file = dest + '/%s_%s' % (file, unique_id) run_services.run_basic_services(\"echo %s | sudo -S", "exists!!!]}' % file_not_exists elif existing_file_list: return '{\"success\": 0, \"msg\": [\"%s\", file already exists!!!]}'", "sudo -S cp %s %s\" % (user_pass, file_to_copy, copied_file)) chown(copied_file, username, groupname, user_pass)", "file_not_exists return '{\"success\": 1}' except Exception as e: log.error(\"Exception in copy_move_delete ==> copy()\")", "file_not_exists return '{\"success\": 1}' except Exception as e: log.error(\"Exception in copy_move_delete ==> move()\")", "try: file_not_exists = [] if src.endswith('/'): src = src[:-1] for file in files:", "'/%s' % file if not os.path.exists(file_path): file_not_exists.append(file_path) run_services.run_basic_services(\"echo %s | sudo -S rm", "as e: log.error(\"Exception in copy_move_delete ==> move()\") log.error(e) return '{\"success\": 0, \"msg\": [\"%s\"]}'", "does not exists!!!]}' % file_not_exists return '{\"success\": 1}' except Exception as e: log.error(\"Exception", "= [] if src.endswith('/'): src = src[:-1] for file in files: file_path =", "*args): log.info(\"\\nCopying\\n\") try: file_not_exists = [] if src.endswith('/'): src = src[:-1] if dest.endswith('/'):", "not os.path.exists(dest): os.makedirs(dest) for file in file_list: file_to_copy = src 
+ '/%s' %", "==> copy()\") log.error(e) return '{\"success\": 0, \"msg\": [\"%s\"]}' % e def move_rename(src, file_list,", "[\"%s\"]}' % e def move_rename(src, file_list, dest, user_pass): try: file_not_exists = [] existing_file_list", "= dest[:-1] for file in file_list: file_to_move = src+ '/%s' % file dest_path", "= src+ '/%s' % file dest_path = dest + '/%s' % file if", "(user_pass, file_to_move, dest)) if file_not_exists: return '{\"success\": 0, \"msg\": [\"%s\", file does not", "src + '/%s' % file if not os.path.exists(file_path): file_not_exists.append(file_path) run_services.run_basic_services(\"echo %s | sudo", "= username def copy(src, file_list, dest, user_pass, *args): log.info(\"\\nCopying\\n\") try: file_not_exists = []", "e: log.error(\"Exception in copy_move_delete ==> copy()\") log.error(e) return '{\"success\": 0, \"msg\": [\"%s\"]}' %", "'/%s' % file dest_path = dest + '/%s' % file if os.path.exists(dest_path): existing_file_list.append(dest_path)", "delete(src, files, user_pass): try: file_not_exists = [] if src.endswith('/'): src = src[:-1] for", "= dest + '/%s' % file if os.path.exists(dest_path): existing_file_list.append(dest_path) continue if not os.path.exists(file_to_move):", "src.endswith('/'): src = src[:-1] if dest.endswith('/'): dest = dest[:-1] if not os.path.exists(dest): os.makedirs(dest)", "user_pass) if file_not_exists: return '{\"success\": 0, \"msg\": [\"%s\", file does not exists!!!]}' %", "unique_id) run_services.run_basic_services(\"echo %s | sudo -S cp -r %s %s\" % (user_pass, file_to_copy,", "import run_services from Basic_linux_commands.chown_chmod import chown from bigdata_logs.logger import getLoggingInstance log = getLoggingInstance()", "file does not exists!!!]}' % file_not_exists return '{\"success\": 1}' except Exception as e:", "try: file_not_exists = [] existing_file_list = [] if src.endswith('/'): src = src[:-1] if", "[] if src.endswith('/'): src = src[:-1] if dest.endswith('/'): dest = dest[:-1] if not", "= src + 
def delete(src, files, user_pass):
    """Delete every name in *files* from *src* via ``sudo rm -rf``.

    :param src: source directory (trailing ``/`` tolerated).
    :param files: iterable of file/directory names relative to *src*.
    :param user_pass: sudo password, piped to ``sudo -S``.
    :returns: JSON-ish status string.  NOTE(review): the failure string is
        not valid JSON (unquoted text inside the array); kept byte-identical
        because callers may string-match it.
    """
    try:
        # Local import: used to escape shell metacharacters below.
        import shlex

        file_not_exists = []
        if src.endswith('/'):
            src = src[:-1]
        for file in files:
            file_path = src + '/%s' % file
            if not os.path.exists(file_path):
                file_not_exists.append(file_path)
            else:
                # BUG FIX: the original ran ``rm -rf`` unconditionally, even
                # for paths it had just recorded as missing; only delete
                # paths that actually exist.
                # SECURITY: quote the password and path so shell
                # metacharacters in file names cannot inject commands —
                # especially critical in front of ``rm -rf``.
                run_services.run_basic_services(
                    "echo %s | sudo -S rm -rf %s"
                    % (shlex.quote(user_pass), shlex.quote(file_path)))
        if file_not_exists:
            return '{"success": 0, "msg": ["%s", file does not exists!!!]}' % file_not_exists
        return '{"success": 1}'
    except Exception as e:
        # BUG FIX: the original logged "move()" here; this is delete().
        log.error("Exception in copy_move_delete ==> delete()")
        log.error(e)
        return '{"success": 0, "msg": ["%s"]}' % e
==> copy()\") log.error(e)", "try: file_not_exists = [] if src.endswith('/'): src = src[:-1] if dest.endswith('/'): dest =", "for file in file_list: file_to_move = src+ '/%s' % file dest_path = dest", "+ '/%s' % file if os.path.exists(dest_path): existing_file_list.append(dest_path) continue if not os.path.exists(file_to_move): file_not_exists.append(file_to_move) else:", "+ '/%s_%s' % (file, unique_id) run_services.run_basic_services(\"echo %s | sudo -S cp -r %s", "else: unique_id = str(time.time()).split('.')[0] copied_file = dest + '/%s_%s' % (file, unique_id) run_services.run_basic_services(\"echo", "%s | sudo -S rm -rf %s\" % (user_pass, file_path)) if file_not_exists: return", "in file_list: file_to_move = src+ '/%s' % file dest_path = dest + '/%s'", "| sudo -S mv %s %s\" % (user_pass, file_to_move, dest)) if file_not_exists: return", "= os.getenv(\"user\") groupname = username def copy(src, file_list, dest, user_pass, *args): log.info(\"\\nCopying\\n\") try:", "exists!!!]}' % existing_file_list return '{\"success\": 1}' except Exception as e: log.error(\"Exception in copy_move_delete", "%s %s\" % (user_pass, file_to_move, dest)) if file_not_exists: return '{\"success\": 0, \"msg\": [\"%s\",", "run_services.run_basic_services(\"echo %s | sudo -S rm -rf %s\" % (user_pass, file_path)) if file_not_exists:", "existing_file_list: return '{\"success\": 0, \"msg\": [\"%s\", file already exists!!!]}' % existing_file_list return '{\"success\":", "% existing_file_list return '{\"success\": 1}' except Exception as e: log.error(\"Exception in copy_move_delete ==>", "not os.path.exists(file_path): file_not_exists.append(file_path) run_services.run_basic_services(\"echo %s | sudo -S rm -rf %s\" % (user_pass,", "src = src[:-1] for file in files: file_path = src + '/%s' %", "% file if not os.path.exists(file_path): file_not_exists.append(file_path) run_services.run_basic_services(\"echo %s | sudo -S rm -rf", "import os import time import run_services from 
Basic_linux_commands.chown_chmod import chown from bigdata_logs.logger import", "(file, unique_id) run_services.run_basic_services(\"echo %s | sudo -S cp -r %s %s\" % (user_pass,", "%s | sudo -S cp -r %s %s\" % (user_pass, file_to_copy, copied_file)) chown(copied_file,", "chown(copied_file, username, groupname, user_pass) else: unique_id = str(time.time()).split('.')[0] copied_file = dest + '/%s_%s'", "% (user_pass, file_to_copy, copied_file)) chown(copied_file, username, groupname, user_pass) else: unique_id = str(time.time()).split('.')[0] copied_file", "return '{\"success\": 1}' except Exception as e: log.error(\"Exception in copy_move_delete ==> move()\") log.error(e)", "dest_path = dest + '/%s' % file if os.path.exists(dest_path): existing_file_list.append(dest_path) continue if not", "os import time import run_services from Basic_linux_commands.chown_chmod import chown from bigdata_logs.logger import getLoggingInstance", "src+ '/%s' % file dest_path = dest + '/%s' % file if os.path.exists(dest_path):", "file_path)) if file_not_exists: return '{\"success\": 0, \"msg\": [\"%s\", file does not exists!!!]}' %", "%s\" % (user_pass, file_to_copy, copied_file)) chown(copied_file, username, groupname, user_pass) else: unique_id = str(time.time()).split('.')[0]", "not os.path.exists(file_to_move): file_not_exists.append(file_to_move) else: run_services.run_basic_services(\"echo %s | sudo -S mv %s %s\" %", "= dest + '/%s_%s' % (file, unique_id) run_services.run_basic_services(\"echo %s | sudo -S cp", "existing_file_list return '{\"success\": 1}' except Exception as e: log.error(\"Exception in copy_move_delete ==> move()\")", "sudo -S rm -rf %s\" % (user_pass, file_path)) if file_not_exists: return '{\"success\": 0,", "user_pass): try: file_not_exists = [] if src.endswith('/'): src = src[:-1] for file in", "file if not os.path.exists(file_path): file_not_exists.append(file_path) run_services.run_basic_services(\"echo %s | sudo -S rm -rf %s\"", "% (file, unique_id) 
run_services.run_basic_services(\"echo %s | sudo -S cp %s %s\" % (user_pass,", "else: if os.path.isdir(file_to_copy): unique_id = str(time.time()).split('.')[0] copied_file = dest + '/%s_%s' % (file,", "% file_not_exists return '{\"success\": 1}' except Exception as e: log.error(\"Exception in copy_move_delete ==>", "Exception as e: log.error(\"Exception in copy_move_delete ==> copy()\") log.error(e) return '{\"success\": 0, \"msg\":", "existing_file_list.append(dest_path) continue if not os.path.exists(file_to_move): file_not_exists.append(file_to_move) else: run_services.run_basic_services(\"echo %s | sudo -S mv", "[] if src.endswith('/'): src = src[:-1] for file in files: file_path = src", "%s | sudo -S cp %s %s\" % (user_pass, file_to_copy, copied_file)) chown(copied_file, username,", "log = getLoggingInstance() username = os.getenv(\"user\") groupname = username def copy(src, file_list, dest,", "except Exception as e: log.error(\"Exception in copy_move_delete ==> copy()\") log.error(e) return '{\"success\": 0,", "file if not os.path.exists(file_to_copy): file_not_exists.append(file_to_copy) else: if os.path.isdir(file_to_copy): unique_id = str(time.time()).split('.')[0] copied_file =", "= str(time.time()).split('.')[0] copied_file = dest + '/%s_%s' % (file, unique_id) run_services.run_basic_services(\"echo %s |", "[] existing_file_list = [] if src.endswith('/'): src = src[:-1] if dest.endswith('/'): dest =", "if src.endswith('/'): src = src[:-1] if dest.endswith('/'): dest = dest[:-1] if not os.path.exists(dest):", "getLoggingInstance log = getLoggingInstance() username = os.getenv(\"user\") groupname = username def copy(src, file_list,", "file_to_copy, copied_file)) chown(copied_file, username, groupname, user_pass) else: unique_id = str(time.time()).split('.')[0] copied_file = dest", "files: file_path = src + '/%s' % file if not os.path.exists(file_path): file_not_exists.append(file_path) run_services.run_basic_services(\"echo", "% e def move_rename(src, 
file_list, dest, user_pass): try: file_not_exists = [] existing_file_list =", "log.error(\"Exception in copy_move_delete ==> copy()\") log.error(e) return '{\"success\": 0, \"msg\": [\"%s\"]}' % e", "dest)) if file_not_exists: return '{\"success\": 0, \"msg\": [\"%s\", file does not exists!!!]}' %", "%s\" % (user_pass, file_to_copy, copied_file)) chown(copied_file, username, groupname, user_pass) if file_not_exists: return '{\"success\":" ]
import os

import pytest
from dj_database_url import parse
from django.conf import settings
from testing.postgresql import Postgresql

# Locations of the PostgreSQL binaries, supplied through the environment so
# each host/CI runner can point the harness at its own installation.
postgres = os.environ.get("POSTGRESQL_PATH")
initdb = os.environ.get("INITDB_PATH")

# Throwaway PostgreSQL instance that lives for the whole pytest session.
_POSTGRESQL = Postgresql(postgres=postgres, initdb=initdb)


@pytest.hookimpl(tryfirst=True)
def pytest_load_initial_conftests(early_config, parser, args):
    """Point Django at the throwaway database before other conftests load."""
    os.environ["DJANGO_SETTINGS_MODULE"] = early_config.getini("DJANGO_SETTINGS_MODULE")
    # One parse() call per alias so each entry gets its own settings dict.
    for alias in ("default", "dashboard"):
        settings.DATABASES[alias] = parse(_POSTGRESQL.url())


def pytest_unconfigure(config):
    """Stop the session-scoped PostgreSQL instance at pytest shutdown."""
    _POSTGRESQL.stop()
Postgresql(postgres=postgres, initdb=initdb)", "os.environ.get(\"INITDB_PATH\") _POSTGRESQL = Postgresql(postgres=postgres, initdb=initdb) @pytest.hookimpl(tryfirst=True) def pytest_load_initial_conftests(early_config, parser, args): os.environ[\"DJANGO_SETTINGS_MODULE\"] = early_config.getini(\"DJANGO_SETTINGS_MODULE\")" ]
[]
[ "read_eva = eva_pb2.Program() read_eva.ParseFromString(read_kt.contents.value) g.write(str(read_eva)) def compute(): ################################################ print('Numpy version') d1 = np.dot(weights_1,", "for run in range(num_runs): global cur_times cur_times = copy.copy(times) compute() print(cur_times) all_times.append(cur_times) #", "# Print IR representation with open('mlp.eva', 'rb') as f, open('mlp.txt', 'w') as g:", "= time.perf_counter() public_ctx, secret_ctx = generate_keys(params) t1 = time.perf_counter() cur_times['t_keygen'] = delta_ms(t0, t1)", "under the MIT license. from eva import EvaProgram, Input, Output, evaluate, save, load", "for k in range(n): r[k] = matrix[k % m][(k + d) % n]", "d2 + bias_2.tolist() act2 = d2 * d2 Output('output', act2) Output('output', d1) mlp.set_output_ranges(60)", "time.perf_counter() encOutputs = public_ctx.execute(mlp, encInputs) t1 = time.perf_counter() cur_times['t_computation'] = delta_ms(t0, t1) save(encOutputs,", "range(num_runs): global cur_times cur_times = copy.copy(times) compute() print(cur_times) all_times.append(cur_times) # Output the benchmarking", "= np.random.rand(layer2_units) - 0.5 def diag(matrix, d): m, n = matrix.shape r =", "CKKSCompiler from eva.seal import generate_keys from eva.metric import valuation_mse import os import time", "layer1_units = 32 layer2_units = 16 # Fix seed so we can compare", "print('Compile time') mlp = EvaProgram('NN (MLP)', vec_size=32 * 32) with mlp: image =", "encOutputs = load('mlp_outputs.sealvals') t0 = time.perf_counter() outputs = secret_ctx.decrypt(encOutputs, signature) t1 = time.perf_counter()", "image}) print(eva_ptxt_version['output']) ################################################# print('Key generation time') params = load('mlp.evaparams') t0 = time.perf_counter() public_ctx,", "} def delta_ms(t0, t1): return round(1000 * abs(t0 - t1)) all_times = []", "plaintext version') mlp = load('mlp.eva') eva_ptxt_version = evaluate(mlp, {'input_0': 
image}) print(eva_ptxt_version['output']) ################################################# print('Key", "vec_size=32 * 32) with mlp: image = Input('input_0') d1 = mvp(weights_1, image) d1", "time.perf_counter() cur_times['t_keygen'] = delta_ms(t0, t1) save(public_ctx, 'mlp.sealpublic') save(secret_ctx, 'mlp.sealsecret') ################################################# print('Runtime on client')", "load('mlp_outputs.sealvals') t0 = time.perf_counter() outputs = secret_ctx.decrypt(encOutputs, signature) t1 = time.perf_counter() cur_times['t_decryption'] =", "reserved. # Licensed under the MIT license. from eva import EvaProgram, Input, Output,", "print('Got', outputs) print('MSE', valuation_mse(outputs, reference)) def main(): compile() num_runs = int(os.getenv(\"NUM_RUNS\")) if os.getenv(\"NUM_RUNS\")", "= compiler.compile(mlp) save(mlp, 'mlp.eva') save(params, 'mlp.evaparams') save(signature, 'mlp.evasignature') # Print IR representation with", "print('MSE', valuation_mse(outputs, reference)) def main(): compile() num_runs = int(os.getenv(\"NUM_RUNS\")) if os.getenv(\"NUM_RUNS\") is not", "d1 d2 = mvp(weights_2, act1) d2 = d2 + bias_2.tolist() act2 = d2", "numpy as np import pandas as pd from numpy import random import known_type_pb2", "mlp.set_input_scales(60) compiler = CKKSCompiler() mlp, params, signature = compiler.compile(mlp) save(mlp, 'mlp.eva') save(params, 'mlp.evaparams')", "are always zero/ignored weights_2 = np.random.rand(layer2_units, layer1_units) - 0.5 bias_2 = np.random.rand(layer2_units) -", "valuation_mse import os import time import copy import math import numpy as np", "'w') as g: read_kt = known_type_pb2.KnownType() read_kt.ParseFromString(f.read()) read_eva = eva_pb2.Program() read_eva.ParseFromString(read_kt.contents.value) g.write(str(read_eva)) def", "import copy import math import numpy as np import pandas as pd from", "num_runs = int(os.getenv(\"NUM_RUNS\")) if os.getenv(\"NUM_RUNS\") is not None else 10 for run in", "import os import 
time import copy import math import numpy as np import", "image image = [0.5] * image_size # Considering images padded to 32x32 for", "{'output': [ref_result]} print('Expected', reference) print('Got', outputs) print('MSE', valuation_mse(outputs, reference)) def main(): compile() num_runs", "zero/ignored weights_2 = np.random.rand(layer2_units, layer1_units) - 0.5 bias_2 = np.random.rand(layer2_units) - 0.5 def", "= { 'input_0': image } t0 = time.perf_counter() encInputs = public_ctx.encrypt(inputs, signature) t1", "global cur_times cur_times = copy.copy(times) compute() print(cur_times) all_times.append(cur_times) # Output the benchmarking results", "{'input_0': image}) print(eva_ptxt_version['output']) ################################################# print('Key generation time') params = load('mlp.evaparams') t0 = time.perf_counter()", "eva.seal import generate_keys from eva.metric import valuation_mse import os import time import copy", "{ 't_keygen': [], 't_input_encryption': [], 't_computation': [], 't_decryption': [] } def delta_ms(t0, t1):", "time.perf_counter() encInputs = public_ctx.encrypt(inputs, signature) t1 = time.perf_counter() cur_times['t_input_encryption'] = delta_ms(t0, t1) save(encInputs,", "benchmarking results df = pd.DataFrame(all_times) output_filename = \"mlp_eva.csv\" if 'OUTPUT_FILENAME' in os.environ: output_filename", "d1 d2 = np.dot(weights_2, act1) d2 = d2 + bias_2 ref_result = d2", "output_filename = \"mlp_eva.csv\" if 'OUTPUT_FILENAME' in os.environ: output_filename = os.environ['OUTPUT_FILENAME'] df.to_csv(output_filename, index=False) if", "[] cur_times = [] # Generate Data image_size = 32 * 32 layer1_units", "################################################ print('EVA plaintext version') mlp = load('mlp.eva') eva_ptxt_version = evaluate(mlp, {'input_0': image}) print(eva_ptxt_version['output'])", "d1 = d1 + bias_1 act1 = d1 * d1 d2 = np.dot(weights_2,", "# Output the benchmarking results df = pd.DataFrame(all_times) 
output_filename = \"mlp_eva.csv\" if 'OUTPUT_FILENAME'", "* d1 d2 = mvp(weights_2, act1) d2 = d2 + bias_2.tolist() act2 =", "= 32 * 32 layer1_units = 32 layer2_units = 16 # Fix seed", "# Generate Data image_size = 32 * 32 layer1_units = 32 layer2_units =", "if os.getenv(\"NUM_RUNS\") is not None else 10 for run in range(num_runs): global cur_times", "range(n): r[k] = matrix[k % m][(k + d) % n] return r def", "t += t << offset return t def compile(): print('Compile time') mlp =", "################################################# print('Key generation time') params = load('mlp.evaparams') t0 = time.perf_counter() public_ctx, secret_ctx =", "random.seed(0) # Input image image = [0.5] * image_size # Considering images padded", "image) d1 = d1 + bias_1 act1 = d1 * d1 d2 =", "two, we need to masking/padding here for i in range(log2_n_div_m): offset = n", "################################################# print('Runtime on server') mlp = load('mlp.eva') public_ctx = load('mlp.sealpublic') encInputs = load('mlp_inputs.sealvals')", "import random import known_type_pb2 import eva_pb2 #################### # BENCHMARKING # #################### times =", "mlp, params, signature = compiler.compile(mlp) save(mlp, 'mlp.eva') save(params, 'mlp.evaparams') save(signature, 'mlp.evasignature') # Print", "# TODO: if n/m isn't a power of two, we need to masking/padding", "matrix[k % m][(k + d) % n] return r def mvp(ptxt_matrix, enc_vector): m,", "Fix seed so we can compare result in c++ more easily random.seed(0) #", "def compute(): ################################################ print('Numpy version') d1 = np.dot(weights_1, image) d1 = d1 +", "all_times.append(cur_times) # Output the benchmarking results df = pd.DataFrame(all_times) output_filename = \"mlp_eva.csv\" if", "open('mlp.eva', 'rb') as f, open('mlp.txt', 'w') as g: read_kt = known_type_pb2.KnownType() read_kt.ParseFromString(f.read()) read_eva", "n] return r def mvp(ptxt_matrix, enc_vector): m, n = ptxt_matrix.shape 
log2_n_div_m = math.ceil(math.log(n", "= load('mlp.evaparams') t0 = time.perf_counter() public_ctx, secret_ctx = generate_keys(params) t1 = time.perf_counter() cur_times['t_keygen']", "cur_times['t_input_encryption'] = delta_ms(t0, t1) save(encInputs, 'mlp_inputs.sealvals') ################################################# print('Runtime on server') mlp = load('mlp.eva')", "t << offset return t def compile(): print('Compile time') mlp = EvaProgram('NN (MLP)',", "t = 0 for i in range(m): t += (enc_vector << i) *", "(MLP)', vec_size=32 * 32) with mlp: image = Input('input_0') d1 = mvp(weights_1, image)", "for easier math weights_1 = np.random.rand(layer1_units, image_size) - 0.5 bias_1 = np.random.rand(layer1_units) -", "compile() num_runs = int(os.getenv(\"NUM_RUNS\")) if os.getenv(\"NUM_RUNS\") is not None else 10 for run", "10 for run in range(num_runs): global cur_times cur_times = copy.copy(times) compute() print(cur_times) all_times.append(cur_times)", "random import known_type_pb2 import eva_pb2 #################### # BENCHMARKING # #################### times = {", "np.dot(weights_1, image) d1 = d1 + bias_1 act1 = d1 * d1 d2", "on server') mlp = load('mlp.eva') public_ctx = load('mlp.sealpublic') encInputs = load('mlp_inputs.sealvals') t0 =", "+ d) % n] return r def mvp(ptxt_matrix, enc_vector): m, n = ptxt_matrix.shape", "'mlp.sealsecret') ################################################# print('Runtime on client') signature = load('mlp.evasignature') public_ctx = load('mlp.sealpublic') inputs =", "signature) t1 = time.perf_counter() cur_times['t_decryption'] = delta_ms(t0, t1) reference = {'output': [ref_result]} print('Expected',", "pd.DataFrame(all_times) output_filename = \"mlp_eva.csv\" if 'OUTPUT_FILENAME' in os.environ: output_filename = os.environ['OUTPUT_FILENAME'] df.to_csv(output_filename, index=False)", "+ bias_1.tolist() act1 = d1 * d1 d2 = mvp(weights_2, act1) d2 =", "run in range(num_runs): global cur_times cur_times = copy.copy(times) 
compute() print(cur_times) all_times.append(cur_times) # Output", "= matrix.shape r = [0] * n for k in range(n): r[k] =", "= load('mlp_outputs.sealvals') t0 = time.perf_counter() outputs = secret_ctx.decrypt(encOutputs, signature) t1 = time.perf_counter() cur_times['t_decryption']", "n = ptxt_matrix.shape log2_n_div_m = math.ceil(math.log(n // m, 2)) t = 0 for", "= delta_ms(t0, t1) save(encOutputs, 'mlp_outputs.sealvals') ################################################# print('Back on client') secret_ctx = load('mlp.sealsecret') encOutputs", "mlp.set_output_ranges(60) mlp.set_input_scales(60) compiler = CKKSCompiler() mlp, params, signature = compiler.compile(mlp) save(mlp, 'mlp.eva') save(params,", "eva_pb2 #################### # BENCHMARKING # #################### times = { 't_keygen': [], 't_input_encryption': [],", "offset = n // (2 << i) t += t << offset return", "= delta_ms(t0, t1) reference = {'output': [ref_result]} print('Expected', reference) print('Got', outputs) print('MSE', valuation_mse(outputs,", "- 0.5 bias_1 = np.random.rand(layer1_units) - 0.5 # Allowing 16 output classes, where", "easily random.seed(0) # Input image image = [0.5] * image_size # Considering images", "mlp: image = Input('input_0') d1 = mvp(weights_1, image) d1 = d1 + bias_1.tolist()", "return round(1000 * abs(t0 - t1)) all_times = [] cur_times = [] #", "# Copyright (c) Microsoft Corporation. All rights reserved. 
# Licensed under the MIT", "as pd from numpy import random import known_type_pb2 import eva_pb2 #################### # BENCHMARKING", "results df = pd.DataFrame(all_times) output_filename = \"mlp_eva.csv\" if 'OUTPUT_FILENAME' in os.environ: output_filename =", "'mlp.sealpublic') save(secret_ctx, 'mlp.sealsecret') ################################################# print('Runtime on client') signature = load('mlp.evasignature') public_ctx = load('mlp.sealpublic')", "g.write(str(read_eva)) def compute(): ################################################ print('Numpy version') d1 = np.dot(weights_1, image) d1 = d1", "act1 = d1 * d1 d2 = np.dot(weights_2, act1) d2 = d2 +", "save(signature, 'mlp.evasignature') # Print IR representation with open('mlp.eva', 'rb') as f, open('mlp.txt', 'w')", "t += (enc_vector << i) * diag(ptxt_matrix, i) # TODO: if n/m isn't", "= \"mlp_eva.csv\" if 'OUTPUT_FILENAME' in os.environ: output_filename = os.environ['OUTPUT_FILENAME'] df.to_csv(output_filename, index=False) if __name__", "import math import numpy as np import pandas as pd from numpy import", "'t_keygen': [], 't_input_encryption': [], 't_computation': [], 't_decryption': [] } def delta_ms(t0, t1): return", "= d2 * d2 print(ref_result) ################################################ print('EVA plaintext version') mlp = load('mlp.eva') eva_ptxt_version", "image_size # Considering images padded to 32x32 for easier math weights_1 = np.random.rand(layer1_units,", "print(cur_times) all_times.append(cur_times) # Output the benchmarking results df = pd.DataFrame(all_times) output_filename = \"mlp_eva.csv\"", "license. 
from eva import EvaProgram, Input, Output, evaluate, save, load from eva.ckks import", "compare result in c++ more easily random.seed(0) # Input image image = [0.5]", "generation time') params = load('mlp.evaparams') t0 = time.perf_counter() public_ctx, secret_ctx = generate_keys(params) t1", "# Fix seed so we can compare result in c++ more easily random.seed(0)", "weights_2 = np.random.rand(layer2_units, layer1_units) - 0.5 bias_2 = np.random.rand(layer2_units) - 0.5 def diag(matrix,", "(c) Microsoft Corporation. All rights reserved. # Licensed under the MIT license. from", "np.random.rand(layer1_units) - 0.5 # Allowing 16 output classes, where the 6 extra ones", "# Allowing 16 output classes, where the 6 extra ones are always zero/ignored", "= d2 + bias_2 ref_result = d2 * d2 print(ref_result) ################################################ print('EVA plaintext", "* diag(ptxt_matrix, i) # TODO: if n/m isn't a power of two, we", "compiler.compile(mlp) save(mlp, 'mlp.eva') save(params, 'mlp.evaparams') save(signature, 'mlp.evasignature') # Print IR representation with open('mlp.eva',", "= time.perf_counter() cur_times['t_keygen'] = delta_ms(t0, t1) save(public_ctx, 'mlp.sealpublic') save(secret_ctx, 'mlp.sealsecret') ################################################# print('Runtime on", "delta_ms(t0, t1) save(public_ctx, 'mlp.sealpublic') save(secret_ctx, 'mlp.sealsecret') ################################################# print('Runtime on client') signature = load('mlp.evasignature')", "bias_2 = np.random.rand(layer2_units) - 0.5 def diag(matrix, d): m, n = matrix.shape r", "0.5 # Allowing 16 output classes, where the 6 extra ones are always", "Corporation. All rights reserved. # Licensed under the MIT license. 
from eva import", "mvp(ptxt_matrix, enc_vector): m, n = ptxt_matrix.shape log2_n_div_m = math.ceil(math.log(n // m, 2)) t", "= mvp(weights_1, image) d1 = d1 + bias_1.tolist() act1 = d1 * d1", "* 32) with mlp: image = Input('input_0') d1 = mvp(weights_1, image) d1 =", "[], 't_decryption': [] } def delta_ms(t0, t1): return round(1000 * abs(t0 - t1))", "+ bias_2 ref_result = d2 * d2 print(ref_result) ################################################ print('EVA plaintext version') mlp", "image_size = 32 * 32 layer1_units = 32 layer2_units = 16 # Fix", "outputs) print('MSE', valuation_mse(outputs, reference)) def main(): compile() num_runs = int(os.getenv(\"NUM_RUNS\")) if os.getenv(\"NUM_RUNS\") is", "as np import pandas as pd from numpy import random import known_type_pb2 import", "import generate_keys from eva.metric import valuation_mse import os import time import copy import", "= [] cur_times = [] # Generate Data image_size = 32 * 32", "print('EVA plaintext version') mlp = load('mlp.eva') eva_ptxt_version = evaluate(mlp, {'input_0': image}) print(eva_ptxt_version['output']) #################################################", "[] } def delta_ms(t0, t1): return round(1000 * abs(t0 - t1)) all_times =", "mlp = load('mlp.eva') public_ctx = load('mlp.sealpublic') encInputs = load('mlp_inputs.sealvals') t0 = time.perf_counter() encOutputs", "act1) d2 = d2 + bias_2 ref_result = d2 * d2 print(ref_result) ################################################", "= 0 for i in range(m): t += (enc_vector << i) * diag(ptxt_matrix,", "= int(os.getenv(\"NUM_RUNS\")) if os.getenv(\"NUM_RUNS\") is not None else 10 for run in range(num_runs):", "version') d1 = np.dot(weights_1, image) d1 = d1 + bias_1 act1 = d1", "pd from numpy import random import known_type_pb2 import eva_pb2 #################### # BENCHMARKING #", "= load('mlp.eva') eva_ptxt_version = evaluate(mlp, {'input_0': image}) print(eva_ptxt_version['output']) ################################################# print('Key 
generation time') params", "classes, where the 6 extra ones are always zero/ignored weights_2 = np.random.rand(layer2_units, layer1_units)", "# #################### times = { 't_keygen': [], 't_input_encryption': [], 't_computation': [], 't_decryption': []", "round(1000 * abs(t0 - t1)) all_times = [] cur_times = [] # Generate", "t1) save(encOutputs, 'mlp_outputs.sealvals') ################################################# print('Back on client') secret_ctx = load('mlp.sealsecret') encOutputs = load('mlp_outputs.sealvals')", "= np.random.rand(layer2_units, layer1_units) - 0.5 bias_2 = np.random.rand(layer2_units) - 0.5 def diag(matrix, d):", "as f, open('mlp.txt', 'w') as g: read_kt = known_type_pb2.KnownType() read_kt.ParseFromString(f.read()) read_eva = eva_pb2.Program()", "= 16 # Fix seed so we can compare result in c++ more", "act1) d2 = d2 + bias_2.tolist() act2 = d2 * d2 Output('output', act2)", "math import numpy as np import pandas as pd from numpy import random", "= np.random.rand(layer1_units, image_size) - 0.5 bias_1 = np.random.rand(layer1_units) - 0.5 # Allowing 16", "ptxt_matrix.shape log2_n_div_m = math.ceil(math.log(n // m, 2)) t = 0 for i in", "Data image_size = 32 * 32 layer1_units = 32 layer2_units = 16 #", "the benchmarking results df = pd.DataFrame(all_times) output_filename = \"mlp_eva.csv\" if 'OUTPUT_FILENAME' in os.environ:", "t1 = time.perf_counter() cur_times['t_decryption'] = delta_ms(t0, t1) reference = {'output': [ref_result]} print('Expected', reference)", "client') secret_ctx = load('mlp.sealsecret') encOutputs = load('mlp_outputs.sealvals') t0 = time.perf_counter() outputs = secret_ctx.decrypt(encOutputs,", "image) d1 = d1 + bias_1.tolist() act1 = d1 * d1 d2 =", "bias_1.tolist() act1 = d1 * d1 d2 = mvp(weights_2, act1) d2 = d2", "'mlp_outputs.sealvals') ################################################# print('Back on client') secret_ctx = load('mlp.sealsecret') encOutputs = load('mlp_outputs.sealvals') t0 =", "m, n = matrix.shape r = 
[0] * n for k in range(n):", "generate_keys from eva.metric import valuation_mse import os import time import copy import math", "# BENCHMARKING # #################### times = { 't_keygen': [], 't_input_encryption': [], 't_computation': [],", "Input('input_0') d1 = mvp(weights_1, image) d1 = d1 + bias_1.tolist() act1 = d1", "on client') secret_ctx = load('mlp.sealsecret') encOutputs = load('mlp_outputs.sealvals') t0 = time.perf_counter() outputs =", "n // (2 << i) t += t << offset return t def", "= EvaProgram('NN (MLP)', vec_size=32 * 32) with mlp: image = Input('input_0') d1 =", "- 0.5 # Allowing 16 output classes, where the 6 extra ones are", "delta_ms(t0, t1) reference = {'output': [ref_result]} print('Expected', reference) print('Got', outputs) print('MSE', valuation_mse(outputs, reference))", "masking/padding here for i in range(log2_n_div_m): offset = n // (2 << i)", "import eva_pb2 #################### # BENCHMARKING # #################### times = { 't_keygen': [], 't_input_encryption':", "= load('mlp_inputs.sealvals') t0 = time.perf_counter() encOutputs = public_ctx.execute(mlp, encInputs) t1 = time.perf_counter() cur_times['t_computation']", "= d2 * d2 Output('output', act2) Output('output', d1) mlp.set_output_ranges(60) mlp.set_input_scales(60) compiler = CKKSCompiler()", "need to masking/padding here for i in range(log2_n_div_m): offset = n // (2", "d2 * d2 Output('output', act2) Output('output', d1) mlp.set_output_ranges(60) mlp.set_input_scales(60) compiler = CKKSCompiler() mlp,", "Output, evaluate, save, load from eva.ckks import CKKSCompiler from eva.seal import generate_keys from", "{ 'input_0': image } t0 = time.perf_counter() encInputs = public_ctx.encrypt(inputs, signature) t1 =", "eva_pb2.Program() read_eva.ParseFromString(read_kt.contents.value) g.write(str(read_eva)) def compute(): ################################################ print('Numpy version') d1 = np.dot(weights_1, image) d1", "d): m, n = matrix.shape r = [0] * n for k in", "in c++ 
more easily random.seed(0) # Input image image = [0.5] * image_size", "delta_ms(t0, t1) save(encOutputs, 'mlp_outputs.sealvals') ################################################# print('Back on client') secret_ctx = load('mlp.sealsecret') encOutputs =", "} t0 = time.perf_counter() encInputs = public_ctx.encrypt(inputs, signature) t1 = time.perf_counter() cur_times['t_input_encryption'] =", "save(encInputs, 'mlp_inputs.sealvals') ################################################# print('Runtime on server') mlp = load('mlp.eva') public_ctx = load('mlp.sealpublic') encInputs", "0 for i in range(m): t += (enc_vector << i) * diag(ptxt_matrix, i)", "32) with mlp: image = Input('input_0') d1 = mvp(weights_1, image) d1 = d1", "= public_ctx.encrypt(inputs, signature) t1 = time.perf_counter() cur_times['t_input_encryption'] = delta_ms(t0, t1) save(encInputs, 'mlp_inputs.sealvals') #################################################", "load('mlp.evaparams') t0 = time.perf_counter() public_ctx, secret_ctx = generate_keys(params) t1 = time.perf_counter() cur_times['t_keygen'] =", "= delta_ms(t0, t1) save(public_ctx, 'mlp.sealpublic') save(secret_ctx, 'mlp.sealsecret') ################################################# print('Runtime on client') signature =", "signature = load('mlp.evasignature') public_ctx = load('mlp.sealpublic') inputs = { 'input_0': image } t0", "encInputs = public_ctx.encrypt(inputs, signature) t1 = time.perf_counter() cur_times['t_input_encryption'] = delta_ms(t0, t1) save(encInputs, 'mlp_inputs.sealvals')", "reference = {'output': [ref_result]} print('Expected', reference) print('Got', outputs) print('MSE', valuation_mse(outputs, reference)) def main():", "print('Expected', reference) print('Got', outputs) print('MSE', valuation_mse(outputs, reference)) def main(): compile() num_runs = int(os.getenv(\"NUM_RUNS\"))", "[] # Generate Data image_size = 32 * 32 layer1_units = 32 layer2_units", "a power of two, we need to masking/padding here for i in 
range(log2_n_div_m):", "matrix.shape r = [0] * n for k in range(n): r[k] = matrix[k", "cur_times['t_computation'] = delta_ms(t0, t1) save(encOutputs, 'mlp_outputs.sealvals') ################################################# print('Back on client') secret_ctx = load('mlp.sealsecret')", "known_type_pb2.KnownType() read_kt.ParseFromString(f.read()) read_eva = eva_pb2.Program() read_eva.ParseFromString(read_kt.contents.value) g.write(str(read_eva)) def compute(): ################################################ print('Numpy version') d1", "params, signature = compiler.compile(mlp) save(mlp, 'mlp.eva') save(params, 'mlp.evaparams') save(signature, 'mlp.evasignature') # Print IR", "= np.dot(weights_2, act1) d2 = d2 + bias_2 ref_result = d2 * d2", "= math.ceil(math.log(n // m, 2)) t = 0 for i in range(m): t", "= generate_keys(params) t1 = time.perf_counter() cur_times['t_keygen'] = delta_ms(t0, t1) save(public_ctx, 'mlp.sealpublic') save(secret_ctx, 'mlp.sealsecret')", "open('mlp.txt', 'w') as g: read_kt = known_type_pb2.KnownType() read_kt.ParseFromString(f.read()) read_eva = eva_pb2.Program() read_eva.ParseFromString(read_kt.contents.value) g.write(str(read_eva))", "times = { 't_keygen': [], 't_input_encryption': [], 't_computation': [], 't_decryption': [] } def", "outputs = secret_ctx.decrypt(encOutputs, signature) t1 = time.perf_counter() cur_times['t_decryption'] = delta_ms(t0, t1) reference =", "= {'output': [ref_result]} print('Expected', reference) print('Got', outputs) print('MSE', valuation_mse(outputs, reference)) def main(): compile()", "from eva.metric import valuation_mse import os import time import copy import math import", "encOutputs = public_ctx.execute(mlp, encInputs) t1 = time.perf_counter() cur_times['t_computation'] = delta_ms(t0, t1) save(encOutputs, 'mlp_outputs.sealvals')", "load('mlp.sealsecret') encOutputs = load('mlp_outputs.sealvals') t0 = time.perf_counter() outputs = secret_ctx.decrypt(encOutputs, signature) t1 =", "on client') signature = 
load('mlp.evasignature') public_ctx = load('mlp.sealpublic') inputs = { 'input_0': image", "time.perf_counter() cur_times['t_decryption'] = delta_ms(t0, t1) reference = {'output': [ref_result]} print('Expected', reference) print('Got', outputs)", "mvp(weights_1, image) d1 = d1 + bias_1.tolist() act1 = d1 * d1 d2", "easier math weights_1 = np.random.rand(layer1_units, image_size) - 0.5 bias_1 = np.random.rand(layer1_units) - 0.5", "load from eva.ckks import CKKSCompiler from eva.seal import generate_keys from eva.metric import valuation_mse", "################################################# print('Runtime on client') signature = load('mlp.evasignature') public_ctx = load('mlp.sealpublic') inputs = {", "result in c++ more easily random.seed(0) # Input image image = [0.5] *", "import numpy as np import pandas as pd from numpy import random import", "def delta_ms(t0, t1): return round(1000 * abs(t0 - t1)) all_times = [] cur_times", "compute() print(cur_times) all_times.append(cur_times) # Output the benchmarking results df = pd.DataFrame(all_times) output_filename =", "= pd.DataFrame(all_times) output_filename = \"mlp_eva.csv\" if 'OUTPUT_FILENAME' in os.environ: output_filename = os.environ['OUTPUT_FILENAME'] df.to_csv(output_filename,", "t1 = time.perf_counter() cur_times['t_keygen'] = delta_ms(t0, t1) save(public_ctx, 'mlp.sealpublic') save(secret_ctx, 'mlp.sealsecret') ################################################# print('Runtime", "'OUTPUT_FILENAME' in os.environ: output_filename = os.environ['OUTPUT_FILENAME'] df.to_csv(output_filename, index=False) if __name__ == \"__main__\": main()", "'t_decryption': [] } def delta_ms(t0, t1): return round(1000 * abs(t0 - t1)) all_times", "'input_0': image } t0 = time.perf_counter() encInputs = public_ctx.encrypt(inputs, signature) t1 = time.perf_counter()", "np.random.rand(layer2_units, layer1_units) - 0.5 bias_2 = np.random.rand(layer2_units) - 0.5 def diag(matrix, d): m,", "r[k] = matrix[k % m][(k + d) % n] return r 
def mvp(ptxt_matrix,", "os import time import copy import math import numpy as np import pandas", "= load('mlp.sealsecret') encOutputs = load('mlp_outputs.sealvals') t0 = time.perf_counter() outputs = secret_ctx.decrypt(encOutputs, signature) t1", "= n // (2 << i) t += t << offset return t", "= load('mlp.sealpublic') inputs = { 'input_0': image } t0 = time.perf_counter() encInputs =", "= np.random.rand(layer1_units) - 0.5 # Allowing 16 output classes, where the 6 extra", "version') mlp = load('mlp.eva') eva_ptxt_version = evaluate(mlp, {'input_0': image}) print(eva_ptxt_version['output']) ################################################# print('Key generation", "np.dot(weights_2, act1) d2 = d2 + bias_2 ref_result = d2 * d2 print(ref_result)", "delta_ms(t0, t1) save(encInputs, 'mlp_inputs.sealvals') ################################################# print('Runtime on server') mlp = load('mlp.eva') public_ctx =", "= time.perf_counter() outputs = secret_ctx.decrypt(encOutputs, signature) t1 = time.perf_counter() cur_times['t_decryption'] = delta_ms(t0, t1)", "t def compile(): print('Compile time') mlp = EvaProgram('NN (MLP)', vec_size=32 * 32) with", "bias_1 act1 = d1 * d1 d2 = np.dot(weights_2, act1) d2 = d2", "= 32 layer2_units = 16 # Fix seed so we can compare result", "secret_ctx = load('mlp.sealsecret') encOutputs = load('mlp_outputs.sealvals') t0 = time.perf_counter() outputs = secret_ctx.decrypt(encOutputs, signature)", "to masking/padding here for i in range(log2_n_div_m): offset = n // (2 <<", "m][(k + d) % n] return r def mvp(ptxt_matrix, enc_vector): m, n =", "<< offset return t def compile(): print('Compile time') mlp = EvaProgram('NN (MLP)', vec_size=32", "n/m isn't a power of two, we need to masking/padding here for i", "<< i) t += t << offset return t def compile(): print('Compile time')", "time') mlp = EvaProgram('NN (MLP)', vec_size=32 * 32) with mlp: image = Input('input_0')", "= evaluate(mlp, {'input_0': image}) print(eva_ptxt_version['output']) 
################################################# print('Key generation time') params = load('mlp.evaparams') t0", "i) # TODO: if n/m isn't a power of two, we need to", "compiler = CKKSCompiler() mlp, params, signature = compiler.compile(mlp) save(mlp, 'mlp.eva') save(params, 'mlp.evaparams') save(signature,", "[ref_result]} print('Expected', reference) print('Got', outputs) print('MSE', valuation_mse(outputs, reference)) def main(): compile() num_runs =", "m, 2)) t = 0 for i in range(m): t += (enc_vector <<", "= time.perf_counter() cur_times['t_computation'] = delta_ms(t0, t1) save(encOutputs, 'mlp_outputs.sealvals') ################################################# print('Back on client') secret_ctx", "t0 = time.perf_counter() public_ctx, secret_ctx = generate_keys(params) t1 = time.perf_counter() cur_times['t_keygen'] = delta_ms(t0,", "time.perf_counter() cur_times['t_computation'] = delta_ms(t0, t1) save(encOutputs, 'mlp_outputs.sealvals') ################################################# print('Back on client') secret_ctx =", "return r def mvp(ptxt_matrix, enc_vector): m, n = ptxt_matrix.shape log2_n_div_m = math.ceil(math.log(n //", "diag(ptxt_matrix, i) # TODO: if n/m isn't a power of two, we need", "main(): compile() num_runs = int(os.getenv(\"NUM_RUNS\")) if os.getenv(\"NUM_RUNS\") is not None else 10 for", "from eva.seal import generate_keys from eva.metric import valuation_mse import os import time import", "Microsoft Corporation. All rights reserved. # Licensed under the MIT license. 
from eva", "+ bias_2.tolist() act2 = d2 * d2 Output('output', act2) Output('output', d1) mlp.set_output_ranges(60) mlp.set_input_scales(60)", "'mlp_inputs.sealvals') ################################################# print('Runtime on server') mlp = load('mlp.eva') public_ctx = load('mlp.sealpublic') encInputs =", "[0] * n for k in range(n): r[k] = matrix[k % m][(k +", "d1 * d1 d2 = mvp(weights_2, act1) d2 = d2 + bias_2.tolist() act2", "- 0.5 bias_2 = np.random.rand(layer2_units) - 0.5 def diag(matrix, d): m, n =", "= load('mlp.eva') public_ctx = load('mlp.sealpublic') encInputs = load('mlp_inputs.sealvals') t0 = time.perf_counter() encOutputs =", "All rights reserved. # Licensed under the MIT license. from eva import EvaProgram,", "eva_ptxt_version = evaluate(mlp, {'input_0': image}) print(eva_ptxt_version['output']) ################################################# print('Key generation time') params = load('mlp.evaparams')", "bias_2.tolist() act2 = d2 * d2 Output('output', act2) Output('output', d1) mlp.set_output_ranges(60) mlp.set_input_scales(60) compiler", "= np.dot(weights_1, image) d1 = d1 + bias_1 act1 = d1 * d1", "= delta_ms(t0, t1) save(encInputs, 'mlp_inputs.sealvals') ################################################# print('Runtime on server') mlp = load('mlp.eva') public_ctx", "\"mlp_eva.csv\" if 'OUTPUT_FILENAME' in os.environ: output_filename = os.environ['OUTPUT_FILENAME'] df.to_csv(output_filename, index=False) if __name__ ==", "d2 print(ref_result) ################################################ print('EVA plaintext version') mlp = load('mlp.eva') eva_ptxt_version = evaluate(mlp, {'input_0':", "* d2 Output('output', act2) Output('output', d1) mlp.set_output_ranges(60) mlp.set_input_scales(60) compiler = CKKSCompiler() mlp, params,", "import pandas as pd from numpy import random import known_type_pb2 import eva_pb2 ####################", "encInputs) t1 = time.perf_counter() cur_times['t_computation'] = delta_ms(t0, t1) save(encOutputs, 
'mlp_outputs.sealvals') ################################################# print('Back on", "act1 = d1 * d1 d2 = mvp(weights_2, act1) d2 = d2 +", "client') signature = load('mlp.evasignature') public_ctx = load('mlp.sealpublic') inputs = { 'input_0': image }", "d2 = d2 + bias_2.tolist() act2 = d2 * d2 Output('output', act2) Output('output',", "reference)) def main(): compile() num_runs = int(os.getenv(\"NUM_RUNS\")) if os.getenv(\"NUM_RUNS\") is not None else", "def mvp(ptxt_matrix, enc_vector): m, n = ptxt_matrix.shape log2_n_div_m = math.ceil(math.log(n // m, 2))", "t1) save(public_ctx, 'mlp.sealpublic') save(secret_ctx, 'mlp.sealsecret') ################################################# print('Runtime on client') signature = load('mlp.evasignature') public_ctx", "= [0] * n for k in range(n): r[k] = matrix[k % m][(k", "time.perf_counter() public_ctx, secret_ctx = generate_keys(params) t1 = time.perf_counter() cur_times['t_keygen'] = delta_ms(t0, t1) save(public_ctx,", "'t_computation': [], 't_decryption': [] } def delta_ms(t0, t1): return round(1000 * abs(t0 -", "save(params, 'mlp.evaparams') save(signature, 'mlp.evasignature') # Print IR representation with open('mlp.eva', 'rb') as f,", "if n/m isn't a power of two, we need to masking/padding here for", "#################### times = { 't_keygen': [], 't_input_encryption': [], 't_computation': [], 't_decryption': [] }", "################################################ print('Numpy version') d1 = np.dot(weights_1, image) d1 = d1 + bias_1 act1", "ones are always zero/ignored weights_2 = np.random.rand(layer2_units, layer1_units) - 0.5 bias_2 = np.random.rand(layer2_units)", "[0.5] * image_size # Considering images padded to 32x32 for easier math weights_1", "def main(): compile() num_runs = int(os.getenv(\"NUM_RUNS\")) if os.getenv(\"NUM_RUNS\") is not None else 10", "t1)) all_times = [] cur_times = [] # Generate Data image_size = 32", "d1 = mvp(weights_1, image) d1 = d1 + bias_1.tolist() act1 = d1 *", "<< i) * 
diag(ptxt_matrix, i) # TODO: if n/m isn't a power of", "= [0.5] * image_size # Considering images padded to 32x32 for easier math", "np.random.rand(layer2_units) - 0.5 def diag(matrix, d): m, n = matrix.shape r = [0]", "m, n = ptxt_matrix.shape log2_n_div_m = math.ceil(math.log(n // m, 2)) t = 0", "cur_times['t_decryption'] = delta_ms(t0, t1) reference = {'output': [ref_result]} print('Expected', reference) print('Got', outputs) print('MSE',", "bias_1 = np.random.rand(layer1_units) - 0.5 # Allowing 16 output classes, where the 6", "= d2 + bias_2.tolist() act2 = d2 * d2 Output('output', act2) Output('output', d1)", "extra ones are always zero/ignored weights_2 = np.random.rand(layer2_units, layer1_units) - 0.5 bias_2 =", "return t def compile(): print('Compile time') mlp = EvaProgram('NN (MLP)', vec_size=32 * 32)", "in range(m): t += (enc_vector << i) * diag(ptxt_matrix, i) # TODO: if", "'t_input_encryption': [], 't_computation': [], 't_decryption': [] } def delta_ms(t0, t1): return round(1000 *", "* image_size # Considering images padded to 32x32 for easier math weights_1 =", "= secret_ctx.decrypt(encOutputs, signature) t1 = time.perf_counter() cur_times['t_decryption'] = delta_ms(t0, t1) reference = {'output':", "the 6 extra ones are always zero/ignored weights_2 = np.random.rand(layer2_units, layer1_units) - 0.5", "public_ctx = load('mlp.sealpublic') encInputs = load('mlp_inputs.sealvals') t0 = time.perf_counter() encOutputs = public_ctx.execute(mlp, encInputs)", "the MIT license. 
from eva import EvaProgram, Input, Output, evaluate, save, load from", "t1): return round(1000 * abs(t0 - t1)) all_times = [] cur_times = []", "= time.perf_counter() cur_times['t_input_encryption'] = delta_ms(t0, t1) save(encInputs, 'mlp_inputs.sealvals') ################################################# print('Runtime on server') mlp", "we need to masking/padding here for i in range(log2_n_div_m): offset = n //", "(2 << i) t += t << offset return t def compile(): print('Compile", "t0 = time.perf_counter() outputs = secret_ctx.decrypt(encOutputs, signature) t1 = time.perf_counter() cur_times['t_decryption'] = delta_ms(t0,", "from numpy import random import known_type_pb2 import eva_pb2 #################### # BENCHMARKING # ####################", "= d1 + bias_1.tolist() act1 = d1 * d1 d2 = mvp(weights_2, act1)", "offset return t def compile(): print('Compile time') mlp = EvaProgram('NN (MLP)', vec_size=32 *", "mlp = EvaProgram('NN (MLP)', vec_size=32 * 32) with mlp: image = Input('input_0') d1", "evaluate(mlp, {'input_0': image}) print(eva_ptxt_version['output']) ################################################# print('Key generation time') params = load('mlp.evaparams') t0 =", "seed so we can compare result in c++ more easily random.seed(0) # Input", "not None else 10 for run in range(num_runs): global cur_times cur_times = copy.copy(times)", "public_ctx.execute(mlp, encInputs) t1 = time.perf_counter() cur_times['t_computation'] = delta_ms(t0, t1) save(encOutputs, 'mlp_outputs.sealvals') ################################################# print('Back", "0.5 bias_2 = np.random.rand(layer2_units) - 0.5 def diag(matrix, d): m, n = matrix.shape", "% m][(k + d) % n] return r def mvp(ptxt_matrix, enc_vector): m, n", "in range(log2_n_div_m): offset = n // (2 << i) t += t <<", "0.5 def diag(matrix, d): m, n = matrix.shape r = [0] * n", "image_size) - 0.5 bias_1 = np.random.rand(layer1_units) - 0.5 # Allowing 16 output classes,", "copy.copy(times) compute() print(cur_times) 
all_times.append(cur_times) # Output the benchmarking results df = pd.DataFrame(all_times) output_filename", "n = matrix.shape r = [0] * n for k in range(n): r[k]", "public_ctx, secret_ctx = generate_keys(params) t1 = time.perf_counter() cur_times['t_keygen'] = delta_ms(t0, t1) save(public_ctx, 'mlp.sealpublic')", "load('mlp.evasignature') public_ctx = load('mlp.sealpublic') inputs = { 'input_0': image } t0 = time.perf_counter()", "Generate Data image_size = 32 * 32 layer1_units = 32 layer2_units = 16", "math.ceil(math.log(n // m, 2)) t = 0 for i in range(m): t +=", "params = load('mlp.evaparams') t0 = time.perf_counter() public_ctx, secret_ctx = generate_keys(params) t1 = time.perf_counter()", "print(ref_result) ################################################ print('EVA plaintext version') mlp = load('mlp.eva') eva_ptxt_version = evaluate(mlp, {'input_0': image})", "print('Runtime on server') mlp = load('mlp.eva') public_ctx = load('mlp.sealpublic') encInputs = load('mlp_inputs.sealvals') t0", "(enc_vector << i) * diag(ptxt_matrix, i) # TODO: if n/m isn't a power", "d1 * d1 d2 = np.dot(weights_2, act1) d2 = d2 + bias_2 ref_result", "= eva_pb2.Program() read_eva.ParseFromString(read_kt.contents.value) g.write(str(read_eva)) def compute(): ################################################ print('Numpy version') d1 = np.dot(weights_1, image)", "range(log2_n_div_m): offset = n // (2 << i) t += t << offset", "copy import math import numpy as np import pandas as pd from numpy", "is not None else 10 for run in range(num_runs): global cur_times cur_times =", "= ptxt_matrix.shape log2_n_div_m = math.ceil(math.log(n // m, 2)) t = 0 for i", "np.random.rand(layer1_units, image_size) - 0.5 bias_1 = np.random.rand(layer1_units) - 0.5 # Allowing 16 output", "Input image image = [0.5] * image_size # Considering images padded to 32x32", "[], 't_computation': [], 't_decryption': [] } def delta_ms(t0, t1): return round(1000 * abs(t0", "so we can compare result in c++ more easily 
random.seed(0) # Input image", "f, open('mlp.txt', 'w') as g: read_kt = known_type_pb2.KnownType() read_kt.ParseFromString(f.read()) read_eva = eva_pb2.Program() read_eva.ParseFromString(read_kt.contents.value)", "% n] return r def mvp(ptxt_matrix, enc_vector): m, n = ptxt_matrix.shape log2_n_div_m =", "import time import copy import math import numpy as np import pandas as", "= d1 + bias_1 act1 = d1 * d1 d2 = np.dot(weights_2, act1)", "= { 't_keygen': [], 't_input_encryption': [], 't_computation': [], 't_decryption': [] } def delta_ms(t0,", "to 32x32 for easier math weights_1 = np.random.rand(layer1_units, image_size) - 0.5 bias_1 =", "generate_keys(params) t1 = time.perf_counter() cur_times['t_keygen'] = delta_ms(t0, t1) save(public_ctx, 'mlp.sealpublic') save(secret_ctx, 'mlp.sealsecret') #################################################", "for i in range(m): t += (enc_vector << i) * diag(ptxt_matrix, i) #", "t1 = time.perf_counter() cur_times['t_computation'] = delta_ms(t0, t1) save(encOutputs, 'mlp_outputs.sealvals') ################################################# print('Back on client')", "2)) t = 0 for i in range(m): t += (enc_vector << i)", "def diag(matrix, d): m, n = matrix.shape r = [0] * n for", "Output the benchmarking results df = pd.DataFrame(all_times) output_filename = \"mlp_eva.csv\" if 'OUTPUT_FILENAME' in", "image } t0 = time.perf_counter() encInputs = public_ctx.encrypt(inputs, signature) t1 = time.perf_counter() cur_times['t_input_encryption']", "IR representation with open('mlp.eva', 'rb') as f, open('mlp.txt', 'w') as g: read_kt =", "load('mlp.eva') public_ctx = load('mlp.sealpublic') encInputs = load('mlp_inputs.sealvals') t0 = time.perf_counter() encOutputs = public_ctx.execute(mlp,", "t0 = time.perf_counter() encInputs = public_ctx.encrypt(inputs, signature) t1 = time.perf_counter() cur_times['t_input_encryption'] = delta_ms(t0,", "time.perf_counter() outputs = secret_ctx.decrypt(encOutputs, signature) t1 = time.perf_counter() 
cur_times['t_decryption'] = delta_ms(t0, t1) reference", "range(m): t += (enc_vector << i) * diag(ptxt_matrix, i) # TODO: if n/m", "read_eva.ParseFromString(read_kt.contents.value) g.write(str(read_eva)) def compute(): ################################################ print('Numpy version') d1 = np.dot(weights_1, image) d1 =", "load('mlp.eva') eva_ptxt_version = evaluate(mlp, {'input_0': image}) print(eva_ptxt_version['output']) ################################################# print('Key generation time') params =", "r def mvp(ptxt_matrix, enc_vector): m, n = ptxt_matrix.shape log2_n_div_m = math.ceil(math.log(n // m,", "* abs(t0 - t1)) all_times = [] cur_times = [] # Generate Data", "= CKKSCompiler() mlp, params, signature = compiler.compile(mlp) save(mlp, 'mlp.eva') save(params, 'mlp.evaparams') save(signature, 'mlp.evasignature')", "d1 + bias_1.tolist() act1 = d1 * d1 d2 = mvp(weights_2, act1) d2", "for i in range(log2_n_div_m): offset = n // (2 << i) t +=", "i) * diag(ptxt_matrix, i) # TODO: if n/m isn't a power of two,", "power of two, we need to masking/padding here for i in range(log2_n_div_m): offset", "import EvaProgram, Input, Output, evaluate, save, load from eva.ckks import CKKSCompiler from eva.seal", "= [] # Generate Data image_size = 32 * 32 layer1_units = 32", "= matrix[k % m][(k + d) % n] return r def mvp(ptxt_matrix, enc_vector):", "pandas as pd from numpy import random import known_type_pb2 import eva_pb2 #################### #", "Considering images padded to 32x32 for easier math weights_1 = np.random.rand(layer1_units, image_size) -", "numpy import random import known_type_pb2 import eva_pb2 #################### # BENCHMARKING # #################### times", "* 32 layer1_units = 32 layer2_units = 16 # Fix seed so we", "math weights_1 = np.random.rand(layer1_units, image_size) - 0.5 bias_1 = np.random.rand(layer1_units) - 0.5 #", "d1) mlp.set_output_ranges(60) mlp.set_input_scales(60) compiler = CKKSCompiler() mlp, params, signature = 
compiler.compile(mlp) save(mlp, 'mlp.eva')", "secret_ctx = generate_keys(params) t1 = time.perf_counter() cur_times['t_keygen'] = delta_ms(t0, t1) save(public_ctx, 'mlp.sealpublic') save(secret_ctx,", "time.perf_counter() cur_times['t_input_encryption'] = delta_ms(t0, t1) save(encInputs, 'mlp_inputs.sealvals') ################################################# print('Runtime on server') mlp =", "None else 10 for run in range(num_runs): global cur_times cur_times = copy.copy(times) compute()", "ref_result = d2 * d2 print(ref_result) ################################################ print('EVA plaintext version') mlp = load('mlp.eva')", "#################### # BENCHMARKING # #################### times = { 't_keygen': [], 't_input_encryption': [], 't_computation':", "c++ more easily random.seed(0) # Input image image = [0.5] * image_size #", "inputs = { 'input_0': image } t0 = time.perf_counter() encInputs = public_ctx.encrypt(inputs, signature)", "print('Numpy version') d1 = np.dot(weights_1, image) d1 = d1 + bias_1 act1 =", "i in range(log2_n_div_m): offset = n // (2 << i) t += t", "d2 = mvp(weights_2, act1) d2 = d2 + bias_2.tolist() act2 = d2 *", "* d2 print(ref_result) ################################################ print('EVA plaintext version') mlp = load('mlp.eva') eva_ptxt_version = evaluate(mlp,", "16 output classes, where the 6 extra ones are always zero/ignored weights_2 =", "public_ctx.encrypt(inputs, signature) t1 = time.perf_counter() cur_times['t_input_encryption'] = delta_ms(t0, t1) save(encInputs, 'mlp_inputs.sealvals') ################################################# print('Runtime", "i) t += t << offset return t def compile(): print('Compile time') mlp", "from eva import EvaProgram, Input, Output, evaluate, save, load from eva.ckks import CKKSCompiler", "n for k in range(n): r[k] = matrix[k % m][(k + d) %", "print('Runtime on client') signature = load('mlp.evasignature') public_ctx = load('mlp.sealpublic') inputs = { 'input_0':", "'rb') as f, 
open('mlp.txt', 'w') as g: read_kt = known_type_pb2.KnownType() read_kt.ParseFromString(f.read()) read_eva =", "d1 + bias_1 act1 = d1 * d1 d2 = np.dot(weights_2, act1) d2", "32 * 32 layer1_units = 32 layer2_units = 16 # Fix seed so", "i in range(m): t += (enc_vector << i) * diag(ptxt_matrix, i) # TODO:", "= known_type_pb2.KnownType() read_kt.ParseFromString(f.read()) read_eva = eva_pb2.Program() read_eva.ParseFromString(read_kt.contents.value) g.write(str(read_eva)) def compute(): ################################################ print('Numpy version')", "32 layer2_units = 16 # Fix seed so we can compare result in", "image = Input('input_0') d1 = mvp(weights_1, image) d1 = d1 + bias_1.tolist() act1", "Copyright (c) Microsoft Corporation. All rights reserved. # Licensed under the MIT license.", "BENCHMARKING # #################### times = { 't_keygen': [], 't_input_encryption': [], 't_computation': [], 't_decryption':", "* n for k in range(n): r[k] = matrix[k % m][(k + d)", "<reponame>AlexanderViand/EVA # Copyright (c) Microsoft Corporation. All rights reserved. 
# Licensed under the", "d1 = d1 + bias_1.tolist() act1 = d1 * d1 d2 = mvp(weights_2,", "representation with open('mlp.eva', 'rb') as f, open('mlp.txt', 'w') as g: read_kt = known_type_pb2.KnownType()", "save(secret_ctx, 'mlp.sealsecret') ################################################# print('Runtime on client') signature = load('mlp.evasignature') public_ctx = load('mlp.sealpublic') inputs", "[], 't_input_encryption': [], 't_computation': [], 't_decryption': [] } def delta_ms(t0, t1): return round(1000", "32 layer1_units = 32 layer2_units = 16 # Fix seed so we can", "6 extra ones are always zero/ignored weights_2 = np.random.rand(layer2_units, layer1_units) - 0.5 bias_2", "CKKSCompiler() mlp, params, signature = compiler.compile(mlp) save(mlp, 'mlp.eva') save(params, 'mlp.evaparams') save(signature, 'mlp.evasignature') #", "save(public_ctx, 'mlp.sealpublic') save(secret_ctx, 'mlp.sealsecret') ################################################# print('Runtime on client') signature = load('mlp.evasignature') public_ctx =", "= time.perf_counter() encInputs = public_ctx.encrypt(inputs, signature) t1 = time.perf_counter() cur_times['t_input_encryption'] = delta_ms(t0, t1)", "import valuation_mse import os import time import copy import math import numpy as", "bias_2 ref_result = d2 * d2 print(ref_result) ################################################ print('EVA plaintext version') mlp =", "'mlp.evaparams') save(signature, 'mlp.evasignature') # Print IR representation with open('mlp.eva', 'rb') as f, open('mlp.txt',", "with mlp: image = Input('input_0') d1 = mvp(weights_1, image) d1 = d1 +", "os.getenv(\"NUM_RUNS\") is not None else 10 for run in range(num_runs): global cur_times cur_times", "= load('mlp.sealpublic') encInputs = load('mlp_inputs.sealvals') t0 = time.perf_counter() encOutputs = public_ctx.execute(mlp, encInputs) t1", "- 0.5 def diag(matrix, d): m, n = matrix.shape r = [0] *", "cur_times = copy.copy(times) compute() print(cur_times) 
all_times.append(cur_times) # Output the benchmarking results df =", "// (2 << i) t += t << offset return t def compile():", "import known_type_pb2 import eva_pb2 #################### # BENCHMARKING # #################### times = { 't_keygen':", "compile(): print('Compile time') mlp = EvaProgram('NN (MLP)', vec_size=32 * 32) with mlp: image", "isn't a power of two, we need to masking/padding here for i in", "d2 Output('output', act2) Output('output', d1) mlp.set_output_ranges(60) mlp.set_input_scales(60) compiler = CKKSCompiler() mlp, params, signature", "of two, we need to masking/padding here for i in range(log2_n_div_m): offset =", "################################################# print('Back on client') secret_ctx = load('mlp.sealsecret') encOutputs = load('mlp_outputs.sealvals') t0 = time.perf_counter()", "Allowing 16 output classes, where the 6 extra ones are always zero/ignored weights_2", "'mlp.eva') save(params, 'mlp.evaparams') save(signature, 'mlp.evasignature') # Print IR representation with open('mlp.eva', 'rb') as", "= mvp(weights_2, act1) d2 = d2 + bias_2.tolist() act2 = d2 * d2", "signature = compiler.compile(mlp) save(mlp, 'mlp.eva') save(params, 'mlp.evaparams') save(signature, 'mlp.evasignature') # Print IR representation", "reference) print('Got', outputs) print('MSE', valuation_mse(outputs, reference)) def main(): compile() num_runs = int(os.getenv(\"NUM_RUNS\")) if", "as g: read_kt = known_type_pb2.KnownType() read_kt.ParseFromString(f.read()) read_eva = eva_pb2.Program() read_eva.ParseFromString(read_kt.contents.value) g.write(str(read_eva)) def compute():", "save(mlp, 'mlp.eva') save(params, 'mlp.evaparams') save(signature, 'mlp.evasignature') # Print IR representation with open('mlp.eva', 'rb')", "d2 + bias_2 ref_result = d2 * d2 print(ref_result) ################################################ print('EVA plaintext version')", "= load('mlp.evasignature') public_ctx = load('mlp.sealpublic') inputs = { 'input_0': image } t0 =", "d) % n] 
return r def mvp(ptxt_matrix, enc_vector): m, n = ptxt_matrix.shape log2_n_div_m", "load('mlp_inputs.sealvals') t0 = time.perf_counter() encOutputs = public_ctx.execute(mlp, encInputs) t1 = time.perf_counter() cur_times['t_computation'] =", "MIT license. from eva import EvaProgram, Input, Output, evaluate, save, load from eva.ckks", "= time.perf_counter() cur_times['t_decryption'] = delta_ms(t0, t1) reference = {'output': [ref_result]} print('Expected', reference) print('Got',", "evaluate, save, load from eva.ckks import CKKSCompiler from eva.seal import generate_keys from eva.metric", "all_times = [] cur_times = [] # Generate Data image_size = 32 *", "enc_vector): m, n = ptxt_matrix.shape log2_n_div_m = math.ceil(math.log(n // m, 2)) t =", "+= (enc_vector << i) * diag(ptxt_matrix, i) # TODO: if n/m isn't a", "log2_n_div_m = math.ceil(math.log(n // m, 2)) t = 0 for i in range(m):", "save(encOutputs, 'mlp_outputs.sealvals') ################################################# print('Back on client') secret_ctx = load('mlp.sealsecret') encOutputs = load('mlp_outputs.sealvals') t0", "layer2_units = 16 # Fix seed so we can compare result in c++", "else 10 for run in range(num_runs): global cur_times cur_times = copy.copy(times) compute() print(cur_times)", "time') params = load('mlp.evaparams') t0 = time.perf_counter() public_ctx, secret_ctx = generate_keys(params) t1 =", "mlp = load('mlp.eva') eva_ptxt_version = evaluate(mlp, {'input_0': image}) print(eva_ptxt_version['output']) ################################################# print('Key generation time')", "we can compare result in c++ more easily random.seed(0) # Input image image", "= time.perf_counter() encOutputs = public_ctx.execute(mlp, encInputs) t1 = time.perf_counter() cur_times['t_computation'] = delta_ms(t0, t1)", "load('mlp.sealpublic') inputs = { 'input_0': image } t0 = time.perf_counter() encInputs = public_ctx.encrypt(inputs,", "32x32 for easier math weights_1 = np.random.rand(layer1_units, image_size) - 
0.5 bias_1 = np.random.rand(layer1_units)", "act2) Output('output', d1) mlp.set_output_ranges(60) mlp.set_input_scales(60) compiler = CKKSCompiler() mlp, params, signature = compiler.compile(mlp)", "0.5 bias_1 = np.random.rand(layer1_units) - 0.5 # Allowing 16 output classes, where the", "in range(num_runs): global cur_times cur_times = copy.copy(times) compute() print(cur_times) all_times.append(cur_times) # Output the", "int(os.getenv(\"NUM_RUNS\")) if os.getenv(\"NUM_RUNS\") is not None else 10 for run in range(num_runs): global", "# Considering images padded to 32x32 for easier math weights_1 = np.random.rand(layer1_units, image_size)", "t1) reference = {'output': [ref_result]} print('Expected', reference) print('Got', outputs) print('MSE', valuation_mse(outputs, reference)) def", "act2 = d2 * d2 Output('output', act2) Output('output', d1) mlp.set_output_ranges(60) mlp.set_input_scales(60) compiler =", "always zero/ignored weights_2 = np.random.rand(layer2_units, layer1_units) - 0.5 bias_2 = np.random.rand(layer2_units) - 0.5", "delta_ms(t0, t1): return round(1000 * abs(t0 - t1)) all_times = [] cur_times =", "can compare result in c++ more easily random.seed(0) # Input image image =", "Output('output', d1) mlp.set_output_ranges(60) mlp.set_input_scales(60) compiler = CKKSCompiler() mlp, params, signature = compiler.compile(mlp) save(mlp,", "+= t << offset return t def compile(): print('Compile time') mlp = EvaProgram('NN", "from eva.ckks import CKKSCompiler from eva.seal import generate_keys from eva.metric import valuation_mse import", "TODO: if n/m isn't a power of two, we need to masking/padding here", "eva.ckks import CKKSCompiler from eva.seal import generate_keys from eva.metric import valuation_mse import os", "d1 = np.dot(weights_1, image) d1 = d1 + bias_1 act1 = d1 *", "rights reserved. # Licensed under the MIT license. 
from eva import EvaProgram, Input,", "16 # Fix seed so we can compare result in c++ more easily", "Output('output', act2) Output('output', d1) mlp.set_output_ranges(60) mlp.set_input_scales(60) compiler = CKKSCompiler() mlp, params, signature =", "read_kt = known_type_pb2.KnownType() read_kt.ParseFromString(f.read()) read_eva = eva_pb2.Program() read_eva.ParseFromString(read_kt.contents.value) g.write(str(read_eva)) def compute(): ################################################ print('Numpy", "# Input image image = [0.5] * image_size # Considering images padded to", "print(eva_ptxt_version['output']) ################################################# print('Key generation time') params = load('mlp.evaparams') t0 = time.perf_counter() public_ctx, secret_ctx", "signature) t1 = time.perf_counter() cur_times['t_input_encryption'] = delta_ms(t0, t1) save(encInputs, 'mlp_inputs.sealvals') ################################################# print('Runtime on", "encInputs = load('mlp_inputs.sealvals') t0 = time.perf_counter() encOutputs = public_ctx.execute(mlp, encInputs) t1 = time.perf_counter()", "g: read_kt = known_type_pb2.KnownType() read_kt.ParseFromString(f.read()) read_eva = eva_pb2.Program() read_eva.ParseFromString(read_kt.contents.value) g.write(str(read_eva)) def compute(): ################################################", "secret_ctx.decrypt(encOutputs, signature) t1 = time.perf_counter() cur_times['t_decryption'] = delta_ms(t0, t1) reference = {'output': [ref_result]}", "df = pd.DataFrame(all_times) output_filename = \"mlp_eva.csv\" if 'OUTPUT_FILENAME' in os.environ: output_filename = os.environ['OUTPUT_FILENAME']", "mvp(weights_2, act1) d2 = d2 + bias_2.tolist() act2 = d2 * d2 Output('output',", "print('Back on client') secret_ctx = load('mlp.sealsecret') encOutputs = load('mlp_outputs.sealvals') t0 = time.perf_counter() outputs", "d2 = np.dot(weights_2, act1) d2 = d2 + bias_2 ref_result = d2 *", "- t1)) all_times = [] cur_times = [] # Generate Data 
image_size =", "= d1 * d1 d2 = mvp(weights_2, act1) d2 = d2 + bias_2.tolist()", "padded to 32x32 for easier math weights_1 = np.random.rand(layer1_units, image_size) - 0.5 bias_1", "diag(matrix, d): m, n = matrix.shape r = [0] * n for k", "'mlp.evasignature') # Print IR representation with open('mlp.eva', 'rb') as f, open('mlp.txt', 'w') as", "server') mlp = load('mlp.eva') public_ctx = load('mlp.sealpublic') encInputs = load('mlp_inputs.sealvals') t0 = time.perf_counter()", "cur_times['t_keygen'] = delta_ms(t0, t1) save(public_ctx, 'mlp.sealpublic') save(secret_ctx, 'mlp.sealsecret') ################################################# print('Runtime on client') signature", "cur_times cur_times = copy.copy(times) compute() print(cur_times) all_times.append(cur_times) # Output the benchmarking results df", "= public_ctx.execute(mlp, encInputs) t1 = time.perf_counter() cur_times['t_computation'] = delta_ms(t0, t1) save(encOutputs, 'mlp_outputs.sealvals') #################################################", "print('Key generation time') params = load('mlp.evaparams') t0 = time.perf_counter() public_ctx, secret_ctx = generate_keys(params)", "d2 * d2 print(ref_result) ################################################ print('EVA plaintext version') mlp = load('mlp.eva') eva_ptxt_version =", "Print IR representation with open('mlp.eva', 'rb') as f, open('mlp.txt', 'w') as g: read_kt", "here for i in range(log2_n_div_m): offset = n // (2 << i) t", "compute(): ################################################ print('Numpy version') d1 = np.dot(weights_1, image) d1 = d1 + bias_1", "in range(n): r[k] = matrix[k % m][(k + d) % n] return r", "public_ctx = load('mlp.sealpublic') inputs = { 'input_0': image } t0 = time.perf_counter() encInputs", "Licensed under the MIT license. 
from eva import EvaProgram, Input, Output, evaluate, save,", "layer1_units) - 0.5 bias_2 = np.random.rand(layer2_units) - 0.5 def diag(matrix, d): m, n", "t1 = time.perf_counter() cur_times['t_input_encryption'] = delta_ms(t0, t1) save(encInputs, 'mlp_inputs.sealvals') ################################################# print('Runtime on server')", "image = [0.5] * image_size # Considering images padded to 32x32 for easier", "t0 = time.perf_counter() encOutputs = public_ctx.execute(mlp, encInputs) t1 = time.perf_counter() cur_times['t_computation'] = delta_ms(t0,", "t1) save(encInputs, 'mlp_inputs.sealvals') ################################################# print('Runtime on server') mlp = load('mlp.eva') public_ctx = load('mlp.sealpublic')", "= d1 * d1 d2 = np.dot(weights_2, act1) d2 = d2 + bias_2", "import CKKSCompiler from eva.seal import generate_keys from eva.metric import valuation_mse import os import", "EvaProgram('NN (MLP)', vec_size=32 * 32) with mlp: image = Input('input_0') d1 = mvp(weights_1,", "// m, 2)) t = 0 for i in range(m): t += (enc_vector", "= Input('input_0') d1 = mvp(weights_1, image) d1 = d1 + bias_1.tolist() act1 =", "cur_times = [] # Generate Data image_size = 32 * 32 layer1_units =", "valuation_mse(outputs, reference)) def main(): compile() num_runs = int(os.getenv(\"NUM_RUNS\")) if os.getenv(\"NUM_RUNS\") is not None", "d2 = d2 + bias_2 ref_result = d2 * d2 print(ref_result) ################################################ print('EVA", "with open('mlp.eva', 'rb') as f, open('mlp.txt', 'w') as g: read_kt = known_type_pb2.KnownType() read_kt.ParseFromString(f.read())", "r = [0] * n for k in range(n): r[k] = matrix[k %", "output classes, where the 6 extra ones are always zero/ignored weights_2 = np.random.rand(layer2_units,", "EvaProgram, Input, Output, evaluate, save, load from eva.ckks import CKKSCompiler from eva.seal import", "load('mlp.sealpublic') encInputs = load('mlp_inputs.sealvals') t0 = time.perf_counter() encOutputs = 
public_ctx.execute(mlp, encInputs) t1 =", "k in range(n): r[k] = matrix[k % m][(k + d) % n] return", "# Licensed under the MIT license. from eva import EvaProgram, Input, Output, evaluate,", "read_kt.ParseFromString(f.read()) read_eva = eva_pb2.Program() read_eva.ParseFromString(read_kt.contents.value) g.write(str(read_eva)) def compute(): ################################################ print('Numpy version') d1 =", "eva.metric import valuation_mse import os import time import copy import math import numpy", "+ bias_1 act1 = d1 * d1 d2 = np.dot(weights_2, act1) d2 =", "weights_1 = np.random.rand(layer1_units, image_size) - 0.5 bias_1 = np.random.rand(layer1_units) - 0.5 # Allowing", "abs(t0 - t1)) all_times = [] cur_times = [] # Generate Data image_size", "def compile(): print('Compile time') mlp = EvaProgram('NN (MLP)', vec_size=32 * 32) with mlp:", "where the 6 extra ones are always zero/ignored weights_2 = np.random.rand(layer2_units, layer1_units) -", "known_type_pb2 import eva_pb2 #################### # BENCHMARKING # #################### times = { 't_keygen': [],", "more easily random.seed(0) # Input image image = [0.5] * image_size # Considering", "* d1 d2 = np.dot(weights_2, act1) d2 = d2 + bias_2 ref_result =", "save, load from eva.ckks import CKKSCompiler from eva.seal import generate_keys from eva.metric import", "time import copy import math import numpy as np import pandas as pd", "Input, Output, evaluate, save, load from eva.ckks import CKKSCompiler from eva.seal import generate_keys", "if 'OUTPUT_FILENAME' in os.environ: output_filename = os.environ['OUTPUT_FILENAME'] df.to_csv(output_filename, index=False) if __name__ == \"__main__\":", "images padded to 32x32 for easier math weights_1 = np.random.rand(layer1_units, image_size) - 0.5", "= copy.copy(times) compute() print(cur_times) all_times.append(cur_times) # Output the benchmarking results df = pd.DataFrame(all_times)", "eva import EvaProgram, Input, Output, evaluate, save, load from eva.ckks 
import CKKSCompiler from", "np import pandas as pd from numpy import random import known_type_pb2 import eva_pb2" ]
[ "is', url.parse_database_url, 'sqlite://:memory:') def test_db_url_with_slashes(self): name = quote('/var/db/tweets.sqlite', safe='') conf = url.database_conf_from_url('sqlite3:///{0}'.format(name)) self.assertDictEqual(conf,", "= url.database_conf_from_url( 'pgsql://joar:hunter2@5monkeys.se:4242/tweets/tweetschema' '?hello=world') self.assertDictEqual(conf, { 'ENGINE': 'django.db.backends.postgresql_psycopg2', 'HOST': '5monkeys.se', 'NAME': 'tweets', 'PARAMS':", "url.database_conf_from_url('abc://5monkeys.se') self.maxDiff = None self.assertDictEqual(conf, { 'ENGINE': 'a.b.c', 'HOST': '5monkeys.se', 'NAME': '', 'PARAMS':", "url.parse_path, None) self.assertRaisesRegex(Exception, '^Your url is', url.parse_database_url, 'sqlite://:memory:') def test_db_url_with_slashes(self): name = quote('/var/db/tweets.sqlite',", "= None self.assertDictEqual(conf, { 'ENGINE': 'a.b.c', 'HOST': '5monkeys.se', 'NAME': '', 'PARAMS': {}, 'PASSWORD':", "url.database_conf_from_url('sqlite3:///{0}'.format(name)) self.assertDictEqual(conf, { 'ENGINE': 'django.db.backends.sqlite3', 'NAME': '/var/db/tweets.sqlite', 'USER': None, 'HOST': None, 'PORT': None,", "'django.db.backends.postgresql_psycopg2', 'HOST': '5monkeys.se', 'NAME': 'tweets', 'PARAMS': {'hello': 'world'}, 'PASSWORD': 'hunter2', 'PORT': 4242, 'SCHEMA':", "url.register_engine('a', ['a', {'b': 'c'}]) self.assertEqual(url.get_engine('a+b'), 'c') def test_parse(self): self.assertRaises(ValueError, url.parse_path, None) self.assertRaisesRegex(Exception, '^Your", "'django.db.backends.sqlite3', 'NAME': '/var/db/tweets.sqlite', 'USER': None, 'HOST': None, 'PORT': None, 'PARAMS': {}, 'SCHEMA': None,", "None, 'SCHEMA': None, 'USER': None, }) def test_resolve(self): url.register_engine('abc', 'a.b.c') self.assertRaises(KeyError, url.resolve, cursor={},", "url.parse_database_url, 'sqlite://:memory:') def test_db_url_with_slashes(self): name = quote('/var/db/tweets.sqlite', safe='') conf = 
url.database_conf_from_url('sqlite3:///{0}'.format(name)) self.assertDictEqual(conf, {", "quote('/var/db/tweets.sqlite', safe='') conf = url.database_conf_from_url('sqlite3:///{0}'.format(name)) self.assertDictEqual(conf, { 'ENGINE': 'django.db.backends.sqlite3', 'NAME': '/var/db/tweets.sqlite', 'USER': None,", "'PORT': 4242, 'SCHEMA': 'tweetschema', 'USER': 'joar', }) def test_alias(self): self.assertEqual(repr(url.Alias(target='x')), '<Alias to \"x\">')", "None, }) def test_db_url(self): conf = url.database_conf_from_url( 'pgsql://joar:hunter2@5monkeys.se:4242/tweets/tweetschema' '?hello=world') self.assertDictEqual(conf, { 'ENGINE': 'django.db.backends.postgresql_psycopg2',", "'ENGINE': 'django.db.backends.sqlite3', 'NAME': '/var/db/tweets.sqlite', 'USER': None, 'HOST': None, 'PORT': None, 'PARAMS': {}, 'SCHEMA':", "'django.db.backends.sqlite3', 'NAME': '', 'USER': None, 'HOST': None, 'PORT': None, 'PARAMS': {}, 'SCHEMA': None,", "{ 'ENGINE': 'a.b.c', 'HOST': '5monkeys.se', 'NAME': '', 'PARAMS': {}, 'PASSWORD': None, 'PORT': None,", "self.assertRaisesRegex(ValueError, '^django-bananas\\.url', url.get_engine, 'a') url.register_engine('a', ['a', {'b': 'c'}]) self.assertEqual(url.get_engine('a+b'), 'c') def test_parse(self): self.assertRaises(ValueError,", "'NAME': '', 'USER': None, 'HOST': None, 'PORT': None, 'PARAMS': {}, 'SCHEMA': None, 'PASSWORD':", "'ENGINE': 'a.b.c', 'HOST': '5monkeys.se', 'NAME': '', 'PARAMS': {}, 'PASSWORD': None, 'PORT': None, 'SCHEMA':", "cursor={}, key='xyz') def test_get_engine(self): self.assertRaisesMessage(KeyError, 'postgres has no sub-engines', url.get_engine, 'postgres+psycopg2+postgis') url.register_engine('a', ['b'])", "'NAME': 'tweets', 'PARAMS': {'hello': 'world'}, 'PASSWORD': 'hunter2', 'PORT': 4242, 'SCHEMA': 'tweetschema', 'USER': 'joar',", "def test_sqlite_memory(self): conf = url.database_conf_from_url('sqlite://') self.assertDictEqual(conf, { 'ENGINE': 'django.db.backends.sqlite3', 'NAME': '', 'USER': None,", "def 
test_alias(self): self.assertEqual(repr(url.Alias(target='x')), '<Alias to \"x\">') def test_register(self): url.register_engine('abc', 'a.b.c') conf = url.database_conf_from_url('abc://5monkeys.se')", "to \"x\">') def test_register(self): url.register_engine('abc', 'a.b.c') conf = url.database_conf_from_url('abc://5monkeys.se') self.maxDiff = None self.assertDictEqual(conf,", "self.assertDictEqual(conf, { 'ENGINE': 'a.b.c', 'HOST': '5monkeys.se', 'NAME': '', 'PARAMS': {}, 'PASSWORD': None, 'PORT':", "= url.database_conf_from_url('abc://5monkeys.se') self.maxDiff = None self.assertDictEqual(conf, { 'ENGINE': 'a.b.c', 'HOST': '5monkeys.se', 'NAME': '',", "'c') def test_parse(self): self.assertRaises(ValueError, url.parse_path, None) self.assertRaisesRegex(Exception, '^Your url is', url.parse_database_url, 'sqlite://:memory:') def", "= { 'Doctest': url } class DBURLTest(TestCase): def test_sqlite_memory(self): conf = url.database_conf_from_url('sqlite://') self.assertDictEqual(conf,", "self.assertEqual(repr(url.Alias(target='x')), '<Alias to \"x\">') def test_register(self): url.register_engine('abc', 'a.b.c') conf = url.database_conf_from_url('abc://5monkeys.se') self.maxDiff =", "url.database_conf_from_url( 'pgsql://joar:hunter2@5monkeys.se:4242/tweets/tweetschema' '?hello=world') self.assertDictEqual(conf, { 'ENGINE': 'django.db.backends.postgresql_psycopg2', 'HOST': '5monkeys.se', 'NAME': 'tweets', 'PARAMS': {'hello':", "None, }) def test_resolve(self): url.register_engine('abc', 'a.b.c') self.assertRaises(KeyError, url.resolve, cursor={}, key='xyz') def test_get_engine(self): self.assertRaisesMessage(KeyError,", "url is', url.parse_database_url, 'sqlite://:memory:') def test_db_url_with_slashes(self): name = quote('/var/db/tweets.sqlite', safe='') conf = url.database_conf_from_url('sqlite3:///{0}'.format(name))", "'PASSWORD': None, 'PORT': None, 'SCHEMA': None, 'USER': None, }) def test_resolve(self): url.register_engine('abc', 'a.b.c')", 
"self.assertDictEqual(conf, { 'ENGINE': 'django.db.backends.sqlite3', 'NAME': '/var/db/tweets.sqlite', 'USER': None, 'HOST': None, 'PORT': None, 'PARAMS':", "from django.test import TestCase from bananas import url __test__ = { 'Doctest': url", "'SCHEMA': 'tweetschema', 'USER': 'joar', }) def test_alias(self): self.assertEqual(repr(url.Alias(target='x')), '<Alias to \"x\">') def test_register(self):", "test_parse(self): self.assertRaises(ValueError, url.parse_path, None) self.assertRaisesRegex(Exception, '^Your url is', url.parse_database_url, 'sqlite://:memory:') def test_db_url_with_slashes(self): name", "from bananas import url __test__ = { 'Doctest': url } class DBURLTest(TestCase): def", "import TestCase from bananas import url __test__ = { 'Doctest': url } class", "sub-engines', url.get_engine, 'postgres+psycopg2+postgis') url.register_engine('a', ['b']) self.assertRaisesRegex(ValueError, '^django-bananas\\.url', url.get_engine, 'a') url.register_engine('a', ['a', {'b': 'c'}])", "url.register_engine('abc', 'a.b.c') conf = url.database_conf_from_url('abc://5monkeys.se') self.maxDiff = None self.assertDictEqual(conf, { 'ENGINE': 'a.b.c', 'HOST':", "urllib.parse import quote from django.test import TestCase from bananas import url __test__ =", "None, 'HOST': None, 'PORT': None, 'PARAMS': {}, 'SCHEMA': None, 'PASSWORD': None, }) def", "'PARAMS': {'hello': 'world'}, 'PASSWORD': 'hunter2', 'PORT': 4242, 'SCHEMA': 'tweetschema', 'USER': 'joar', }) def", "4242, 'SCHEMA': 'tweetschema', 'USER': 'joar', }) def test_alias(self): self.assertEqual(repr(url.Alias(target='x')), '<Alias to \"x\">') def", "'a.b.c') conf = url.database_conf_from_url('abc://5monkeys.se') self.maxDiff = None self.assertDictEqual(conf, { 'ENGINE': 'a.b.c', 'HOST': '5monkeys.se',", "'world'}, 'PASSWORD': 'hunter2', 'PORT': 4242, 'SCHEMA': 'tweetschema', 'USER': 'joar', }) def test_alias(self): self.assertEqual(repr(url.Alias(target='x')),", "self.maxDiff = None self.assertDictEqual(conf, { 
'ENGINE': 'a.b.c', 'HOST': '5monkeys.se', 'NAME': '', 'PARAMS': {},", "conf = url.database_conf_from_url('sqlite3:///{0}'.format(name)) self.assertDictEqual(conf, { 'ENGINE': 'django.db.backends.sqlite3', 'NAME': '/var/db/tweets.sqlite', 'USER': None, 'HOST': None,", "url.register_engine('abc', 'a.b.c') self.assertRaises(KeyError, url.resolve, cursor={}, key='xyz') def test_get_engine(self): self.assertRaisesMessage(KeyError, 'postgres has no sub-engines',", "url.get_engine, 'postgres+psycopg2+postgis') url.register_engine('a', ['b']) self.assertRaisesRegex(ValueError, '^django-bananas\\.url', url.get_engine, 'a') url.register_engine('a', ['a', {'b': 'c'}]) self.assertEqual(url.get_engine('a+b'),", "self.assertRaises(ValueError, url.parse_path, None) self.assertRaisesRegex(Exception, '^Your url is', url.parse_database_url, 'sqlite://:memory:') def test_db_url_with_slashes(self): name =", "None) self.assertRaisesRegex(Exception, '^Your url is', url.parse_database_url, 'sqlite://:memory:') def test_db_url_with_slashes(self): name = quote('/var/db/tweets.sqlite', safe='')", "key='xyz') def test_get_engine(self): self.assertRaisesMessage(KeyError, 'postgres has no sub-engines', url.get_engine, 'postgres+psycopg2+postgis') url.register_engine('a', ['b']) self.assertRaisesRegex(ValueError,", "has no sub-engines', url.get_engine, 'postgres+psycopg2+postgis') url.register_engine('a', ['b']) self.assertRaisesRegex(ValueError, '^django-bananas\\.url', url.get_engine, 'a') url.register_engine('a', ['a',", "quote from django.test import TestCase from bananas import url __test__ = { 'Doctest':", "{}, 'PASSWORD': None, 'PORT': None, 'SCHEMA': None, 'USER': None, }) def test_resolve(self): url.register_engine('abc',", "'postgres+psycopg2+postgis') url.register_engine('a', ['b']) self.assertRaisesRegex(ValueError, '^django-bananas\\.url', url.get_engine, 'a') url.register_engine('a', ['a', {'b': 'c'}]) self.assertEqual(url.get_engine('a+b'), 'c')", "= 
url.database_conf_from_url('sqlite://') self.assertDictEqual(conf, { 'ENGINE': 'django.db.backends.sqlite3', 'NAME': '', 'USER': None, 'HOST': None, 'PORT':", "def test_db_url(self): conf = url.database_conf_from_url( 'pgsql://joar:hunter2@5monkeys.se:4242/tweets/tweetschema' '?hello=world') self.assertDictEqual(conf, { 'ENGINE': 'django.db.backends.postgresql_psycopg2', 'HOST': '5monkeys.se',", "= quote('/var/db/tweets.sqlite', safe='') conf = url.database_conf_from_url('sqlite3:///{0}'.format(name)) self.assertDictEqual(conf, { 'ENGINE': 'django.db.backends.sqlite3', 'NAME': '/var/db/tweets.sqlite', 'USER':", "conf = url.database_conf_from_url('sqlite://') self.assertDictEqual(conf, { 'ENGINE': 'django.db.backends.sqlite3', 'NAME': '', 'USER': None, 'HOST': None,", "'HOST': None, 'PORT': None, 'PARAMS': {}, 'SCHEMA': None, 'PASSWORD': None, }) def test_db_url(self):", "'PASSWORD': 'hunter2', 'PORT': 4242, 'SCHEMA': 'tweetschema', 'USER': 'joar', }) def test_alias(self): self.assertEqual(repr(url.Alias(target='x')), '<Alias", "'', 'USER': None, 'HOST': None, 'PORT': None, 'PARAMS': {}, 'SCHEMA': None, 'PASSWORD': None,", "{'b': 'c'}]) self.assertEqual(url.get_engine('a+b'), 'c') def test_parse(self): self.assertRaises(ValueError, url.parse_path, None) self.assertRaisesRegex(Exception, '^Your url is',", "self.assertRaisesRegex(Exception, '^Your url is', url.parse_database_url, 'sqlite://:memory:') def test_db_url_with_slashes(self): name = quote('/var/db/tweets.sqlite', safe='') conf", "def test_parse(self): self.assertRaises(ValueError, url.parse_path, None) self.assertRaisesRegex(Exception, '^Your url is', url.parse_database_url, 'sqlite://:memory:') def test_db_url_with_slashes(self):", "{ 'ENGINE': 'django.db.backends.postgresql_psycopg2', 'HOST': '5monkeys.se', 'NAME': 'tweets', 'PARAMS': {'hello': 'world'}, 'PASSWORD': 'hunter2', 'PORT':", "'HOST': '5monkeys.se', 'NAME': '', 'PARAMS': {}, 'PASSWORD': None, 'PORT': None, 'SCHEMA': None, 'USER':", "django.test 
import TestCase from bananas import url __test__ = { 'Doctest': url }", "'PORT': None, 'PARAMS': {}, 'SCHEMA': None, 'PASSWORD': None, }) def test_db_url(self): conf =", "test_register(self): url.register_engine('abc', 'a.b.c') conf = url.database_conf_from_url('abc://5monkeys.se') self.maxDiff = None self.assertDictEqual(conf, { 'ENGINE': 'a.b.c',", "'5monkeys.se', 'NAME': 'tweets', 'PARAMS': {'hello': 'world'}, 'PASSWORD': 'hunter2', 'PORT': 4242, 'SCHEMA': 'tweetschema', 'USER':", "{'hello': 'world'}, 'PASSWORD': 'hunter2', 'PORT': 4242, 'SCHEMA': 'tweetschema', 'USER': 'joar', }) def test_alias(self):", "{ 'ENGINE': 'django.db.backends.sqlite3', 'NAME': '', 'USER': None, 'HOST': None, 'PORT': None, 'PARAMS': {},", "'?hello=world') self.assertDictEqual(conf, { 'ENGINE': 'django.db.backends.postgresql_psycopg2', 'HOST': '5monkeys.se', 'NAME': 'tweets', 'PARAMS': {'hello': 'world'}, 'PASSWORD':", "self.assertRaises(KeyError, url.resolve, cursor={}, key='xyz') def test_get_engine(self): self.assertRaisesMessage(KeyError, 'postgres has no sub-engines', url.get_engine, 'postgres+psycopg2+postgis')", "'postgres has no sub-engines', url.get_engine, 'postgres+psycopg2+postgis') url.register_engine('a', ['b']) self.assertRaisesRegex(ValueError, '^django-bananas\\.url', url.get_engine, 'a') url.register_engine('a',", "'ENGINE': 'django.db.backends.sqlite3', 'NAME': '', 'USER': None, 'HOST': None, 'PORT': None, 'PARAMS': {}, 'SCHEMA':", "{}, 'SCHEMA': None, 'PASSWORD': None, }) def test_db_url(self): conf = url.database_conf_from_url( 'pgsql://joar:hunter2@5monkeys.se:4242/tweets/tweetschema' '?hello=world')", "'sqlite://:memory:') def test_db_url_with_slashes(self): name = quote('/var/db/tweets.sqlite', safe='') conf = url.database_conf_from_url('sqlite3:///{0}'.format(name)) self.assertDictEqual(conf, { 'ENGINE':", "{ 'Doctest': url } class DBURLTest(TestCase): def test_sqlite_memory(self): conf = url.database_conf_from_url('sqlite://') self.assertDictEqual(conf, {", 
"test_alias(self): self.assertEqual(repr(url.Alias(target='x')), '<Alias to \"x\">') def test_register(self): url.register_engine('abc', 'a.b.c') conf = url.database_conf_from_url('abc://5monkeys.se') self.maxDiff", "class DBURLTest(TestCase): def test_sqlite_memory(self): conf = url.database_conf_from_url('sqlite://') self.assertDictEqual(conf, { 'ENGINE': 'django.db.backends.sqlite3', 'NAME': '',", "self.assertDictEqual(conf, { 'ENGINE': 'django.db.backends.sqlite3', 'NAME': '', 'USER': None, 'HOST': None, 'PORT': None, 'PARAMS':", "DBURLTest(TestCase): def test_sqlite_memory(self): conf = url.database_conf_from_url('sqlite://') self.assertDictEqual(conf, { 'ENGINE': 'django.db.backends.sqlite3', 'NAME': '', 'USER':", "TestCase from bananas import url __test__ = { 'Doctest': url } class DBURLTest(TestCase):", "url.resolve, cursor={}, key='xyz') def test_get_engine(self): self.assertRaisesMessage(KeyError, 'postgres has no sub-engines', url.get_engine, 'postgres+psycopg2+postgis') url.register_engine('a',", "'pgsql://joar:hunter2@5monkeys.se:4242/tweets/tweetschema' '?hello=world') self.assertDictEqual(conf, { 'ENGINE': 'django.db.backends.postgresql_psycopg2', 'HOST': '5monkeys.se', 'NAME': 'tweets', 'PARAMS': {'hello': 'world'},", "'^django-bananas\\.url', url.get_engine, 'a') url.register_engine('a', ['a', {'b': 'c'}]) self.assertEqual(url.get_engine('a+b'), 'c') def test_parse(self): self.assertRaises(ValueError, url.parse_path,", "url.register_engine('a', ['b']) self.assertRaisesRegex(ValueError, '^django-bananas\\.url', url.get_engine, 'a') url.register_engine('a', ['a', {'b': 'c'}]) self.assertEqual(url.get_engine('a+b'), 'c') def", "conf = url.database_conf_from_url( 'pgsql://joar:hunter2@5monkeys.se:4242/tweets/tweetschema' '?hello=world') self.assertDictEqual(conf, { 'ENGINE': 'django.db.backends.postgresql_psycopg2', 'HOST': '5monkeys.se', 'NAME': 'tweets',", "None, 'PORT': None, 'PARAMS': {}, 'SCHEMA': None, 'PASSWORD': None, }) def test_db_url(self): 
conf", "bananas import url __test__ = { 'Doctest': url } class DBURLTest(TestCase): def test_sqlite_memory(self):", "}) def test_db_url(self): conf = url.database_conf_from_url( 'pgsql://joar:hunter2@5monkeys.se:4242/tweets/tweetschema' '?hello=world') self.assertDictEqual(conf, { 'ENGINE': 'django.db.backends.postgresql_psycopg2', 'HOST':", "}) def test_alias(self): self.assertEqual(repr(url.Alias(target='x')), '<Alias to \"x\">') def test_register(self): url.register_engine('abc', 'a.b.c') conf =", "'tweetschema', 'USER': 'joar', }) def test_alias(self): self.assertEqual(repr(url.Alias(target='x')), '<Alias to \"x\">') def test_register(self): url.register_engine('abc',", "['a', {'b': 'c'}]) self.assertEqual(url.get_engine('a+b'), 'c') def test_parse(self): self.assertRaises(ValueError, url.parse_path, None) self.assertRaisesRegex(Exception, '^Your url", "name = quote('/var/db/tweets.sqlite', safe='') conf = url.database_conf_from_url('sqlite3:///{0}'.format(name)) self.assertDictEqual(conf, { 'ENGINE': 'django.db.backends.sqlite3', 'NAME': '/var/db/tweets.sqlite',", "'a.b.c') self.assertRaises(KeyError, url.resolve, cursor={}, key='xyz') def test_get_engine(self): self.assertRaisesMessage(KeyError, 'postgres has no sub-engines', url.get_engine,", "'hunter2', 'PORT': 4242, 'SCHEMA': 'tweetschema', 'USER': 'joar', }) def test_alias(self): self.assertEqual(repr(url.Alias(target='x')), '<Alias to", "'a.b.c', 'HOST': '5monkeys.se', 'NAME': '', 'PARAMS': {}, 'PASSWORD': None, 'PORT': None, 'SCHEMA': None,", "'c'}]) self.assertEqual(url.get_engine('a+b'), 'c') def test_parse(self): self.assertRaises(ValueError, url.parse_path, None) self.assertRaisesRegex(Exception, '^Your url is', url.parse_database_url,", "no sub-engines', url.get_engine, 'postgres+psycopg2+postgis') url.register_engine('a', ['b']) self.assertRaisesRegex(ValueError, '^django-bananas\\.url', url.get_engine, 'a') url.register_engine('a', ['a', {'b':", "__test__ = { 'Doctest': url } class 
DBURLTest(TestCase): def test_sqlite_memory(self): conf = url.database_conf_from_url('sqlite://')", "url } class DBURLTest(TestCase): def test_sqlite_memory(self): conf = url.database_conf_from_url('sqlite://') self.assertDictEqual(conf, { 'ENGINE': 'django.db.backends.sqlite3',", "def test_resolve(self): url.register_engine('abc', 'a.b.c') self.assertRaises(KeyError, url.resolve, cursor={}, key='xyz') def test_get_engine(self): self.assertRaisesMessage(KeyError, 'postgres has", "'Doctest': url } class DBURLTest(TestCase): def test_sqlite_memory(self): conf = url.database_conf_from_url('sqlite://') self.assertDictEqual(conf, { 'ENGINE':", "url.database_conf_from_url('sqlite://') self.assertDictEqual(conf, { 'ENGINE': 'django.db.backends.sqlite3', 'NAME': '', 'USER': None, 'HOST': None, 'PORT': None,", "url.get_engine, 'a') url.register_engine('a', ['a', {'b': 'c'}]) self.assertEqual(url.get_engine('a+b'), 'c') def test_parse(self): self.assertRaises(ValueError, url.parse_path, None)", "def test_register(self): url.register_engine('abc', 'a.b.c') conf = url.database_conf_from_url('abc://5monkeys.se') self.maxDiff = None self.assertDictEqual(conf, { 'ENGINE':", "'SCHEMA': None, 'USER': None, }) def test_resolve(self): url.register_engine('abc', 'a.b.c') self.assertRaises(KeyError, url.resolve, cursor={}, key='xyz')", "'USER': None, 'HOST': None, 'PORT': None, 'PARAMS': {}, 'SCHEMA': None, 'PASSWORD': None, })", "'PARAMS': {}, 'SCHEMA': None, 'PASSWORD': None, }) def test_db_url(self): conf = url.database_conf_from_url( 'pgsql://joar:hunter2@5monkeys.se:4242/tweets/tweetschema'", "'', 'PARAMS': {}, 'PASSWORD': None, 'PORT': None, 'SCHEMA': None, 'USER': None, }) def", "= url.database_conf_from_url('sqlite3:///{0}'.format(name)) self.assertDictEqual(conf, { 'ENGINE': 'django.db.backends.sqlite3', 'NAME': '/var/db/tweets.sqlite', 'USER': None, 'HOST': None, 'PORT':", "'PASSWORD': None, }) def test_db_url(self): conf = url.database_conf_from_url( 
'pgsql://joar:hunter2@5monkeys.se:4242/tweets/tweetschema' '?hello=world') self.assertDictEqual(conf, { 'ENGINE':", "test_db_url(self): conf = url.database_conf_from_url( 'pgsql://joar:hunter2@5monkeys.se:4242/tweets/tweetschema' '?hello=world') self.assertDictEqual(conf, { 'ENGINE': 'django.db.backends.postgresql_psycopg2', 'HOST': '5monkeys.se', 'NAME':", "None self.assertDictEqual(conf, { 'ENGINE': 'a.b.c', 'HOST': '5monkeys.se', 'NAME': '', 'PARAMS': {}, 'PASSWORD': None,", "None, 'PARAMS': {}, 'SCHEMA': None, 'PASSWORD': None, }) def test_db_url(self): conf = url.database_conf_from_url(", "from urllib.parse import quote from django.test import TestCase from bananas import url __test__", "'a') url.register_engine('a', ['a', {'b': 'c'}]) self.assertEqual(url.get_engine('a+b'), 'c') def test_parse(self): self.assertRaises(ValueError, url.parse_path, None) self.assertRaisesRegex(Exception,", "test_sqlite_memory(self): conf = url.database_conf_from_url('sqlite://') self.assertDictEqual(conf, { 'ENGINE': 'django.db.backends.sqlite3', 'NAME': '', 'USER': None, 'HOST':", "'USER': None, }) def test_resolve(self): url.register_engine('abc', 'a.b.c') self.assertRaises(KeyError, url.resolve, cursor={}, key='xyz') def test_get_engine(self):", "'<Alias to \"x\">') def test_register(self): url.register_engine('abc', 'a.b.c') conf = url.database_conf_from_url('abc://5monkeys.se') self.maxDiff = None", "'5monkeys.se', 'NAME': '', 'PARAMS': {}, 'PASSWORD': None, 'PORT': None, 'SCHEMA': None, 'USER': None,", "'HOST': '5monkeys.se', 'NAME': 'tweets', 'PARAMS': {'hello': 'world'}, 'PASSWORD': 'hunter2', 'PORT': 4242, 'SCHEMA': 'tweetschema',", "'tweets', 'PARAMS': {'hello': 'world'}, 'PASSWORD': 'hunter2', 'PORT': 4242, 'SCHEMA': 'tweetschema', 'USER': 'joar', })", "{ 'ENGINE': 'django.db.backends.sqlite3', 'NAME': '/var/db/tweets.sqlite', 'USER': None, 'HOST': None, 'PORT': None, 'PARAMS': {},", "'NAME': '', 'PARAMS': {}, 'PASSWORD': None, 'PORT': None, 'SCHEMA': None, 'USER': 
None, })", "'^Your url is', url.parse_database_url, 'sqlite://:memory:') def test_db_url_with_slashes(self): name = quote('/var/db/tweets.sqlite', safe='') conf =", "\"x\">') def test_register(self): url.register_engine('abc', 'a.b.c') conf = url.database_conf_from_url('abc://5monkeys.se') self.maxDiff = None self.assertDictEqual(conf, {", "test_resolve(self): url.register_engine('abc', 'a.b.c') self.assertRaises(KeyError, url.resolve, cursor={}, key='xyz') def test_get_engine(self): self.assertRaisesMessage(KeyError, 'postgres has no", "self.assertEqual(url.get_engine('a+b'), 'c') def test_parse(self): self.assertRaises(ValueError, url.parse_path, None) self.assertRaisesRegex(Exception, '^Your url is', url.parse_database_url, 'sqlite://:memory:')", "'ENGINE': 'django.db.backends.postgresql_psycopg2', 'HOST': '5monkeys.se', 'NAME': 'tweets', 'PARAMS': {'hello': 'world'}, 'PASSWORD': 'hunter2', 'PORT': 4242,", "'joar', }) def test_alias(self): self.assertEqual(repr(url.Alias(target='x')), '<Alias to \"x\">') def test_register(self): url.register_engine('abc', 'a.b.c') conf", "def test_db_url_with_slashes(self): name = quote('/var/db/tweets.sqlite', safe='') conf = url.database_conf_from_url('sqlite3:///{0}'.format(name)) self.assertDictEqual(conf, { 'ENGINE': 'django.db.backends.sqlite3',", "'NAME': '/var/db/tweets.sqlite', 'USER': None, 'HOST': None, 'PORT': None, 'PARAMS': {}, 'SCHEMA': None, 'PASSWORD':", "None, 'PORT': None, 'SCHEMA': None, 'USER': None, }) def test_resolve(self): url.register_engine('abc', 'a.b.c') self.assertRaises(KeyError,", "'/var/db/tweets.sqlite', 'USER': None, 'HOST': None, 'PORT': None, 'PARAMS': {}, 'SCHEMA': None, 'PASSWORD': None,", "} class DBURLTest(TestCase): def test_sqlite_memory(self): conf = url.database_conf_from_url('sqlite://') self.assertDictEqual(conf, { 'ENGINE': 'django.db.backends.sqlite3', 'NAME':", "}) def test_resolve(self): url.register_engine('abc', 'a.b.c') self.assertRaises(KeyError, url.resolve, cursor={}, 
key='xyz') def test_get_engine(self): self.assertRaisesMessage(KeyError, 'postgres", "conf = url.database_conf_from_url('abc://5monkeys.se') self.maxDiff = None self.assertDictEqual(conf, { 'ENGINE': 'a.b.c', 'HOST': '5monkeys.se', 'NAME':", "None, 'USER': None, }) def test_resolve(self): url.register_engine('abc', 'a.b.c') self.assertRaises(KeyError, url.resolve, cursor={}, key='xyz') def", "test_db_url_with_slashes(self): name = quote('/var/db/tweets.sqlite', safe='') conf = url.database_conf_from_url('sqlite3:///{0}'.format(name)) self.assertDictEqual(conf, { 'ENGINE': 'django.db.backends.sqlite3', 'NAME':", "self.assertDictEqual(conf, { 'ENGINE': 'django.db.backends.postgresql_psycopg2', 'HOST': '5monkeys.se', 'NAME': 'tweets', 'PARAMS': {'hello': 'world'}, 'PASSWORD': 'hunter2',", "safe='') conf = url.database_conf_from_url('sqlite3:///{0}'.format(name)) self.assertDictEqual(conf, { 'ENGINE': 'django.db.backends.sqlite3', 'NAME': '/var/db/tweets.sqlite', 'USER': None, 'HOST':", "'PORT': None, 'SCHEMA': None, 'USER': None, }) def test_resolve(self): url.register_engine('abc', 'a.b.c') self.assertRaises(KeyError, url.resolve,", "None, 'PASSWORD': None, }) def test_db_url(self): conf = url.database_conf_from_url( 'pgsql://joar:hunter2@5monkeys.se:4242/tweets/tweetschema' '?hello=world') self.assertDictEqual(conf, {", "self.assertRaisesMessage(KeyError, 'postgres has no sub-engines', url.get_engine, 'postgres+psycopg2+postgis') url.register_engine('a', ['b']) self.assertRaisesRegex(ValueError, '^django-bananas\\.url', url.get_engine, 'a')", "test_get_engine(self): self.assertRaisesMessage(KeyError, 'postgres has no sub-engines', url.get_engine, 'postgres+psycopg2+postgis') url.register_engine('a', ['b']) self.assertRaisesRegex(ValueError, '^django-bananas\\.url', url.get_engine,", "['b']) self.assertRaisesRegex(ValueError, '^django-bananas\\.url', url.get_engine, 'a') url.register_engine('a', ['a', {'b': 'c'}]) self.assertEqual(url.get_engine('a+b'), 'c') def 
test_parse(self):", "<gh_stars>0 from urllib.parse import quote from django.test import TestCase from bananas import url", "'USER': 'joar', }) def test_alias(self): self.assertEqual(repr(url.Alias(target='x')), '<Alias to \"x\">') def test_register(self): url.register_engine('abc', 'a.b.c')", "import url __test__ = { 'Doctest': url } class DBURLTest(TestCase): def test_sqlite_memory(self): conf", "'SCHEMA': None, 'PASSWORD': None, }) def test_db_url(self): conf = url.database_conf_from_url( 'pgsql://joar:hunter2@5monkeys.se:4242/tweets/tweetschema' '?hello=world') self.assertDictEqual(conf,", "def test_get_engine(self): self.assertRaisesMessage(KeyError, 'postgres has no sub-engines', url.get_engine, 'postgres+psycopg2+postgis') url.register_engine('a', ['b']) self.assertRaisesRegex(ValueError, '^django-bananas\\.url',", "'PARAMS': {}, 'PASSWORD': None, 'PORT': None, 'SCHEMA': None, 'USER': None, }) def test_resolve(self):", "import quote from django.test import TestCase from bananas import url __test__ = {", "url __test__ = { 'Doctest': url } class DBURLTest(TestCase): def test_sqlite_memory(self): conf =" ]
[ "within the div #links respectively redirect to:download, reading and the uploader's profile chapter", "= chapter.find('div', class_='meta_r').text print('the latest one piece chapter is...\\n') print(chapter_title) print(chapter_date+'(yyyy/mm/dd)\\n') links =", "print('Zip file in:', getcwd()) elif action == 'r': r_url = links[1][1] webbrowser.open(r_url) else:", "r = requests.get(url).text soup = BeautifulSoup(r, 'lxml') #the first <div> tag with class", "requests import webbrowser from os import getcwd url = 'https://jaiminisbox.com/reader/series/one-piece-2/' #website with chapters", "urls as strings for i in range(len(links)): links[i] = str(links[i]) links[i] = links[i].split('\"')", "lists where the second element for each are urls #print(links) #visually: [[htmlstuff, url,", "this #without the use of regular expressions for 'http*' action = input('would you", "chapter_zip = open('%s' %chapter_title, 'wb') chapter_zip.write(r2.content) chapter_zip.close() print('Zip file in:', getcwd()) elif action", "expressions for 'http*' action = input('would you like to download or read the", "is the latest chapter #there are 3 anchor tags <a> with 'href' attributes", "'href' attributes within the div #links respectively redirect to:download, reading and the uploader's", "\"\"\" Script that scrapes Jaimini's box website to retrieve when was the last", "with 'href' attributes within the div #links respectively redirect to:download, reading and the", "<a> with 'href' attributes within the div #links respectively redirect to:download, reading and", "for 'http*' action = input('would you like to download or read the chapter", "class_='element') chapter_title = chapter.find('div', class_='title').text chapter_date = chapter.find('div', class_='meta_r').text print('the latest one piece", "3 with links inside <a> tags #convert links' content into strings to use", "chapter.find('div', class_='meta_r').text print('the latest one piece chapter is...\\n') print(chapter_title) 
print(chapter_date+'(yyyy/mm/dd)\\n') links = chapter.find_all('a')", "import BeautifulSoup import requests import webbrowser from os import getcwd url = 'https://jaiminisbox.com/reader/series/one-piece-2/'", "are urls #print(links) #visually: [[htmlstuff, url, htmlstuff], [idem], [idem]] #This format has been", "get the urls as strings for i in range(len(links)): links[i] = str(links[i]) links[i]", "open('%s' %chapter_title, 'wb') chapter_zip.write(r2.content) chapter_zip.close() print('Zip file in:', getcwd()) elif action == 'r':", "'d': d_url = links[0][1] print('Downloading...') r2 = requests.get(d_url) chapter_zip = open('%s' %chapter_title, 'wb')", "attributes within the div #links respectively redirect to:download, reading and the uploader's profile", "#convert links' content into strings to use split() to get the urls as", "browser or download it \"\"\" from bs4 import BeautifulSoup import requests import webbrowser", "print(chapter_date+'(yyyy/mm/dd)\\n') links = chapter.find_all('a') #list of len 3 with links inside <a> tags", "when was the last One Piece chapter released to then ask you if", "len 3 with links inside <a> tags #convert links' content into strings to", "#print(links) #visually: [[htmlstuff, url, htmlstuff], [idem], [idem]] #This format has been consistently used,", "action == 'r': r_url = links[1][1] webbrowser.open(r_url) else: print('that was neither d nor", "want to read the chapter in your browser or download it \"\"\" from", "are 3 anchor tags <a> with 'href' attributes within the div #links respectively", "links[i] = links[i].split('\"') #the 3 items in links are now lists where the", "items in links are now lists where the second element for each are", "tags <a> with 'href' attributes within the div #links respectively redirect to:download, reading", "hardcode this #without the use of regular expressions for 'http*' action = input('would", "links inside <a> tags #convert links' content into strings to use split() to", "d_url = 
links[0][1] print('Downloading...') r2 = requests.get(d_url) chapter_zip = open('%s' %chapter_title, 'wb') chapter_zip.write(r2.content)", "each are urls #print(links) #visually: [[htmlstuff, url, htmlstuff], [idem], [idem]] #This format has", "with class 'element' is the latest chapter #there are 3 anchor tags <a>", "to:download, reading and the uploader's profile chapter = soup.find('div', class_='element') chapter_title = chapter.find('div',", "chapter is...\\n') print(chapter_title) print(chapter_date+'(yyyy/mm/dd)\\n') links = chapter.find_all('a') #list of len 3 with links", "Piece chapter released to then ask you if you want to read the", "'element' is the latest chapter #there are 3 anchor tags <a> with 'href'", "BeautifulSoup import requests import webbrowser from os import getcwd url = 'https://jaiminisbox.com/reader/series/one-piece-2/' #website", "that scrapes Jaimini's box website to retrieve when was the last One Piece", "3 anchor tags <a> with 'href' attributes within the div #links respectively redirect", "the second element for each are urls #print(links) #visually: [[htmlstuff, url, htmlstuff], [idem],", "input('would you like to download or read the chapter [d/r]?') if action ==", "== 'd': d_url = links[0][1] print('Downloading...') r2 = requests.get(d_url) chapter_zip = open('%s' %chapter_title,", "os import getcwd url = 'https://jaiminisbox.com/reader/series/one-piece-2/' #website with chapters r = requests.get(url).text soup", "to get the urls as strings for i in range(len(links)): links[i] = str(links[i])", "str(links[i]) links[i] = links[i].split('\"') #the 3 items in links are now lists where", "chapter in your browser or download it \"\"\" from bs4 import BeautifulSoup import", "now lists where the second element for each are urls #print(links) #visually: [[htmlstuff,", "htmlstuff], [idem], [idem]] #This format has been consistently used, which allows to hardcode", "respectively redirect to:download, reading and the uploader's profile 
chapter = soup.find('div', class_='element') chapter_title", "download or read the chapter [d/r]?') if action == 'd': d_url = links[0][1]", "elif action == 'r': r_url = links[1][1] webbrowser.open(r_url) else: print('that was neither d", "in:', getcwd()) elif action == 'r': r_url = links[1][1] webbrowser.open(r_url) else: print('that was", "= requests.get(d_url) chapter_zip = open('%s' %chapter_title, 'wb') chapter_zip.write(r2.content) chapter_zip.close() print('Zip file in:', getcwd())", "'r': r_url = links[1][1] webbrowser.open(r_url) else: print('that was neither d nor r, quitting...')", "import requests import webbrowser from os import getcwd url = 'https://jaiminisbox.com/reader/series/one-piece-2/' #website with", "chapter [d/r]?') if action == 'd': d_url = links[0][1] print('Downloading...') r2 = requests.get(d_url)", "links[i].split('\"') #the 3 items in links are now lists where the second element", "was the last One Piece chapter released to then ask you if you", "import webbrowser from os import getcwd url = 'https://jaiminisbox.com/reader/series/one-piece-2/' #website with chapters r", "it \"\"\" from bs4 import BeautifulSoup import requests import webbrowser from os import", "the chapter in your browser or download it \"\"\" from bs4 import BeautifulSoup", "range(len(links)): links[i] = str(links[i]) links[i] = links[i].split('\"') #the 3 items in links are", "#visually: [[htmlstuff, url, htmlstuff], [idem], [idem]] #This format has been consistently used, which", "if action == 'd': d_url = links[0][1] print('Downloading...') r2 = requests.get(d_url) chapter_zip =", "i in range(len(links)): links[i] = str(links[i]) links[i] = links[i].split('\"') #the 3 items in", "latest chapter #there are 3 anchor tags <a> with 'href' attributes within the", "ask you if you want to read the chapter in your browser or", "latest one piece chapter is...\\n') print(chapter_title) print(chapter_date+'(yyyy/mm/dd)\\n') links = chapter.find_all('a') #list of len", 
"links[i] = str(links[i]) links[i] = links[i].split('\"') #the 3 items in links are now", "<a> tags #convert links' content into strings to use split() to get the", "profile chapter = soup.find('div', class_='element') chapter_title = chapter.find('div', class_='title').text chapter_date = chapter.find('div', class_='meta_r').text", "One Piece chapter released to then ask you if you want to read", "soup.find('div', class_='element') chapter_title = chapter.find('div', class_='title').text chapter_date = chapter.find('div', class_='meta_r').text print('the latest one", "as strings for i in range(len(links)): links[i] = str(links[i]) links[i] = links[i].split('\"') #the", "retrieve when was the last One Piece chapter released to then ask you", "chapter_zip.close() print('Zip file in:', getcwd()) elif action == 'r': r_url = links[1][1] webbrowser.open(r_url)", "tag with class 'element' is the latest chapter #there are 3 anchor tags", "chapter.find('div', class_='title').text chapter_date = chapter.find('div', class_='meta_r').text print('the latest one piece chapter is...\\n') print(chapter_title)", "links = chapter.find_all('a') #list of len 3 with links inside <a> tags #convert", "[[htmlstuff, url, htmlstuff], [idem], [idem]] #This format has been consistently used, which allows", "= soup.find('div', class_='element') chapter_title = chapter.find('div', class_='title').text chapter_date = chapter.find('div', class_='meta_r').text print('the latest", "where the second element for each are urls #print(links) #visually: [[htmlstuff, url, htmlstuff],", "%chapter_title, 'wb') chapter_zip.write(r2.content) chapter_zip.close() print('Zip file in:', getcwd()) elif action == 'r': r_url", "released to then ask you if you want to read the chapter in", "in your browser or download it \"\"\" from bs4 import BeautifulSoup import requests", "like to download or read the chapter [d/r]?') if action == 'd': d_url", "[idem]] #This format has been consistently used, which allows to 
hardcode this #without", "requests.get(d_url) chapter_zip = open('%s' %chapter_title, 'wb') chapter_zip.write(r2.content) chapter_zip.close() print('Zip file in:', getcwd()) elif", "box website to retrieve when was the last One Piece chapter released to", "\"\"\" from bs4 import BeautifulSoup import requests import webbrowser from os import getcwd", "to use split() to get the urls as strings for i in range(len(links)):", "chapter.find_all('a') #list of len 3 with links inside <a> tags #convert links' content", "chapters r = requests.get(url).text soup = BeautifulSoup(r, 'lxml') #the first <div> tag with", "chapter = soup.find('div', class_='element') chapter_title = chapter.find('div', class_='title').text chapter_date = chapter.find('div', class_='meta_r').text print('the", "class_='meta_r').text print('the latest one piece chapter is...\\n') print(chapter_title) print(chapter_date+'(yyyy/mm/dd)\\n') links = chapter.find_all('a') #list", "into strings to use split() to get the urls as strings for i", "url, htmlstuff], [idem], [idem]] #This format has been consistently used, which allows to", "reading and the uploader's profile chapter = soup.find('div', class_='element') chapter_title = chapter.find('div', class_='title').text", "the chapter [d/r]?') if action == 'd': d_url = links[0][1] print('Downloading...') r2 =", "for i in range(len(links)): links[i] = str(links[i]) links[i] = links[i].split('\"') #the 3 items", "for each are urls #print(links) #visually: [[htmlstuff, url, htmlstuff], [idem], [idem]] #This format", "#without the use of regular expressions for 'http*' action = input('would you like", "'https://jaiminisbox.com/reader/series/one-piece-2/' #website with chapters r = requests.get(url).text soup = BeautifulSoup(r, 'lxml') #the first", "with chapters r = requests.get(url).text soup = BeautifulSoup(r, 'lxml') #the first <div> tag", "links' content into strings to use split() to get the urls as strings", "the urls as strings for i in 
range(len(links)): links[i] = str(links[i]) links[i] =", "inside <a> tags #convert links' content into strings to use split() to get", "print('Downloading...') r2 = requests.get(d_url) chapter_zip = open('%s' %chapter_title, 'wb') chapter_zip.write(r2.content) chapter_zip.close() print('Zip file", "requests.get(url).text soup = BeautifulSoup(r, 'lxml') #the first <div> tag with class 'element' is", "use split() to get the urls as strings for i in range(len(links)): links[i]", "soup = BeautifulSoup(r, 'lxml') #the first <div> tag with class 'element' is the", "chapter released to then ask you if you want to read the chapter", "used, which allows to hardcode this #without the use of regular expressions for", "import getcwd url = 'https://jaiminisbox.com/reader/series/one-piece-2/' #website with chapters r = requests.get(url).text soup =", "= input('would you like to download or read the chapter [d/r]?') if action", "strings to use split() to get the urls as strings for i in", "chapter_zip.write(r2.content) chapter_zip.close() print('Zip file in:', getcwd()) elif action == 'r': r_url = links[1][1]", "you want to read the chapter in your browser or download it \"\"\"", "and the uploader's profile chapter = soup.find('div', class_='element') chapter_title = chapter.find('div', class_='title').text chapter_date", "element for each are urls #print(links) #visually: [[htmlstuff, url, htmlstuff], [idem], [idem]] #This", "or read the chapter [d/r]?') if action == 'd': d_url = links[0][1] print('Downloading...')", "with links inside <a> tags #convert links' content into strings to use split()", "#links respectively redirect to:download, reading and the uploader's profile chapter = soup.find('div', class_='element')", "first <div> tag with class 'element' is the latest chapter #there are 3", "= open('%s' %chapter_title, 'wb') chapter_zip.write(r2.content) chapter_zip.close() print('Zip file in:', getcwd()) elif action ==", "to retrieve when was the last One Piece chapter 
released to then ask", "[d/r]?') if action == 'd': d_url = links[0][1] print('Downloading...') r2 = requests.get(d_url) chapter_zip", "to read the chapter in your browser or download it \"\"\" from bs4", "format has been consistently used, which allows to hardcode this #without the use", "<div> tag with class 'element' is the latest chapter #there are 3 anchor", "chapter #there are 3 anchor tags <a> with 'href' attributes within the div", "class_='title').text chapter_date = chapter.find('div', class_='meta_r').text print('the latest one piece chapter is...\\n') print(chapter_title) print(chapter_date+'(yyyy/mm/dd)\\n')", "has been consistently used, which allows to hardcode this #without the use of", "file in:', getcwd()) elif action == 'r': r_url = links[1][1] webbrowser.open(r_url) else: print('that", "last One Piece chapter released to then ask you if you want to", "which allows to hardcode this #without the use of regular expressions for 'http*'", "anchor tags <a> with 'href' attributes within the div #links respectively redirect to:download,", "of regular expressions for 'http*' action = input('would you like to download or", "to hardcode this #without the use of regular expressions for 'http*' action =", "scrapes Jaimini's box website to retrieve when was the last One Piece chapter", "= str(links[i]) links[i] = links[i].split('\"') #the 3 items in links are now lists", "== 'r': r_url = links[1][1] webbrowser.open(r_url) else: print('that was neither d nor r,", "webbrowser from os import getcwd url = 'https://jaiminisbox.com/reader/series/one-piece-2/' #website with chapters r =", "class 'element' is the latest chapter #there are 3 anchor tags <a> with", "#list of len 3 with links inside <a> tags #convert links' content into", "redirect to:download, reading and the uploader's profile chapter = soup.find('div', class_='element') chapter_title =", "piece chapter is...\\n') print(chapter_title) print(chapter_date+'(yyyy/mm/dd)\\n') links = chapter.find_all('a') 
#list of len 3 with", "the div #links respectively redirect to:download, reading and the uploader's profile chapter =", "uploader's profile chapter = soup.find('div', class_='element') chapter_title = chapter.find('div', class_='title').text chapter_date = chapter.find('div',", "you if you want to read the chapter in your browser or download", "print('the latest one piece chapter is...\\n') print(chapter_title) print(chapter_date+'(yyyy/mm/dd)\\n') links = chapter.find_all('a') #list of", "strings for i in range(len(links)): links[i] = str(links[i]) links[i] = links[i].split('\"') #the 3", "from bs4 import BeautifulSoup import requests import webbrowser from os import getcwd url", "the uploader's profile chapter = soup.find('div', class_='element') chapter_title = chapter.find('div', class_='title').text chapter_date =", "= links[0][1] print('Downloading...') r2 = requests.get(d_url) chapter_zip = open('%s' %chapter_title, 'wb') chapter_zip.write(r2.content) chapter_zip.close()", "'lxml') #the first <div> tag with class 'element' is the latest chapter #there", "print(chapter_title) print(chapter_date+'(yyyy/mm/dd)\\n') links = chapter.find_all('a') #list of len 3 with links inside <a>", "div #links respectively redirect to:download, reading and the uploader's profile chapter = soup.find('div',", "3 items in links are now lists where the second element for each", "= requests.get(url).text soup = BeautifulSoup(r, 'lxml') #the first <div> tag with class 'element'", "r2 = requests.get(d_url) chapter_zip = open('%s' %chapter_title, 'wb') chapter_zip.write(r2.content) chapter_zip.close() print('Zip file in:',", "website to retrieve when was the last One Piece chapter released to then", "your browser or download it \"\"\" from bs4 import BeautifulSoup import requests import", "#the first <div> tag with class 'element' is the latest chapter #there are", "chapter_date = chapter.find('div', class_='meta_r').text print('the latest one piece chapter is...\\n') 
print(chapter_title) print(chapter_date+'(yyyy/mm/dd)\\n') links", "allows to hardcode this #without the use of regular expressions for 'http*' action", "or download it \"\"\" from bs4 import BeautifulSoup import requests import webbrowser from", "if you want to read the chapter in your browser or download it", "action == 'd': d_url = links[0][1] print('Downloading...') r2 = requests.get(d_url) chapter_zip = open('%s'", "urls #print(links) #visually: [[htmlstuff, url, htmlstuff], [idem], [idem]] #This format has been consistently", "BeautifulSoup(r, 'lxml') #the first <div> tag with class 'element' is the latest chapter", "= 'https://jaiminisbox.com/reader/series/one-piece-2/' #website with chapters r = requests.get(url).text soup = BeautifulSoup(r, 'lxml') #the", "one piece chapter is...\\n') print(chapter_title) print(chapter_date+'(yyyy/mm/dd)\\n') links = chapter.find_all('a') #list of len 3", "Script that scrapes Jaimini's box website to retrieve when was the last One", "second element for each are urls #print(links) #visually: [[htmlstuff, url, htmlstuff], [idem], [idem]]", "the use of regular expressions for 'http*' action = input('would you like to", "in links are now lists where the second element for each are urls", "#website with chapters r = requests.get(url).text soup = BeautifulSoup(r, 'lxml') #the first <div>", "the latest chapter #there are 3 anchor tags <a> with 'href' attributes within", "tags #convert links' content into strings to use split() to get the urls", "download it \"\"\" from bs4 import BeautifulSoup import requests import webbrowser from os", "consistently used, which allows to hardcode this #without the use of regular expressions", "use of regular expressions for 'http*' action = input('would you like to download", "is...\\n') print(chapter_title) print(chapter_date+'(yyyy/mm/dd)\\n') links = chapter.find_all('a') #list of len 3 with links inside", "#This format has been consistently used, which allows to hardcode this #without the", 
"read the chapter [d/r]?') if action == 'd': d_url = links[0][1] print('Downloading...') r2", "#the 3 items in links are now lists where the second element for", "the last One Piece chapter released to then ask you if you want", "read the chapter in your browser or download it \"\"\" from bs4 import", "split() to get the urls as strings for i in range(len(links)): links[i] =", "of len 3 with links inside <a> tags #convert links' content into strings", "= links[i].split('\"') #the 3 items in links are now lists where the second", "#there are 3 anchor tags <a> with 'href' attributes within the div #links", "url = 'https://jaiminisbox.com/reader/series/one-piece-2/' #website with chapters r = requests.get(url).text soup = BeautifulSoup(r, 'lxml')", "action = input('would you like to download or read the chapter [d/r]?') if", "= BeautifulSoup(r, 'lxml') #the first <div> tag with class 'element' is the latest", "in range(len(links)): links[i] = str(links[i]) links[i] = links[i].split('\"') #the 3 items in links", "links[0][1] print('Downloading...') r2 = requests.get(d_url) chapter_zip = open('%s' %chapter_title, 'wb') chapter_zip.write(r2.content) chapter_zip.close() print('Zip", "regular expressions for 'http*' action = input('would you like to download or read", "are now lists where the second element for each are urls #print(links) #visually:", "getcwd url = 'https://jaiminisbox.com/reader/series/one-piece-2/' #website with chapters r = requests.get(url).text soup = BeautifulSoup(r,", "links are now lists where the second element for each are urls #print(links)", "from os import getcwd url = 'https://jaiminisbox.com/reader/series/one-piece-2/' #website with chapters r = requests.get(url).text", "chapter_title = chapter.find('div', class_='title').text chapter_date = chapter.find('div', class_='meta_r').text print('the latest one piece chapter", "you like to download or read the chapter [d/r]?') if action == 'd':", "to download or read the chapter [d/r]?') if action 
== 'd': d_url =", "'http*' action = input('would you like to download or read the chapter [d/r]?')", "getcwd()) elif action == 'r': r_url = links[1][1] webbrowser.open(r_url) else: print('that was neither", "been consistently used, which allows to hardcode this #without the use of regular", "'wb') chapter_zip.write(r2.content) chapter_zip.close() print('Zip file in:', getcwd()) elif action == 'r': r_url =", "content into strings to use split() to get the urls as strings for", "then ask you if you want to read the chapter in your browser", "[idem], [idem]] #This format has been consistently used, which allows to hardcode this", "to then ask you if you want to read the chapter in your", "bs4 import BeautifulSoup import requests import webbrowser from os import getcwd url =", "Jaimini's box website to retrieve when was the last One Piece chapter released", "= chapter.find('div', class_='title').text chapter_date = chapter.find('div', class_='meta_r').text print('the latest one piece chapter is...\\n')", "= chapter.find_all('a') #list of len 3 with links inside <a> tags #convert links'" ]
[ "open('complexes.json', 'r', encoding='UTF-8') as f: jsonString = f.read() print('JSON input (json_to_xml.json):') print(jsonString) xmlString", "import json import xmltodict with open('complexes.json', 'r', encoding='UTF-8') as f: jsonString = f.read()", "encoding='UTF-8') as f: jsonString = f.read() print('JSON input (json_to_xml.json):') print(jsonString) xmlString = xmltodict.unparse(json.loads(jsonString),", "import xmltodict with open('complexes.json', 'r', encoding='UTF-8') as f: jsonString = f.read() print('JSON input", "as f: jsonString = f.read() print('JSON input (json_to_xml.json):') print(jsonString) xmlString = xmltodict.unparse(json.loads(jsonString), pretty=True)", "jsonString = f.read() print('JSON input (json_to_xml.json):') print(jsonString) xmlString = xmltodict.unparse(json.loads(jsonString), pretty=True) print('\\nXML output(json_to_xml.xml):')", "print(jsonString) xmlString = xmltodict.unparse(json.loads(jsonString), pretty=True) print('\\nXML output(json_to_xml.xml):') print(xmlString) with open('json_to_xml.xml', 'w') as f:", "xmlString = xmltodict.unparse(json.loads(jsonString), pretty=True) print('\\nXML output(json_to_xml.xml):') print(xmlString) with open('json_to_xml.xml', 'w') as f: f.write(xmlString)", "json import xmltodict with open('complexes.json', 'r', encoding='UTF-8') as f: jsonString = f.read() print('JSON", "input (json_to_xml.json):') print(jsonString) xmlString = xmltodict.unparse(json.loads(jsonString), pretty=True) print('\\nXML output(json_to_xml.xml):') print(xmlString) with open('json_to_xml.xml', 'w')", "with open('complexes.json', 'r', encoding='UTF-8') as f: jsonString = f.read() print('JSON input (json_to_xml.json):') print(jsonString)", "print('JSON input (json_to_xml.json):') print(jsonString) xmlString = xmltodict.unparse(json.loads(jsonString), pretty=True) print('\\nXML output(json_to_xml.xml):') print(xmlString) with open('json_to_xml.xml',", "(json_to_xml.json):') print(jsonString) xmlString = 
xmltodict.unparse(json.loads(jsonString), pretty=True) print('\\nXML output(json_to_xml.xml):') print(xmlString) with open('json_to_xml.xml', 'w') as", "xmltodict with open('complexes.json', 'r', encoding='UTF-8') as f: jsonString = f.read() print('JSON input (json_to_xml.json):')", "'r', encoding='UTF-8') as f: jsonString = f.read() print('JSON input (json_to_xml.json):') print(jsonString) xmlString =", "f: jsonString = f.read() print('JSON input (json_to_xml.json):') print(jsonString) xmlString = xmltodict.unparse(json.loads(jsonString), pretty=True) print('\\nXML", "= f.read() print('JSON input (json_to_xml.json):') print(jsonString) xmlString = xmltodict.unparse(json.loads(jsonString), pretty=True) print('\\nXML output(json_to_xml.xml):') print(xmlString)", "f.read() print('JSON input (json_to_xml.json):') print(jsonString) xmlString = xmltodict.unparse(json.loads(jsonString), pretty=True) print('\\nXML output(json_to_xml.xml):') print(xmlString) with" ]
[ "Skapar matris som sedan fylls med bästa lösningarnas ID IDpoints = [] #", "= np.array(ID_vektor[Row_best_option]).tolist() IDpoints.append(int(max(Vektor_ID_optimala))) return IDpoints def generate_pareto_cases(data_dir, problem_constants): crm = get_case_reader(data_dir, problem_constants) input_points", "- 1 def WeightPPpoints(pp, my_weights): Pareto_points = pp np.size(Pareto_points) Nrofrows_pareto = np.size(Pareto_points, 0)", "C[i, 2] = B[i, 2] * my_weights[j, 2] # Definera ideala värden A_positive", "ID ID_vektor = np.delete(Pareto_points, [0, 1, 2], 1).tolist() # skapar matris med outputvärden", "1] = B[i, 1] * my_weights[j, 1] C[i, 2] = B[i, 2] *", "med normaliseringsvärden myarray_normalize = [sum1 ** 0.5, sum2 ** 0.5, sum3 ** 0.5]", "= calculate(input_points, dominates) my_weights = np.matrix( [[1, 0, 0], [0, 1, 0], [0,", "sum3 ** 0.5, 1] # Normaliserar matrisen normalizedPoints = np.array(inputPoints) / np.array(myarray_normalize) while", "= np.array(list(dominatedPoints)) pp = np.array(list(paretoPoints)) return paretoPoints, dominatedPoints, dp, pp def dominates(row, normalized):", "# SLutar när man gått igenom alla punkter. 
break dp = np.array(list(dominatedPoints)) pp", "import CaseReader from optigurator.utils import recording_filename def get_case_reader(data_dir, problem_constants): return CaseReader(recording_filename(data_dir, problem_constants.id)) def", "till pareto ID = int(normalized[3]) paretoPoints.add(tuple(candidateRow)) if len(normalizedPoints) == 0: # SLutar när", "0] - A_negative[0]) ** 2 + (C[i, 1] - A_negative[1]) ** 2 +", "Weights used to pick points from the ParetoFront pareto_case_ids = WeightPPpoints(pp, my_weights) for", "np.size(Pareto_points) Nrofrows_pareto = np.size(Pareto_points, 0) # skapar en vektor med ID ID_vektor =", "0] = B[i, 0] * my_weights[j, 0] C[i, 1] = B[i, 1] *", "** 2 + (C[i, 1] - A_positive[1]) ** 2 + (C[i, 2] -", "S_positive[i] = ( (C[i, 0] - A_positive[0]) ** 2 + (C[i, 1] -", "np.array(ID_vektor[Row_best_option]).tolist() IDpoints.append(int(max(Vektor_ID_optimala))) return IDpoints def generate_pareto_cases(data_dir, problem_constants): crm = get_case_reader(data_dir, problem_constants) input_points =", "** 2 + (C[i, 2] - A_negative[2]) ** 2 ) ** 0.5 C_value[i]", "2].max()] S_positive = np.zeros((Nrofrows, 1)) S_negative = np.zeros((Nrofrows, 1)) C_value = np.zeros((Nrofrows, 1))", "2 + (C[i, 1] - A_negative[1]) ** 2 + (C[i, 2] - A_negative[2])", "if len(normalizedPoints) == 0: # SLutar när man gått igenom alla punkter. 
break", "in enumerate(crm.list_cases()): model_case = crm.get_case(case_id) if ( model_case.outputs[\"usability.min_max_step_height\"][1] <= problem_constants.step_height.upper and model_case.outputs[\"usability.min_max_step_depth\"][0] >=", "if dominates( row, normalized ): # Går in om candidateRow är bättre än", "1] * my_weights[j, 1] C[i, 2] = B[i, 2] * my_weights[j, 2] #", "numpy as np from openmdao.api import CaseReader from optigurator.utils import recording_filename def get_case_reader(data_dir,", "i in range(0, Nrofrows): C[i, 0] = B[i, 0] * my_weights[j, 0] C[i,", "A_positive = [C[:, 0].min(), C[:, 1].min(), C[:, 2].min()] A_negative = [C[:, 0].max(), C[:,", "= sum1 + normalizedPoints[i, 0] ** 2 sum2 = sum2 + normalizedPoints[i, 1]", "summerar värden kolonnvis till nämnare för normalisering sum1 = sum1 + A[i, 0]", "Nrofcolumns) # Skapar matris som sedan fylls med bästa lösningarnas ID IDpoints =", "** 2 sum3 = sum3 + A[i, 2] ** 2 # definerar en", "normaliseras senare B = A.copy() sum1 = 0 sum2 = 0 sum3 =", "x in range(len(row) - 1)]) == len(row) - 1 def WeightPPpoints(pp, my_weights): Pareto_points", "len(normalizedPoints): row = normalizedPoints[rowNr] rowIP = inputPoints[rowNr] if dominates( row, normalized ): #", "CaseReader from optigurator.utils import recording_filename def get_case_reader(data_dir, problem_constants): return CaseReader(recording_filename(data_dir, problem_constants.id)) def generate_valid_points(problem_constants,", "C[:, 1].max(), C[:, 2].max()] S_positive = np.zeros((Nrofrows, 1)) S_negative = np.zeros((Nrofrows, 1)) C_value", "= np.zeros((Nrofrows, 1)) # Vektor_ID_optimala=np.zeros((1,5)) for i in range(0, Nrofrows): S_positive[i] = (", "Nrofrows): # summerar värden kolonnvis till nämnare för normalisering sum1 = sum1 +", "= sum1 + A[i, 0] ** 2 sum2 = sum2 + A[i, 1]", "= list(generate_valid_points(problem_constants, crm)) pareto_points, dominated_points, dp, pp = calculate(input_points, dominates) my_weights = np.matrix(", 
"nämnare för normalisering sum1 = sum1 + normalizedPoints[i, 0] ** 2 sum2 =", "my_weights): Pareto_points = pp np.size(Pareto_points) Nrofrows_pareto = np.size(Pareto_points, 0) # skapar en vektor", "+ (C[i, 2] - A_negative[2]) ** 2 ) ** 0.5 C_value[i] = S_negative[i]", "( model_case.outputs[\"usability.min_max_step_height\"][1] <= problem_constants.step_height.upper and model_case.outputs[\"usability.min_max_step_depth\"][0] >= problem_constants.step_depth.lower and model_case.outputs[\"usability.min_free_height\"][0] > problem_constants.free_height_lower ):", "är större än kandidaten. nonDominated = False dominatedPoints.add(tuple(candidateRow)) rowNr += 1 else: rowNr", "for x in range(len(row) - 1)]) == len(row) - 1 def WeightPPpoints(pp, my_weights):", "in om utmanare är större än kandidaten. nonDominated = False dominatedPoints.add(tuple(candidateRow)) rowNr +=", "normalized ): # Går in om candidateRow är bättre än utmanaren. normalizedPoints =", "A = np.delete(Pareto_points, 3, 1) np.size(A) # definerar storleken på matrisen som kommer", "): yield [ model_case.outputs[\"price_availability.total_price\"][0], model_case.outputs[\"usability.usability_penalty\"][0], model_case.outputs[\"price_availability.total_delivery_time\"][0], i, ] def calculate(inputPoints, dominates): paretoPoints =", "# Loop för 5 olika viktningar -> 5 optimala pareto punkter som output", "model_case.outputs[\"price_availability.total_delivery_time\"][0], i, ] def calculate(inputPoints, dominates): paretoPoints = set() candidateRowNr = 0 dominatedPoints", "0 for i in range(0, len(normalizedPoints)): # summerar värden kolonnvis till nämnare för", "= B[i, 0] * my_weights[j, 0] C[i, 1] = B[i, 1] * my_weights[j,", "- A_negative[1]) ** 2 + (C[i, 2] - A_negative[2]) ** 2 ) **", "lösningen har Vektor_ID_optimala = np.array(ID_vektor[Row_best_option]).tolist() IDpoints.append(int(max(Vektor_ID_optimala))) return IDpoints def generate_pareto_cases(data_dir, problem_constants): crm =", "B = A / 
myarray_normalize # kopierar matrisen och multiplicerar kolonnvis med viktningar", "sum2 = sum2 + A[i, 1] ** 2 sum3 = sum3 + A[i,", "while True: candidateRow = inputPoints[candidateRowNr] normalized = normalizedPoints[normalizedRowNr] normalizedPoints = np.delete(normalizedPoints, normalizedRowNr, 0)", "= 0 sum2 = 0 sum3 = 0 for i in range(0, Nrofrows):", "A[i, 1] ** 2 sum3 = sum3 + A[i, 2] ** 2 #", "# Normaliserar matrisen B = A / myarray_normalize # kopierar matrisen och multiplicerar", "in range(0, len(my_weights)): for i in range(0, Nrofrows): C[i, 0] = B[i, 0]", "dp, pp = calculate(input_points, dominates) my_weights = np.matrix( [[1, 0, 0], [0, 1,", "värden A_positive samt icke ideala värden A_negative A_positive = [C[:, 0].min(), C[:, 1].min(),", "punkter som output for j in range(0, len(my_weights)): for i in range(0, Nrofrows):", "from optigurator.utils import recording_filename def get_case_reader(data_dir, problem_constants): return CaseReader(recording_filename(data_dir, problem_constants.id)) def generate_valid_points(problem_constants, crm):", "1) np.size(A) # definerar storleken på matrisen som kommer som paretopoints output Nrofcolumns", "1] - A_positive[1]) ** 2 + (C[i, 2] - A_positive[2]) ** 2 )", "sedan fylls med bästa lösningarnas ID IDpoints = [] # skapar en kopia", "crm = get_case_reader(data_dir, problem_constants) input_points = list(generate_valid_points(problem_constants, crm)) pareto_points, dominated_points, dp, pp =", "= np.delete(normalizedPoints, normalizedRowNr, 0) inputPoints.remove(candidateRow) rowNr = 0 nonDominated = True while len(normalizedPoints)", "[sum1 ** 0.5, sum2 ** 0.5, sum3 ** 0.5, 1] # Normaliserar matrisen", "for i in range(0, Nrofrows): C[i, 0] = B[i, 0] * my_weights[j, 0]", "): # Går in om candidateRow är bättre än utmanaren. 
normalizedPoints = np.delete(normalizedPoints,", "fram vilket ingående ID lösningen har Vektor_ID_optimala = np.array(ID_vektor[Row_best_option]).tolist() IDpoints.append(int(max(Vektor_ID_optimala))) return IDpoints def", "def dominates(row, normalized): # Beräknar om utmanare är bättre än candidate. return sum([row[x]", "om candidateRow är bättre än utmanaren. normalizedPoints = np.delete(normalizedPoints, rowNr, 0) inputPoints.remove(rowIP) dominatedPoints.add(tuple(rowIP))", "= pp np.size(Pareto_points) Nrofrows_pareto = np.size(Pareto_points, 0) # skapar en vektor med ID", "= np.zeros((Nrofrows, 1)) S_negative = np.zeros((Nrofrows, 1)) C_value = np.zeros((Nrofrows, 1)) # Vektor_ID_optimala=np.zeros((1,5))", "2] ** 2 # definerar en vektor med normaliseringsvärden myarray_normalize = [sum1 **", "rowIP = inputPoints[rowNr] if dominates( row, normalized ): # Går in om candidateRow", "i in range(0, Nrofrows): S_positive[i] = ( (C[i, 0] - A_positive[0]) ** 2", "= sum2 + normalizedPoints[i, 1] ** 2 sum3 = sum3 + normalizedPoints[i, 2]", "0.5] # Normaliserar matrisen B = A / myarray_normalize # kopierar matrisen och", "2] - A_negative[2]) ** 2 ) ** 0.5 C_value[i] = S_negative[i] / (S_negative[i]", "S_negative = np.zeros((Nrofrows, 1)) C_value = np.zeros((Nrofrows, 1)) # Vektor_ID_optimala=np.zeros((1,5)) for i in", "för normalisering sum1 = sum1 + A[i, 0] ** 2 sum2 = sum2", "med ID ID_vektor = np.delete(Pareto_points, [0, 1, 2], 1).tolist() # skapar matris med", "** 0.5, sum2 ** 0.5, sum3 ** 0.5] # Normaliserar matrisen B =", "for j in range(0, len(my_weights)): for i in range(0, Nrofrows): C[i, 0] =", "int(normalized[3]) paretoPoints.add(tuple(candidateRow)) if len(normalizedPoints) == 0: # SLutar när man gått igenom alla", "i in range(0, Nrofrows): # summerar värden kolonnvis till nämnare för normalisering sum1", "största värdet Row_best_option = np.argmax(C_value) # ta fram vilket ingående ID lösningen har", "på matrisen som normaliseras senare B = A.copy() sum1 = 
0 sum2 =", "problem_constants): return CaseReader(recording_filename(data_dir, problem_constants.id)) def generate_valid_points(problem_constants, crm): for (i, case_id) in enumerate(crm.list_cases()): model_case", "= 0 for i in range(0, len(normalizedPoints)): # summerar värden kolonnvis till nämnare", "= np.matrix( [[1, 0, 0], [0, 1, 0], [0, 0, 1]] ) #", "senare normalizedPoints = np.array(inputPoints.copy()) sum1 = 0 sum2 = 0 sum3 = 0", "my_weights = np.matrix( [[1, 0, 0], [0, 1, 0], [0, 0, 1]] )", "list(generate_valid_points(problem_constants, crm)) pareto_points, dominated_points, dp, pp = calculate(input_points, dominates) my_weights = np.matrix( [[1,", "else: rowNr += 1 if nonDominated: # Lägg till nondominated punkter till pareto", "in range(len(row) - 1)]) == len(row) - 1 def WeightPPpoints(pp, my_weights): Pareto_points =", "normalisering sum1 = sum1 + A[i, 0] ** 2 sum2 = sum2 +", "= [C[:, 0].min(), C[:, 1].min(), C[:, 2].min()] A_negative = [C[:, 0].max(), C[:, 1].max(),", "generate_pareto_cases(data_dir, problem_constants): crm = get_case_reader(data_dir, problem_constants) input_points = list(generate_valid_points(problem_constants, crm)) pareto_points, dominated_points, dp,", "matrisen B = A / myarray_normalize # kopierar matrisen och multiplicerar kolonnvis med", "2] = B[i, 2] * my_weights[j, 2] # Definera ideala värden A_positive samt", "[sum1 ** 0.5, sum2 ** 0.5, sum3 ** 0.5] # Normaliserar matrisen B", "B[i, 0] * my_weights[j, 0] C[i, 1] = B[i, 1] * my_weights[j, 1]", "Lägg till nondominated punkter till pareto ID = int(normalized[3]) paretoPoints.add(tuple(candidateRow)) if len(normalizedPoints) ==", "0.5, 1] # Normaliserar matrisen normalizedPoints = np.array(inputPoints) / np.array(myarray_normalize) while True: candidateRow", "1 def WeightPPpoints(pp, my_weights): Pareto_points = pp np.size(Pareto_points) Nrofrows_pareto = np.size(Pareto_points, 0) #", "nämnare för normalisering sum1 = sum1 + A[i, 0] ** 2 sum2 =", "(C[i, 0] - A_negative[0]) ** 
2 + (C[i, 1] - A_negative[1]) ** 2", "to pick points from the ParetoFront pareto_case_ids = WeightPPpoints(pp, my_weights) for i in", "= B[i, 1] * my_weights[j, 1] C[i, 2] = B[i, 2] * my_weights[j,", "bästa lösningarnas ID IDpoints = [] # skapar en kopia på matrisen som", "pp = calculate(input_points, dominates) my_weights = np.matrix( [[1, 0, 0], [0, 1, 0],", "# Definera ideala värden A_positive samt icke ideala värden A_negative A_positive = [C[:,", "som har det största värdet Row_best_option = np.argmax(C_value) # ta fram vilket ingående", "# Weights used to pick points from the ParetoFront pareto_case_ids = WeightPPpoints(pp, my_weights)", "1].min(), C[:, 2].min()] A_negative = [C[:, 0].max(), C[:, 1].max(), C[:, 2].max()] S_positive =", "= S_negative[i] / (S_negative[i] + S_positive[i]) Best_value = C_value.max() # ta fram vilken", "IDpoints def generate_pareto_cases(data_dir, problem_constants): crm = get_case_reader(data_dir, problem_constants) input_points = list(generate_valid_points(problem_constants, crm)) pareto_points,", "+= 1 else: rowNr += 1 if nonDominated: # Lägg till nondominated punkter", "matris med outputvärden utan ID kolonn A = np.delete(Pareto_points, 3, 1) np.size(A) #", "in range(0, Nrofrows): C[i, 0] = B[i, 0] * my_weights[j, 0] C[i, 1]", "S_negative[i] / (S_negative[i] + S_positive[i]) Best_value = C_value.max() # ta fram vilken rad", "B[i, 2] * my_weights[j, 2] # Definera ideala värden A_positive samt icke ideala", "matrisen normalizedPoints = np.array(inputPoints) / np.array(myarray_normalize) while True: candidateRow = inputPoints[candidateRowNr] normalized =", "= (Nrofrows, Nrofcolumns) # Skapar matris som sedan fylls med bästa lösningarnas ID", "np.matrix( [[1, 0, 0], [0, 1, 0], [0, 0, 1]] ) # Weights", "kopia på matrisen som normaliseras senare normalizedPoints = np.array(inputPoints.copy()) sum1 = 0 sum2", "definerar storleken på matrisen som kommer som paretopoints output Nrofcolumns = np.size(A, 1)", "while len(normalizedPoints) 
!= 0 and rowNr < len(normalizedPoints): row = normalizedPoints[rowNr] rowIP =", "2 sum2 = sum2 + normalizedPoints[i, 1] ** 2 sum3 = sum3 +", "Nrofrows_pareto = np.size(Pareto_points, 0) # skapar en vektor med ID ID_vektor = np.delete(Pareto_points,", "for i in range(0, Nrofrows): # summerar värden kolonnvis till nämnare för normalisering", "0] * my_weights[j, 0] C[i, 1] = B[i, 1] * my_weights[j, 1] C[i,", "A_positive samt icke ideala värden A_negative A_positive = [C[:, 0].min(), C[:, 1].min(), C[:,", "S_positive = np.zeros((Nrofrows, 1)) S_negative = np.zeros((Nrofrows, 1)) C_value = np.zeros((Nrofrows, 1)) #", "A.copy() sum1 = 0 sum2 = 0 sum3 = 0 for i in", "sum([row[x] >= normalized[x] for x in range(len(row) - 1)]) == len(row) - 1", "inputPoints[candidateRowNr] normalized = normalizedPoints[normalizedRowNr] normalizedPoints = np.delete(normalizedPoints, normalizedRowNr, 0) inputPoints.remove(candidateRow) rowNr = 0", "in om candidateRow är bättre än utmanaren. normalizedPoints = np.delete(normalizedPoints, rowNr, 0) inputPoints.remove(rowIP)", "viktningar -> 5 optimala pareto punkter som output for j in range(0, len(my_weights)):", "problem_constants.step_depth.lower and model_case.outputs[\"usability.min_free_height\"][0] > problem_constants.free_height_lower ): yield [ model_case.outputs[\"price_availability.total_price\"][0], model_case.outputs[\"usability.usability_penalty\"][0], model_case.outputs[\"price_availability.total_delivery_time\"][0], i, ]", "1)]) == len(row) - 1 def WeightPPpoints(pp, my_weights): Pareto_points = pp np.size(Pareto_points) Nrofrows_pareto", "2] # Definera ideala värden A_positive samt icke ideala värden A_negative A_positive =", "0].max(), C[:, 1].max(), C[:, 2].max()] S_positive = np.zeros((Nrofrows, 1)) S_negative = np.zeros((Nrofrows, 1))", "matrisen och multiplicerar kolonnvis med viktningar C = B.copy() # Loop för 5", "0] ** 2 sum2 = sum2 + A[i, 1] ** 2 sum3 =", "= inputPoints[candidateRowNr] normalized = 
normalizedPoints[normalizedRowNr] normalizedPoints = np.delete(normalizedPoints, normalizedRowNr, 0) inputPoints.remove(candidateRow) rowNr =", "generate_valid_points(problem_constants, crm): for (i, case_id) in enumerate(crm.list_cases()): model_case = crm.get_case(case_id) if ( model_case.outputs[\"usability.min_max_step_height\"][1]", "normalized[x] for x in range(len(row) - 1)]) == len(row) - 1 def WeightPPpoints(pp,", "med normaliseringsvärden myarray_normalize = [sum1 ** 0.5, sum2 ** 0.5, sum3 ** 0.5,", "model_case.outputs[\"usability.min_max_step_height\"][1] <= problem_constants.step_height.upper and model_case.outputs[\"usability.min_max_step_depth\"][0] >= problem_constants.step_depth.lower and model_case.outputs[\"usability.min_free_height\"][0] > problem_constants.free_height_lower ): yield", "np.zeros((Nrofrows, 1)) # Vektor_ID_optimala=np.zeros((1,5)) for i in range(0, Nrofrows): S_positive[i] = ( (C[i,", "0 sum3 = 0 for i in range(0, Nrofrows): # summerar värden kolonnvis", "= sum3 + A[i, 2] ** 2 # definerar en vektor med normaliseringsvärden", "ideala värden A_negative A_positive = [C[:, 0].min(), C[:, 1].min(), C[:, 2].min()] A_negative =", "np.array(list(dominatedPoints)) pp = np.array(list(paretoPoints)) return paretoPoints, dominatedPoints, dp, pp def dominates(row, normalized): #", "normalizedPoints = np.delete(normalizedPoints, rowNr, 0) inputPoints.remove(rowIP) dominatedPoints.add(tuple(rowIP)) elif dominates( normalized, row ): #", "sum1 = 0 sum2 = 0 sum3 = 0 for i in range(0,", "0 dominatedPoints = set() normalizedRowNr = 0 # skapar en kopia på matrisen", "** 0.5, sum3 ** 0.5, 1] # Normaliserar matrisen normalizedPoints = np.array(inputPoints) /", "np.delete(Pareto_points, [0, 1, 2], 1).tolist() # skapar matris med outputvärden utan ID kolonn", "skapar en kopia på matrisen som normaliseras senare normalizedPoints = np.array(inputPoints.copy()) sum1 =", "np.size(A, 0) sizeofA = (Nrofrows, Nrofcolumns) # Skapar matris som sedan fylls med", 
"= normalizedPoints[rowNr] rowIP = inputPoints[rowNr] if dominates( row, normalized ): # Går in", "# ta fram vilket ingående ID lösningen har Vektor_ID_optimala = np.array(ID_vektor[Row_best_option]).tolist() IDpoints.append(int(max(Vektor_ID_optimala))) return", "- A_positive[2]) ** 2 ) ** 0.5 S_negative[i] = ( (C[i, 0] -", "vektor med ID ID_vektor = np.delete(Pareto_points, [0, 1, 2], 1).tolist() # skapar matris", "normaliseringsvärden myarray_normalize = [sum1 ** 0.5, sum2 ** 0.5, sum3 ** 0.5, 1]", "model_case.outputs[\"usability.min_max_step_depth\"][0] >= problem_constants.step_depth.lower and model_case.outputs[\"usability.min_free_height\"][0] > problem_constants.free_height_lower ): yield [ model_case.outputs[\"price_availability.total_price\"][0], model_case.outputs[\"usability.usability_penalty\"][0], model_case.outputs[\"price_availability.total_delivery_time\"][0],", "värden kolonnvis till nämnare för normalisering sum1 = sum1 + A[i, 0] **", "2] - A_positive[2]) ** 2 ) ** 0.5 S_negative[i] = ( (C[i, 0]", "SLutar när man gått igenom alla punkter. break dp = np.array(list(dominatedPoints)) pp =", "len(normalizedPoints) != 0 and rowNr < len(normalizedPoints): row = normalizedPoints[rowNr] rowIP = inputPoints[rowNr]", "# Går in om utmanare är större än kandidaten. nonDominated = False dominatedPoints.add(tuple(candidateRow))", "np.delete(Pareto_points, 3, 1) np.size(A) # definerar storleken på matrisen som kommer som paretopoints", "definerar en vektor med normaliseringsvärden myarray_normalize = [sum1 ** 0.5, sum2 ** 0.5,", "0 sum4 = 0 for i in range(0, len(normalizedPoints)): # summerar värden kolonnvis", "punkter. break dp = np.array(list(dominatedPoints)) pp = np.array(list(paretoPoints)) return paretoPoints, dominatedPoints, dp, pp", "# Beräknar om utmanare är bättre än candidate. 
return sum([row[x] >= normalized[x] for", "B[i, 1] * my_weights[j, 1] C[i, 2] = B[i, 2] * my_weights[j, 2]", "1)) # Vektor_ID_optimala=np.zeros((1,5)) for i in range(0, Nrofrows): S_positive[i] = ( (C[i, 0]", "= [] # skapar en kopia på matrisen som normaliseras senare B =", "calculate(inputPoints, dominates): paretoPoints = set() candidateRowNr = 0 dominatedPoints = set() normalizedRowNr =", "multiplicerar kolonnvis med viktningar C = B.copy() # Loop för 5 olika viktningar", "A_negative[0]) ** 2 + (C[i, 1] - A_negative[1]) ** 2 + (C[i, 2]", "A[i, 0] ** 2 sum2 = sum2 + A[i, 1] ** 2 sum3", "matrisen som normaliseras senare normalizedPoints = np.array(inputPoints.copy()) sum1 = 0 sum2 = 0", "med outputvärden utan ID kolonn A = np.delete(Pareto_points, 3, 1) np.size(A) # definerar", "[0, 1, 2], 1).tolist() # skapar matris med outputvärden utan ID kolonn A", "calculate(input_points, dominates) my_weights = np.matrix( [[1, 0, 0], [0, 1, 0], [0, 0,", "som kommer som paretopoints output Nrofcolumns = np.size(A, 1) Nrofrows = np.size(A, 0)", "( (C[i, 0] - A_negative[0]) ** 2 + (C[i, 1] - A_negative[1]) **", "** 0.5] # Normaliserar matrisen B = A / myarray_normalize # kopierar matrisen", "row, normalized ): # Går in om candidateRow är bättre än utmanaren. 
normalizedPoints", "= np.size(A, 1) Nrofrows = np.size(A, 0) sizeofA = (Nrofrows, Nrofcolumns) # Skapar", "+ S_positive[i]) Best_value = C_value.max() # ta fram vilken rad i C_vektorn som", "C[i, 1] = B[i, 1] * my_weights[j, 1] C[i, 2] = B[i, 2]", "def calculate(inputPoints, dominates): paretoPoints = set() candidateRowNr = 0 dominatedPoints = set() normalizedRowNr", "if nonDominated: # Lägg till nondominated punkter till pareto ID = int(normalized[3]) paretoPoints.add(tuple(candidateRow))", "= sum3 + normalizedPoints[i, 2] ** 2 # definerar en vektor med normaliseringsvärden", "vilken rad i C_vektorn som har det största värdet Row_best_option = np.argmax(C_value) #", "S_negative[i] = ( (C[i, 0] - A_negative[0]) ** 2 + (C[i, 1] -", "!= 0 and rowNr < len(normalizedPoints): row = normalizedPoints[rowNr] rowIP = inputPoints[rowNr] if", "kandidaten. nonDominated = False dominatedPoints.add(tuple(candidateRow)) rowNr += 1 else: rowNr += 1 if", "har Vektor_ID_optimala = np.array(ID_vektor[Row_best_option]).tolist() IDpoints.append(int(max(Vektor_ID_optimala))) return IDpoints def generate_pareto_cases(data_dir, problem_constants): crm = get_case_reader(data_dir,", "som normaliseras senare normalizedPoints = np.array(inputPoints.copy()) sum1 = 0 sum2 = 0 sum3", "1 else: rowNr += 1 if nonDominated: # Lägg till nondominated punkter till", "1)) C_value = np.zeros((Nrofrows, 1)) # Vektor_ID_optimala=np.zeros((1,5)) for i in range(0, Nrofrows): S_positive[i]", "olika viktningar -> 5 optimala pareto punkter som output for j in range(0,", "import numpy as np from openmdao.api import CaseReader from optigurator.utils import recording_filename def", "** 0.5, sum3 ** 0.5] # Normaliserar matrisen B = A / myarray_normalize", "för 5 olika viktningar -> 5 optimala pareto punkter som output for j", "0.5, sum3 ** 0.5] # Normaliserar matrisen B = A / myarray_normalize #", "kolonnvis med viktningar C = B.copy() # Loop för 5 olika viktningar ->", "bättre än utmanaren. 
normalizedPoints = np.delete(normalizedPoints, rowNr, 0) inputPoints.remove(rowIP) dominatedPoints.add(tuple(rowIP)) elif dominates( normalized,", "i in range(0, len(normalizedPoints)): # summerar värden kolonnvis till nämnare för normalisering sum1", "sum2 = sum2 + normalizedPoints[i, 1] ** 2 sum3 = sum3 + normalizedPoints[i,", "paretopoints output Nrofcolumns = np.size(A, 1) Nrofrows = np.size(A, 0) sizeofA = (Nrofrows,", "** 2 + (C[i, 1] - A_negative[1]) ** 2 + (C[i, 2] -", "[0, 1, 0], [0, 0, 1]] ) # Weights used to pick points", "= A.copy() sum1 = 0 sum2 = 0 sum3 = 0 for i", "j in range(0, len(my_weights)): for i in range(0, Nrofrows): C[i, 0] = B[i,", "openmdao.api import CaseReader from optigurator.utils import recording_filename def get_case_reader(data_dir, problem_constants): return CaseReader(recording_filename(data_dir, problem_constants.id))", "problem_constants.free_height_lower ): yield [ model_case.outputs[\"price_availability.total_price\"][0], model_case.outputs[\"usability.usability_penalty\"][0], model_case.outputs[\"price_availability.total_delivery_time\"][0], i, ] def calculate(inputPoints, dominates): paretoPoints", "2 + (C[i, 1] - A_positive[1]) ** 2 + (C[i, 2] - A_positive[2])", "ideala värden A_positive samt icke ideala värden A_negative A_positive = [C[:, 0].min(), C[:,", "WeightPPpoints(pp, my_weights): Pareto_points = pp np.size(Pareto_points) Nrofrows_pareto = np.size(Pareto_points, 0) # skapar en", "2 # definerar en vektor med normaliseringsvärden myarray_normalize = [sum1 ** 0.5, sum2", "= C_value.max() # ta fram vilken rad i C_vektorn som har det största", "* my_weights[j, 0] C[i, 1] = B[i, 1] * my_weights[j, 1] C[i, 2]", "(i, case_id) in enumerate(crm.list_cases()): model_case = crm.get_case(case_id) if ( model_case.outputs[\"usability.min_max_step_height\"][1] <= problem_constants.step_height.upper and", "än kandidaten. 
nonDominated = False dominatedPoints.add(tuple(candidateRow)) rowNr += 1 else: rowNr += 1", "True while len(normalizedPoints) != 0 and rowNr < len(normalizedPoints): row = normalizedPoints[rowNr] rowIP", "[C[:, 0].max(), C[:, 1].max(), C[:, 2].max()] S_positive = np.zeros((Nrofrows, 1)) S_negative = np.zeros((Nrofrows,", "0 # skapar en kopia på matrisen som normaliseras senare normalizedPoints = np.array(inputPoints.copy())", "1].max(), C[:, 2].max()] S_positive = np.zeros((Nrofrows, 1)) S_negative = np.zeros((Nrofrows, 1)) C_value =", "sum2 ** 0.5, sum3 ** 0.5, 1] # Normaliserar matrisen normalizedPoints = np.array(inputPoints)", "and rowNr < len(normalizedPoints): row = normalizedPoints[rowNr] rowIP = inputPoints[rowNr] if dominates( row,", "normalizedPoints = np.array(inputPoints) / np.array(myarray_normalize) while True: candidateRow = inputPoints[candidateRowNr] normalized = normalizedPoints[normalizedRowNr]", "C[:, 2].max()] S_positive = np.zeros((Nrofrows, 1)) S_negative = np.zeros((Nrofrows, 1)) C_value = np.zeros((Nrofrows,", "ID = int(normalized[3]) paretoPoints.add(tuple(candidateRow)) if len(normalizedPoints) == 0: # SLutar när man gått", "[[1, 0, 0], [0, 1, 0], [0, 0, 1]] ) # Weights used", "= 0 dominatedPoints = set() normalizedRowNr = 0 # skapar en kopia på", "icke ideala värden A_negative A_positive = [C[:, 0].min(), C[:, 1].min(), C[:, 2].min()] A_negative", "fram vilken rad i C_vektorn som har det största värdet Row_best_option = np.argmax(C_value)", "# kopierar matrisen och multiplicerar kolonnvis med viktningar C = B.copy() # Loop", "= 0 sum2 = 0 sum3 = 0 sum4 = 0 for i", "normalized, row ): # Går in om utmanare är större än kandidaten. 
nonDominated", "Best_value = C_value.max() # ta fram vilken rad i C_vektorn som har det", "myarray_normalize # kopierar matrisen och multiplicerar kolonnvis med viktningar C = B.copy() #", "+ normalizedPoints[i, 2] ** 2 # definerar en vektor med normaliseringsvärden myarray_normalize =", "# summerar värden kolonnvis till nämnare för normalisering sum1 = sum1 + A[i,", "ingående ID lösningen har Vektor_ID_optimala = np.array(ID_vektor[Row_best_option]).tolist() IDpoints.append(int(max(Vektor_ID_optimala))) return IDpoints def generate_pareto_cases(data_dir, problem_constants):", "(C[i, 2] - A_negative[2]) ** 2 ) ** 0.5 C_value[i] = S_negative[i] /", "1, 0], [0, 0, 1]] ) # Weights used to pick points from", "0].min(), C[:, 1].min(), C[:, 2].min()] A_negative = [C[:, 0].max(), C[:, 1].max(), C[:, 2].max()]", "] def calculate(inputPoints, dominates): paretoPoints = set() candidateRowNr = 0 dominatedPoints = set()", "- A_positive[1]) ** 2 + (C[i, 2] - A_positive[2]) ** 2 ) **", "0, 0], [0, 1, 0], [0, 0, 1]] ) # Weights used to", "Nrofrows = np.size(A, 0) sizeofA = (Nrofrows, Nrofcolumns) # Skapar matris som sedan", "= [sum1 ** 0.5, sum2 ** 0.5, sum3 ** 0.5, 1] # Normaliserar", "= 0 nonDominated = True while len(normalizedPoints) != 0 and rowNr < len(normalizedPoints):", "1] C[i, 2] = B[i, 2] * my_weights[j, 2] # Definera ideala värden", "till nämnare för normalisering sum1 = sum1 + A[i, 0] ** 2 sum2", "range(0, Nrofrows): S_positive[i] = ( (C[i, 0] - A_positive[0]) ** 2 + (C[i,", "utmanare är bättre än candidate. return sum([row[x] >= normalized[x] for x in range(len(row)", "har det största värdet Row_best_option = np.argmax(C_value) # ta fram vilket ingående ID", "A_positive[1]) ** 2 + (C[i, 2] - A_positive[2]) ** 2 ) ** 0.5", "0: # SLutar när man gått igenom alla punkter. 
break dp = np.array(list(dominatedPoints))", "2 + (C[i, 2] - A_negative[2]) ** 2 ) ** 0.5 C_value[i] =", "= 0 sum4 = 0 for i in range(0, len(normalizedPoints)): # summerar värden", "model_case.outputs[\"usability.min_free_height\"][0] > problem_constants.free_height_lower ): yield [ model_case.outputs[\"price_availability.total_price\"][0], model_case.outputs[\"usability.usability_penalty\"][0], model_case.outputs[\"price_availability.total_delivery_time\"][0], i, ] def calculate(inputPoints,", "1] ** 2 sum3 = sum3 + A[i, 2] ** 2 # definerar", "skapar en vektor med ID ID_vektor = np.delete(Pareto_points, [0, 1, 2], 1).tolist() #", "get_case_reader(data_dir, problem_constants): return CaseReader(recording_filename(data_dir, problem_constants.id)) def generate_valid_points(problem_constants, crm): for (i, case_id) in enumerate(crm.list_cases()):", "= int(normalized[3]) paretoPoints.add(tuple(candidateRow)) if len(normalizedPoints) == 0: # SLutar när man gått igenom", "normaliseringsvärden myarray_normalize = [sum1 ** 0.5, sum2 ** 0.5, sum3 ** 0.5] #", "False dominatedPoints.add(tuple(candidateRow)) rowNr += 1 else: rowNr += 1 if nonDominated: # Lägg", "0 sum3 = 0 sum4 = 0 for i in range(0, len(normalizedPoints)): #", "0 sum2 = 0 sum3 = 0 sum4 = 0 for i in", ">= problem_constants.step_depth.lower and model_case.outputs[\"usability.min_free_height\"][0] > problem_constants.free_height_lower ): yield [ model_case.outputs[\"price_availability.total_price\"][0], model_case.outputs[\"usability.usability_penalty\"][0], model_case.outputs[\"price_availability.total_delivery_time\"][0], i,", "normalizedPoints[rowNr] rowIP = inputPoints[rowNr] if dominates( row, normalized ): # Går in om", "return IDpoints def generate_pareto_cases(data_dir, problem_constants): crm = get_case_reader(data_dir, problem_constants) input_points = list(generate_valid_points(problem_constants, crm))", "candidateRow är bättre än utmanaren. 
normalizedPoints = np.delete(normalizedPoints, rowNr, 0) inputPoints.remove(rowIP) dominatedPoints.add(tuple(rowIP)) elif", "A_positive[0]) ** 2 + (C[i, 1] - A_positive[1]) ** 2 + (C[i, 2]", "np.argmax(C_value) # ta fram vilket ingående ID lösningen har Vektor_ID_optimala = np.array(ID_vektor[Row_best_option]).tolist() IDpoints.append(int(max(Vektor_ID_optimala)))", "sum2 = 0 sum3 = 0 for i in range(0, Nrofrows): # summerar", "för normalisering sum1 = sum1 + normalizedPoints[i, 0] ** 2 sum2 = sum2", "(C[i, 0] - A_positive[0]) ** 2 + (C[i, 1] - A_positive[1]) ** 2", "matrisen som kommer som paretopoints output Nrofcolumns = np.size(A, 1) Nrofrows = np.size(A,", "# ta fram vilken rad i C_vektorn som har det största värdet Row_best_option", "Loop för 5 olika viktningar -> 5 optimala pareto punkter som output for", "3, 1) np.size(A) # definerar storleken på matrisen som kommer som paretopoints output", "output for j in range(0, len(my_weights)): for i in range(0, Nrofrows): C[i, 0]", "pareto ID = int(normalized[3]) paretoPoints.add(tuple(candidateRow)) if len(normalizedPoints) == 0: # SLutar när man", "0 and rowNr < len(normalizedPoints): row = normalizedPoints[rowNr] rowIP = inputPoints[rowNr] if dominates(", "kolonnvis till nämnare för normalisering sum1 = sum1 + normalizedPoints[i, 0] ** 2", "0) sizeofA = (Nrofrows, Nrofcolumns) # Skapar matris som sedan fylls med bästa", "= ( (C[i, 0] - A_positive[0]) ** 2 + (C[i, 1] - A_positive[1])", "0] ** 2 sum2 = sum2 + normalizedPoints[i, 1] ** 2 sum3 =", "= True while len(normalizedPoints) != 0 and rowNr < len(normalizedPoints): row = normalizedPoints[rowNr]", "sum3 + A[i, 2] ** 2 # definerar en vektor med normaliseringsvärden myarray_normalize", "ID_vektor = np.delete(Pareto_points, [0, 1, 2], 1).tolist() # skapar matris med outputvärden utan", "rad i C_vektorn som har det största värdet Row_best_option = np.argmax(C_value) # ta", "ID kolonn A = np.delete(Pareto_points, 3, 1) np.size(A) # definerar storleken på matrisen", 
"optimala pareto punkter som output for j in range(0, len(my_weights)): for i in", "recording_filename def get_case_reader(data_dir, problem_constants): return CaseReader(recording_filename(data_dir, problem_constants.id)) def generate_valid_points(problem_constants, crm): for (i, case_id)", "IDpoints.append(int(max(Vektor_ID_optimala))) return IDpoints def generate_pareto_cases(data_dir, problem_constants): crm = get_case_reader(data_dir, problem_constants) input_points = list(generate_valid_points(problem_constants,", "# Lägg till nondominated punkter till pareto ID = int(normalized[3]) paretoPoints.add(tuple(candidateRow)) if len(normalizedPoints)", "matris som sedan fylls med bästa lösningarnas ID IDpoints = [] # skapar", "+ A[i, 2] ** 2 # definerar en vektor med normaliseringsvärden myarray_normalize =", "= [sum1 ** 0.5, sum2 ** 0.5, sum3 ** 0.5] # Normaliserar matrisen", "np.zeros((Nrofrows, 1)) S_negative = np.zeros((Nrofrows, 1)) C_value = np.zeros((Nrofrows, 1)) # Vektor_ID_optimala=np.zeros((1,5)) for", "0.5 C_value[i] = S_negative[i] / (S_negative[i] + S_positive[i]) Best_value = C_value.max() # ta", "# skapar en kopia på matrisen som normaliseras senare normalizedPoints = np.array(inputPoints.copy()) sum1", "kommer som paretopoints output Nrofcolumns = np.size(A, 1) Nrofrows = np.size(A, 0) sizeofA", "** 0.5, 1] # Normaliserar matrisen normalizedPoints = np.array(inputPoints) / np.array(myarray_normalize) while True:", "len(normalizedPoints) == 0: # SLutar när man gått igenom alla punkter. 
break dp", "sum3 ** 0.5] # Normaliserar matrisen B = A / myarray_normalize # kopierar", "crm.get_case(case_id) if ( model_case.outputs[\"usability.min_max_step_height\"][1] <= problem_constants.step_height.upper and model_case.outputs[\"usability.min_max_step_depth\"][0] >= problem_constants.step_depth.lower and model_case.outputs[\"usability.min_free_height\"][0] >", "2 ) ** 0.5 C_value[i] = S_negative[i] / (S_negative[i] + S_positive[i]) Best_value =", "Normaliserar matrisen B = A / myarray_normalize # kopierar matrisen och multiplicerar kolonnvis", "in range(0, len(normalizedPoints)): # summerar värden kolonnvis till nämnare för normalisering sum1 =", "C = B.copy() # Loop för 5 olika viktningar -> 5 optimala pareto", "CaseReader(recording_filename(data_dir, problem_constants.id)) def generate_valid_points(problem_constants, crm): for (i, case_id) in enumerate(crm.list_cases()): model_case = crm.get_case(case_id)", "0] C[i, 1] = B[i, 1] * my_weights[j, 1] C[i, 2] = B[i,", "model_case.outputs[\"usability.usability_penalty\"][0], model_case.outputs[\"price_availability.total_delivery_time\"][0], i, ] def calculate(inputPoints, dominates): paretoPoints = set() candidateRowNr = 0", "normalized = normalizedPoints[normalizedRowNr] normalizedPoints = np.delete(normalizedPoints, normalizedRowNr, 0) inputPoints.remove(candidateRow) rowNr = 0 nonDominated", "crm): for (i, case_id) in enumerate(crm.list_cases()): model_case = crm.get_case(case_id) if ( model_case.outputs[\"usability.min_max_step_height\"][1] <=", "from openmdao.api import CaseReader from optigurator.utils import recording_filename def get_case_reader(data_dir, problem_constants): return CaseReader(recording_filename(data_dir,", "fylls med bästa lösningarnas ID IDpoints = [] # skapar en kopia på", "<= problem_constants.step_height.upper and model_case.outputs[\"usability.min_max_step_depth\"][0] >= problem_constants.step_depth.lower and model_case.outputs[\"usability.min_free_height\"][0] > 
problem_constants.free_height_lower ): yield [", "normalizedRowNr, 0) inputPoints.remove(candidateRow) rowNr = 0 nonDominated = True while len(normalizedPoints) != 0", "= np.array(inputPoints) / np.array(myarray_normalize) while True: candidateRow = inputPoints[candidateRowNr] normalized = normalizedPoints[normalizedRowNr] normalizedPoints", "som output for j in range(0, len(my_weights)): for i in range(0, Nrofrows): C[i,", "sum2 ** 0.5, sum3 ** 0.5] # Normaliserar matrisen B = A /", "5 optimala pareto punkter som output for j in range(0, len(my_weights)): for i", "used to pick points from the ParetoFront pareto_case_ids = WeightPPpoints(pp, my_weights) for i", "sum3 = 0 for i in range(0, Nrofrows): # summerar värden kolonnvis till", "-> 5 optimala pareto punkter som output for j in range(0, len(my_weights)): for", "Går in om candidateRow är bättre än utmanaren. normalizedPoints = np.delete(normalizedPoints, rowNr, 0)", "+ (C[i, 2] - A_positive[2]) ** 2 ) ** 0.5 S_negative[i] = (", "(S_negative[i] + S_positive[i]) Best_value = C_value.max() # ta fram vilken rad i C_vektorn", "** 2 sum2 = sum2 + normalizedPoints[i, 1] ** 2 sum3 = sum3", "1]] ) # Weights used to pick points from the ParetoFront pareto_case_ids =", "sum1 = 0 sum2 = 0 sum3 = 0 sum4 = 0 for", "kolonnvis till nämnare för normalisering sum1 = sum1 + A[i, 0] ** 2", "A_negative A_positive = [C[:, 0].min(), C[:, 1].min(), C[:, 2].min()] A_negative = [C[:, 0].max(),", "rowNr += 1 else: rowNr += 1 if nonDominated: # Lägg till nondominated", "pareto_points, dominated_points, dp, pp = calculate(input_points, dominates) my_weights = np.matrix( [[1, 0, 0],", "+ A[i, 0] ** 2 sum2 = sum2 + A[i, 1] ** 2", "- A_positive[0]) ** 2 + (C[i, 1] - A_positive[1]) ** 2 + (C[i,", "C_value[i] = S_negative[i] / (S_negative[i] + S_positive[i]) Best_value = C_value.max() # ta fram", "problem_constants): crm = get_case_reader(data_dir, problem_constants) input_points = list(generate_valid_points(problem_constants, crm)) 
pareto_points, dominated_points, dp, pp", "rowNr < len(normalizedPoints): row = normalizedPoints[rowNr] rowIP = inputPoints[rowNr] if dominates( row, normalized", "= ( (C[i, 0] - A_negative[0]) ** 2 + (C[i, 1] - A_negative[1])", ">= normalized[x] for x in range(len(row) - 1)]) == len(row) - 1 def", "om utmanare är bättre än candidate. return sum([row[x] >= normalized[x] for x in", "A_negative = [C[:, 0].max(), C[:, 1].max(), C[:, 2].max()] S_positive = np.zeros((Nrofrows, 1)) S_negative", "return sum([row[x] >= normalized[x] for x in range(len(row) - 1)]) == len(row) -", "0.5, sum3 ** 0.5, 1] # Normaliserar matrisen normalizedPoints = np.array(inputPoints) / np.array(myarray_normalize)", "= set() candidateRowNr = 0 dominatedPoints = set() normalizedRowNr = 0 # skapar", "= 0 # skapar en kopia på matrisen som normaliseras senare normalizedPoints =", "sum1 + A[i, 0] ** 2 sum2 = sum2 + A[i, 1] **", "dominatedPoints, dp, pp def dominates(row, normalized): # Beräknar om utmanare är bättre än", "rowNr, 0) inputPoints.remove(rowIP) dominatedPoints.add(tuple(rowIP)) elif dominates( normalized, row ): # Går in om", "= np.array(inputPoints.copy()) sum1 = 0 sum2 = 0 sum3 = 0 sum4 =", "Row_best_option = np.argmax(C_value) # ta fram vilket ingående ID lösningen har Vektor_ID_optimala =", "for i in range(0, len(normalizedPoints)): # summerar värden kolonnvis till nämnare för normalisering", "om utmanare är större än kandidaten. 
nonDominated = False dominatedPoints.add(tuple(candidateRow)) rowNr += 1", "C_vektorn som har det största värdet Row_best_option = np.argmax(C_value) # ta fram vilket", "# Normaliserar matrisen normalizedPoints = np.array(inputPoints) / np.array(myarray_normalize) while True: candidateRow = inputPoints[candidateRowNr]", "normalizedPoints = np.delete(normalizedPoints, normalizedRowNr, 0) inputPoints.remove(candidateRow) rowNr = 0 nonDominated = True while", "output Nrofcolumns = np.size(A, 1) Nrofrows = np.size(A, 0) sizeofA = (Nrofrows, Nrofcolumns)", "nonDominated = False dominatedPoints.add(tuple(candidateRow)) rowNr += 1 else: rowNr += 1 if nonDominated:", "np from openmdao.api import CaseReader from optigurator.utils import recording_filename def get_case_reader(data_dir, problem_constants): return", "0] - A_positive[0]) ** 2 + (C[i, 1] - A_positive[1]) ** 2 +", "när man gått igenom alla punkter. break dp = np.array(list(dominatedPoints)) pp = np.array(list(paretoPoints))", "[] # skapar en kopia på matrisen som normaliseras senare B = A.copy()", "my_weights[j, 0] C[i, 1] = B[i, 1] * my_weights[j, 1] C[i, 2] =", "np.array(inputPoints.copy()) sum1 = 0 sum2 = 0 sum3 = 0 sum4 = 0", "normalizedPoints = np.array(inputPoints.copy()) sum1 = 0 sum2 = 0 sum3 = 0 sum4", "0], [0, 1, 0], [0, 0, 1]] ) # Weights used to pick", "[C[:, 0].min(), C[:, 1].min(), C[:, 2].min()] A_negative = [C[:, 0].max(), C[:, 1].max(), C[:,", "def get_case_reader(data_dir, problem_constants): return CaseReader(recording_filename(data_dir, problem_constants.id)) def generate_valid_points(problem_constants, crm): for (i, case_id) in", "my_weights[j, 1] C[i, 2] = B[i, 2] * my_weights[j, 2] # Definera ideala", "> problem_constants.free_height_lower ): yield [ model_case.outputs[\"price_availability.total_price\"][0], model_case.outputs[\"usability.usability_penalty\"][0], model_case.outputs[\"price_availability.total_delivery_time\"][0], i, ] def calculate(inputPoints, dominates):", "2 sum2 = sum2 + 
A[i, 1] ** 2 sum3 = sum3 +", "med bästa lösningarnas ID IDpoints = [] # skapar en kopia på matrisen", "** 2 ) ** 0.5 C_value[i] = S_negative[i] / (S_negative[i] + S_positive[i]) Best_value", "+ A[i, 1] ** 2 sum3 = sum3 + A[i, 2] ** 2", "sum2 = 0 sum3 = 0 sum4 = 0 for i in range(0,", "np.array(inputPoints) / np.array(myarray_normalize) while True: candidateRow = inputPoints[candidateRowNr] normalized = normalizedPoints[normalizedRowNr] normalizedPoints =", "igenom alla punkter. break dp = np.array(list(dominatedPoints)) pp = np.array(list(paretoPoints)) return paretoPoints, dominatedPoints,", "return CaseReader(recording_filename(data_dir, problem_constants.id)) def generate_valid_points(problem_constants, crm): for (i, case_id) in enumerate(crm.list_cases()): model_case =", "en vektor med ID ID_vektor = np.delete(Pareto_points, [0, 1, 2], 1).tolist() # skapar", "# Vektor_ID_optimala=np.zeros((1,5)) for i in range(0, Nrofrows): S_positive[i] = ( (C[i, 0] -", "in range(0, Nrofrows): S_positive[i] = ( (C[i, 0] - A_positive[0]) ** 2 +", "** 2 sum2 = sum2 + A[i, 1] ** 2 sum3 = sum3", "** 0.5 S_negative[i] = ( (C[i, 0] - A_negative[0]) ** 2 + (C[i,", "man gått igenom alla punkter. 
break dp = np.array(list(dominatedPoints)) pp = np.array(list(paretoPoints)) return", "+ (C[i, 1] - A_positive[1]) ** 2 + (C[i, 2] - A_positive[2]) **", "= np.delete(Pareto_points, [0, 1, 2], 1).tolist() # skapar matris med outputvärden utan ID", "np.size(A) # definerar storleken på matrisen som kommer som paretopoints output Nrofcolumns =", "1)) S_negative = np.zeros((Nrofrows, 1)) C_value = np.zeros((Nrofrows, 1)) # Vektor_ID_optimala=np.zeros((1,5)) for i", "nonDominated: # Lägg till nondominated punkter till pareto ID = int(normalized[3]) paretoPoints.add(tuple(candidateRow)) if", "outputvärden utan ID kolonn A = np.delete(Pareto_points, 3, 1) np.size(A) # definerar storleken", "= 0 sum3 = 0 for i in range(0, Nrofrows): # summerar värden", "np.zeros((Nrofrows, 1)) C_value = np.zeros((Nrofrows, 1)) # Vektor_ID_optimala=np.zeros((1,5)) for i in range(0, Nrofrows):", "0) # skapar en vektor med ID ID_vektor = np.delete(Pareto_points, [0, 1, 2],", "* my_weights[j, 2] # Definera ideala värden A_positive samt icke ideala värden A_negative", "vektor med normaliseringsvärden myarray_normalize = [sum1 ** 0.5, sum2 ** 0.5, sum3 **", "matrisen som normaliseras senare B = A.copy() sum1 = 0 sum2 = 0", "normalizedPoints[i, 0] ** 2 sum2 = sum2 + normalizedPoints[i, 1] ** 2 sum3", "set() candidateRowNr = 0 dominatedPoints = set() normalizedRowNr = 0 # skapar en", "än utmanaren. normalizedPoints = np.delete(normalizedPoints, rowNr, 0) inputPoints.remove(rowIP) dominatedPoints.add(tuple(rowIP)) elif dominates( normalized, row", "= B.copy() # Loop för 5 olika viktningar -> 5 optimala pareto punkter", "dominates) my_weights = np.matrix( [[1, 0, 0], [0, 1, 0], [0, 0, 1]]", "större än kandidaten. nonDominated = False dominatedPoints.add(tuple(candidateRow)) rowNr += 1 else: rowNr +=", "return paretoPoints, dominatedPoints, dp, pp def dominates(row, normalized): # Beräknar om utmanare är", "bättre än candidate. 
return sum([row[x] >= normalized[x] for x in range(len(row) - 1)])", "pareto punkter som output for j in range(0, len(my_weights)): for i in range(0,", "enumerate(crm.list_cases()): model_case = crm.get_case(case_id) if ( model_case.outputs[\"usability.min_max_step_height\"][1] <= problem_constants.step_height.upper and model_case.outputs[\"usability.min_max_step_depth\"][0] >= problem_constants.step_depth.lower", "värden kolonnvis till nämnare för normalisering sum1 = sum1 + normalizedPoints[i, 0] **", "candidateRowNr = 0 dominatedPoints = set() normalizedRowNr = 0 # skapar en kopia", "kopia på matrisen som normaliseras senare B = A.copy() sum1 = 0 sum2", "= A / myarray_normalize # kopierar matrisen och multiplicerar kolonnvis med viktningar C", "** 0.5 C_value[i] = S_negative[i] / (S_negative[i] + S_positive[i]) Best_value = C_value.max() #", "break dp = np.array(list(dominatedPoints)) pp = np.array(list(paretoPoints)) return paretoPoints, dominatedPoints, dp, pp def", "1) Nrofrows = np.size(A, 0) sizeofA = (Nrofrows, Nrofcolumns) # Skapar matris som", "def WeightPPpoints(pp, my_weights): Pareto_points = pp np.size(Pareto_points) Nrofrows_pareto = np.size(Pareto_points, 0) # skapar", "summerar värden kolonnvis till nämnare för normalisering sum1 = sum1 + normalizedPoints[i, 0]", "viktningar C = B.copy() # Loop för 5 olika viktningar -> 5 optimala", "paretoPoints.add(tuple(candidateRow)) if len(normalizedPoints) == 0: # SLutar när man gått igenom alla punkter.", "# summerar värden kolonnvis till nämnare för normalisering sum1 = sum1 + normalizedPoints[i,", "# definerar en vektor med normaliseringsvärden myarray_normalize = [sum1 ** 0.5, sum2 **", "): # Går in om utmanare är större än kandidaten. 
nonDominated = False", "set() normalizedRowNr = 0 # skapar en kopia på matrisen som normaliseras senare", "= [C[:, 0].max(), C[:, 1].max(), C[:, 2].max()] S_positive = np.zeros((Nrofrows, 1)) S_negative =", "- A_negative[2]) ** 2 ) ** 0.5 C_value[i] = S_negative[i] / (S_negative[i] +", "sum3 = 0 sum4 = 0 for i in range(0, len(normalizedPoints)): # summerar", "(C[i, 1] - A_positive[1]) ** 2 + (C[i, 2] - A_positive[2]) ** 2", "normalizedPoints[i, 2] ** 2 # definerar en vektor med normaliseringsvärden myarray_normalize = [sum1", "normalized): # Beräknar om utmanare är bättre än candidate. return sum([row[x] >= normalized[x]", "= np.size(A, 0) sizeofA = (Nrofrows, Nrofcolumns) # Skapar matris som sedan fylls", "points from the ParetoFront pareto_case_ids = WeightPPpoints(pp, my_weights) for i in pareto_case_ids: yield", "skapar en kopia på matrisen som normaliseras senare B = A.copy() sum1 =", "+ (C[i, 1] - A_negative[1]) ** 2 + (C[i, 2] - A_negative[2]) **", "np.delete(normalizedPoints, normalizedRowNr, 0) inputPoints.remove(candidateRow) rowNr = 0 nonDominated = True while len(normalizedPoints) !=", "for i in range(0, Nrofrows): S_positive[i] = ( (C[i, 0] - A_positive[0]) **", "0.5 S_negative[i] = ( (C[i, 0] - A_negative[0]) ** 2 + (C[i, 1]", "2 sum3 = sum3 + normalizedPoints[i, 2] ** 2 # definerar en vektor", "problem_constants) input_points = list(generate_valid_points(problem_constants, crm)) pareto_points, dominated_points, dp, pp = calculate(input_points, dominates) my_weights", "med viktningar C = B.copy() # Loop för 5 olika viktningar -> 5", "1 if nonDominated: # Lägg till nondominated punkter till pareto ID = int(normalized[3])", "= B[i, 2] * my_weights[j, 2] # Definera ideala värden A_positive samt icke", "(C[i, 2] - A_positive[2]) ** 2 ) ** 0.5 S_negative[i] = ( (C[i,", ") ** 0.5 S_negative[i] = ( (C[i, 0] - A_negative[0]) ** 2 +", "storleken på matrisen som kommer som paretopoints output Nrofcolumns = np.size(A, 1) Nrofrows", "1] - A_negative[1]) ** 2 + 
(C[i, 2] - A_negative[2]) ** 2 )", "0) inputPoints.remove(rowIP) dominatedPoints.add(tuple(rowIP)) elif dominates( normalized, row ): # Går in om utmanare", "pick points from the ParetoFront pareto_case_ids = WeightPPpoints(pp, my_weights) for i in pareto_case_ids:", "rowNr = 0 nonDominated = True while len(normalizedPoints) != 0 and rowNr <", "1, 2], 1).tolist() # skapar matris med outputvärden utan ID kolonn A =", "+ normalizedPoints[i, 1] ** 2 sum3 = sum3 + normalizedPoints[i, 2] ** 2", "elif dominates( normalized, row ): # Går in om utmanare är större än", "vilket ingående ID lösningen har Vektor_ID_optimala = np.array(ID_vektor[Row_best_option]).tolist() IDpoints.append(int(max(Vektor_ID_optimala))) return IDpoints def generate_pareto_cases(data_dir,", "värden A_negative A_positive = [C[:, 0].min(), C[:, 1].min(), C[:, 2].min()] A_negative = [C[:,", "# Går in om candidateRow är bättre än utmanaren. normalizedPoints = np.delete(normalizedPoints, rowNr,", "+ normalizedPoints[i, 0] ** 2 sum2 = sum2 + normalizedPoints[i, 1] ** 2", "def generate_valid_points(problem_constants, crm): for (i, case_id) in enumerate(crm.list_cases()): model_case = crm.get_case(case_id) if (", "np.delete(normalizedPoints, rowNr, 0) inputPoints.remove(rowIP) dominatedPoints.add(tuple(rowIP)) elif dominates( normalized, row ): # Går in", "pp = np.array(list(paretoPoints)) return paretoPoints, dominatedPoints, dp, pp def dominates(row, normalized): # Beräknar", "def generate_pareto_cases(data_dir, problem_constants): crm = get_case_reader(data_dir, problem_constants) input_points = list(generate_valid_points(problem_constants, crm)) pareto_points, dominated_points,", "range(len(row) - 1)]) == len(row) - 1 def WeightPPpoints(pp, my_weights): Pareto_points = pp", "ID IDpoints = [] # skapar en kopia på matrisen som normaliseras senare", ") ** 0.5 C_value[i] = S_negative[i] / (S_negative[i] + S_positive[i]) Best_value = C_value.max()", "crm)) pareto_points, dominated_points, dp, pp = 
calculate(input_points, dominates) my_weights = np.matrix( [[1, 0,", "Nrofrows): S_positive[i] = ( (C[i, 0] - A_positive[0]) ** 2 + (C[i, 1]", "for (i, case_id) in enumerate(crm.list_cases()): model_case = crm.get_case(case_id) if ( model_case.outputs[\"usability.min_max_step_height\"][1] <= problem_constants.step_height.upper", "sum2 + normalizedPoints[i, 1] ** 2 sum3 = sum3 + normalizedPoints[i, 2] **", "normalizedPoints[i, 1] ** 2 sum3 = sum3 + normalizedPoints[i, 2] ** 2 #", "# skapar en vektor med ID ID_vektor = np.delete(Pareto_points, [0, 1, 2], 1).tolist()", "= crm.get_case(case_id) if ( model_case.outputs[\"usability.min_max_step_height\"][1] <= problem_constants.step_height.upper and model_case.outputs[\"usability.min_max_step_depth\"][0] >= problem_constants.step_depth.lower and model_case.outputs[\"usability.min_free_height\"][0]", "normaliseras senare normalizedPoints = np.array(inputPoints.copy()) sum1 = 0 sum2 = 0 sum3 =", "0], [0, 0, 1]] ) # Weights used to pick points from the", "row = normalizedPoints[rowNr] rowIP = inputPoints[rowNr] if dominates( row, normalized ): # Går", "np.size(Pareto_points, 0) # skapar en vektor med ID ID_vektor = np.delete(Pareto_points, [0, 1,", "0 sum2 = 0 sum3 = 0 for i in range(0, Nrofrows): #", "normalisering sum1 = sum1 + normalizedPoints[i, 0] ** 2 sum2 = sum2 +", "Pareto_points = pp np.size(Pareto_points) Nrofrows_pareto = np.size(Pareto_points, 0) # skapar en vektor med", "= np.delete(Pareto_points, 3, 1) np.size(A) # definerar storleken på matrisen som kommer som", "== len(row) - 1 def WeightPPpoints(pp, my_weights): Pareto_points = pp np.size(Pareto_points) Nrofrows_pareto =", "utmanare är större än kandidaten. nonDominated = False dominatedPoints.add(tuple(candidateRow)) rowNr += 1 else:", "B.copy() # Loop för 5 olika viktningar -> 5 optimala pareto punkter som", "en kopia på matrisen som normaliseras senare normalizedPoints = np.array(inputPoints.copy()) sum1 = 0", "än candidate. 
return sum([row[x] >= normalized[x] for x in range(len(row) - 1)]) ==", "row ): # Går in om utmanare är större än kandidaten. nonDominated =", "== 0: # SLutar när man gått igenom alla punkter. break dp =", "np.array(list(paretoPoints)) return paretoPoints, dominatedPoints, dp, pp def dominates(row, normalized): # Beräknar om utmanare", "värdet Row_best_option = np.argmax(C_value) # ta fram vilket ingående ID lösningen har Vektor_ID_optimala", "len(normalizedPoints)): # summerar värden kolonnvis till nämnare för normalisering sum1 = sum1 +", "det största värdet Row_best_option = np.argmax(C_value) # ta fram vilket ingående ID lösningen", "ID lösningen har Vektor_ID_optimala = np.array(ID_vektor[Row_best_option]).tolist() IDpoints.append(int(max(Vektor_ID_optimala))) return IDpoints def generate_pareto_cases(data_dir, problem_constants): crm", "C_value.max() # ta fram vilken rad i C_vektorn som har det största värdet", "sum3 = sum3 + normalizedPoints[i, 2] ** 2 # definerar en vektor med", "= 0 sum3 = 0 sum4 = 0 for i in range(0, len(normalizedPoints)):", "0.5, sum2 ** 0.5, sum3 ** 0.5] # Normaliserar matrisen B = A", "range(0, len(normalizedPoints)): # summerar värden kolonnvis till nämnare för normalisering sum1 = sum1", "en kopia på matrisen som normaliseras senare B = A.copy() sum1 = 0", "och multiplicerar kolonnvis med viktningar C = B.copy() # Loop för 5 olika", "2 ) ** 0.5 S_negative[i] = ( (C[i, 0] - A_negative[0]) ** 2", "pp def dominates(row, normalized): # Beräknar om utmanare är bättre än candidate. 
return", "0 nonDominated = True while len(normalizedPoints) != 0 and rowNr < len(normalizedPoints): row", "ta fram vilket ingående ID lösningen har Vektor_ID_optimala = np.array(ID_vektor[Row_best_option]).tolist() IDpoints.append(int(max(Vektor_ID_optimala))) return IDpoints", "- 1)]) == len(row) - 1 def WeightPPpoints(pp, my_weights): Pareto_points = pp np.size(Pareto_points)", "sum3 + normalizedPoints[i, 2] ** 2 # definerar en vektor med normaliseringsvärden myarray_normalize", "utmanaren. normalizedPoints = np.delete(normalizedPoints, rowNr, 0) inputPoints.remove(rowIP) dominatedPoints.add(tuple(rowIP)) elif dominates( normalized, row ):", "är bättre än candidate. return sum([row[x] >= normalized[x] for x in range(len(row) -", "som sedan fylls med bästa lösningarnas ID IDpoints = [] # skapar en", "and model_case.outputs[\"usability.min_free_height\"][0] > problem_constants.free_height_lower ): yield [ model_case.outputs[\"price_availability.total_price\"][0], model_case.outputs[\"usability.usability_penalty\"][0], model_case.outputs[\"price_availability.total_delivery_time\"][0], i, ] def", "kopierar matrisen och multiplicerar kolonnvis med viktningar C = B.copy() # Loop för", "range(0, len(my_weights)): for i in range(0, Nrofrows): C[i, 0] = B[i, 0] *", "= np.array(list(paretoPoints)) return paretoPoints, dominatedPoints, dp, pp def dominates(row, normalized): # Beräknar om", "as np from openmdao.api import CaseReader from optigurator.utils import recording_filename def get_case_reader(data_dir, problem_constants):", "A / myarray_normalize # kopierar matrisen och multiplicerar kolonnvis med viktningar C =", "optigurator.utils import recording_filename def get_case_reader(data_dir, problem_constants): return CaseReader(recording_filename(data_dir, problem_constants.id)) def generate_valid_points(problem_constants, crm): for", "S_positive[i]) Best_value = C_value.max() # ta fram vilken rad i C_vektorn som har", "model_case = crm.get_case(case_id) if ( 
model_case.outputs[\"usability.min_max_step_height\"][1] <= problem_constants.step_height.upper and model_case.outputs[\"usability.min_max_step_depth\"][0] >= problem_constants.step_depth.lower and", "i C_vektorn som har det största värdet Row_best_option = np.argmax(C_value) # ta fram", "problem_constants.step_height.upper and model_case.outputs[\"usability.min_max_step_depth\"][0] >= problem_constants.step_depth.lower and model_case.outputs[\"usability.min_free_height\"][0] > problem_constants.free_height_lower ): yield [ model_case.outputs[\"price_availability.total_price\"][0],", "senare B = A.copy() sum1 = 0 sum2 = 0 sum3 = 0", "np.array(myarray_normalize) while True: candidateRow = inputPoints[candidateRowNr] normalized = normalizedPoints[normalizedRowNr] normalizedPoints = np.delete(normalizedPoints, normalizedRowNr,", "sum1 = sum1 + normalizedPoints[i, 0] ** 2 sum2 = sum2 + normalizedPoints[i,", "dominates): paretoPoints = set() candidateRowNr = 0 dominatedPoints = set() normalizedRowNr = 0", "IDpoints = [] # skapar en kopia på matrisen som normaliseras senare B", "en vektor med normaliseringsvärden myarray_normalize = [sum1 ** 0.5, sum2 ** 0.5, sum3", "len(my_weights)): for i in range(0, Nrofrows): C[i, 0] = B[i, 0] * my_weights[j,", "2 sum3 = sum3 + A[i, 2] ** 2 # definerar en vektor", "[ model_case.outputs[\"price_availability.total_price\"][0], model_case.outputs[\"usability.usability_penalty\"][0], model_case.outputs[\"price_availability.total_delivery_time\"][0], i, ] def calculate(inputPoints, dominates): paretoPoints = set() candidateRowNr", "pp np.size(Pareto_points) Nrofrows_pareto = np.size(Pareto_points, 0) # skapar en vektor med ID ID_vektor", "(C[i, 1] - A_negative[1]) ** 2 + (C[i, 2] - A_negative[2]) ** 2", "# skapar en kopia på matrisen som normaliseras senare B = A.copy() sum1", "get_case_reader(data_dir, problem_constants) input_points = list(generate_valid_points(problem_constants, crm)) pareto_points, dominated_points, dp, pp = 
calculate(input_points, dominates)", "/ myarray_normalize # kopierar matrisen och multiplicerar kolonnvis med viktningar C = B.copy()", "kolonn A = np.delete(Pareto_points, 3, 1) np.size(A) # definerar storleken på matrisen som", "på matrisen som normaliseras senare normalizedPoints = np.array(inputPoints.copy()) sum1 = 0 sum2 =", "5 olika viktningar -> 5 optimala pareto punkter som output for j in", "A_positive[2]) ** 2 ) ** 0.5 S_negative[i] = ( (C[i, 0] - A_negative[0])", "sum1 + normalizedPoints[i, 0] ** 2 sum2 = sum2 + normalizedPoints[i, 1] **", "dp = np.array(list(dominatedPoints)) pp = np.array(list(paretoPoints)) return paretoPoints, dominatedPoints, dp, pp def dominates(row,", "skapar matris med outputvärden utan ID kolonn A = np.delete(Pareto_points, 3, 1) np.size(A)", "from the ParetoFront pareto_case_ids = WeightPPpoints(pp, my_weights) for i in pareto_case_ids: yield crm.get_case(i)", "sum3 = sum3 + A[i, 2] ** 2 # definerar en vektor med", "2].min()] A_negative = [C[:, 0].max(), C[:, 1].max(), C[:, 2].max()] S_positive = np.zeros((Nrofrows, 1))", "yield [ model_case.outputs[\"price_availability.total_price\"][0], model_case.outputs[\"usability.usability_penalty\"][0], model_case.outputs[\"price_availability.total_delivery_time\"][0], i, ] def calculate(inputPoints, dominates): paretoPoints = set()", "A_negative[1]) ** 2 + (C[i, 2] - A_negative[2]) ** 2 ) ** 0.5", "+= 1 if nonDominated: # Lägg till nondominated punkter till pareto ID =", "= np.zeros((Nrofrows, 1)) C_value = np.zeros((Nrofrows, 1)) # Vektor_ID_optimala=np.zeros((1,5)) for i in range(0,", "alla punkter. 
break dp = np.array(list(dominatedPoints)) pp = np.array(list(paretoPoints)) return paretoPoints, dominatedPoints, dp,", "# skapar matris med outputvärden utan ID kolonn A = np.delete(Pareto_points, 3, 1)", "* my_weights[j, 1] C[i, 2] = B[i, 2] * my_weights[j, 2] # Definera", "** 2 + (C[i, 2] - A_positive[2]) ** 2 ) ** 0.5 S_negative[i]", "Vektor_ID_optimala=np.zeros((1,5)) for i in range(0, Nrofrows): S_positive[i] = ( (C[i, 0] - A_positive[0])", "in range(0, Nrofrows): # summerar värden kolonnvis till nämnare för normalisering sum1 =", "myarray_normalize = [sum1 ** 0.5, sum2 ** 0.5, sum3 ** 0.5, 1] #", "Nrofrows): C[i, 0] = B[i, 0] * my_weights[j, 0] C[i, 1] = B[i,", "sizeofA = (Nrofrows, Nrofcolumns) # Skapar matris som sedan fylls med bästa lösningarnas", "2] * my_weights[j, 2] # Definera ideala värden A_positive samt icke ideala värden", "Vektor_ID_optimala = np.array(ID_vektor[Row_best_option]).tolist() IDpoints.append(int(max(Vektor_ID_optimala))) return IDpoints def generate_pareto_cases(data_dir, problem_constants): crm = get_case_reader(data_dir, problem_constants)", "C_value = np.zeros((Nrofrows, 1)) # Vektor_ID_optimala=np.zeros((1,5)) for i in range(0, Nrofrows): S_positive[i] =", "< len(normalizedPoints): row = normalizedPoints[rowNr] rowIP = inputPoints[rowNr] if dominates( row, normalized ):", "sum2 + A[i, 1] ** 2 sum3 = sum3 + A[i, 2] **", "C[:, 1].min(), C[:, 2].min()] A_negative = [C[:, 0].max(), C[:, 1].max(), C[:, 2].max()] S_positive", "/ np.array(myarray_normalize) while True: candidateRow = inputPoints[candidateRowNr] normalized = normalizedPoints[normalizedRowNr] normalizedPoints = np.delete(normalizedPoints,", "normalizedPoints[normalizedRowNr] normalizedPoints = np.delete(normalizedPoints, normalizedRowNr, 0) inputPoints.remove(candidateRow) rowNr = 0 nonDominated = True", "till nämnare för normalisering sum1 = sum1 + normalizedPoints[i, 0] ** 2 sum2", "model_case.outputs[\"price_availability.total_price\"][0], 
model_case.outputs[\"usability.usability_penalty\"][0], model_case.outputs[\"price_availability.total_delivery_time\"][0], i, ] def calculate(inputPoints, dominates): paretoPoints = set() candidateRowNr =", "= np.argmax(C_value) # ta fram vilket ingående ID lösningen har Vektor_ID_optimala = np.array(ID_vektor[Row_best_option]).tolist()", "Går in om utmanare är större än kandidaten. nonDominated = False dominatedPoints.add(tuple(candidateRow)) rowNr", "candidate. return sum([row[x] >= normalized[x] for x in range(len(row) - 1)]) == len(row)", "dp, pp def dominates(row, normalized): # Beräknar om utmanare är bättre än candidate.", "** 2 ) ** 0.5 S_negative[i] = ( (C[i, 0] - A_negative[0]) **", "- A_negative[0]) ** 2 + (C[i, 1] - A_negative[1]) ** 2 + (C[i,", "= np.delete(normalizedPoints, rowNr, 0) inputPoints.remove(rowIP) dominatedPoints.add(tuple(rowIP)) elif dominates( normalized, row ): # Går", "A[i, 2] ** 2 # definerar en vektor med normaliseringsvärden myarray_normalize = [sum1", "dominates(row, normalized): # Beräknar om utmanare är bättre än candidate. return sum([row[x] >=", "dominates( normalized, row ): # Går in om utmanare är större än kandidaten.", "0 for i in range(0, Nrofrows): # summerar värden kolonnvis till nämnare för", "till nondominated punkter till pareto ID = int(normalized[3]) paretoPoints.add(tuple(candidateRow)) if len(normalizedPoints) == 0:", "normalizedRowNr = 0 # skapar en kopia på matrisen som normaliseras senare normalizedPoints", "rowNr += 1 if nonDominated: # Lägg till nondominated punkter till pareto ID", "# Skapar matris som sedan fylls med bästa lösningarnas ID IDpoints = []", "0, 1]] ) # Weights used to pick points from the ParetoFront pareto_case_ids", "gått igenom alla punkter. 
break dp = np.array(list(dominatedPoints)) pp = np.array(list(paretoPoints)) return paretoPoints,", "if ( model_case.outputs[\"usability.min_max_step_height\"][1] <= problem_constants.step_height.upper and model_case.outputs[\"usability.min_max_step_depth\"][0] >= problem_constants.step_depth.lower and model_case.outputs[\"usability.min_free_height\"][0] > problem_constants.free_height_lower", "** 2 sum3 = sum3 + normalizedPoints[i, 2] ** 2 # definerar en", "= set() normalizedRowNr = 0 # skapar en kopia på matrisen som normaliseras", "som paretopoints output Nrofcolumns = np.size(A, 1) Nrofrows = np.size(A, 0) sizeofA =", "2], 1).tolist() # skapar matris med outputvärden utan ID kolonn A = np.delete(Pareto_points,", "= np.size(Pareto_points, 0) # skapar en vektor med ID ID_vektor = np.delete(Pareto_points, [0,", "nondominated punkter till pareto ID = int(normalized[3]) paretoPoints.add(tuple(candidateRow)) if len(normalizedPoints) == 0: #", "2 + (C[i, 2] - A_positive[2]) ** 2 ) ** 0.5 S_negative[i] =", "utan ID kolonn A = np.delete(Pareto_points, 3, 1) np.size(A) # definerar storleken på", "Definera ideala värden A_positive samt icke ideala värden A_negative A_positive = [C[:, 0].min(),", "= sum2 + A[i, 1] ** 2 sum3 = sum3 + A[i, 2]", ") # Weights used to pick points from the ParetoFront pareto_case_ids = WeightPPpoints(pp,", "paretoPoints = set() candidateRowNr = 0 dominatedPoints = set() normalizedRowNr = 0 #", "B = A.copy() sum1 = 0 sum2 = 0 sum3 = 0 for", "True: candidateRow = inputPoints[candidateRowNr] normalized = normalizedPoints[normalizedRowNr] normalizedPoints = np.delete(normalizedPoints, normalizedRowNr, 0) inputPoints.remove(candidateRow)", "1] # Normaliserar matrisen normalizedPoints = np.array(inputPoints) / np.array(myarray_normalize) while True: candidateRow =", "inputPoints.remove(rowIP) dominatedPoints.add(tuple(rowIP)) elif dominates( normalized, row ): # Går in om utmanare är", "= False dominatedPoints.add(tuple(candidateRow)) rowNr += 1 else: 
rowNr += 1 if nonDominated: #", "0.5, sum2 ** 0.5, sum3 ** 0.5, 1] # Normaliserar matrisen normalizedPoints =", "import recording_filename def get_case_reader(data_dir, problem_constants): return CaseReader(recording_filename(data_dir, problem_constants.id)) def generate_valid_points(problem_constants, crm): for (i,", "är bättre än utmanaren. normalizedPoints = np.delete(normalizedPoints, rowNr, 0) inputPoints.remove(rowIP) dominatedPoints.add(tuple(rowIP)) elif dominates(", "samt icke ideala värden A_negative A_positive = [C[:, 0].min(), C[:, 1].min(), C[:, 2].min()]", "len(row) - 1 def WeightPPpoints(pp, my_weights): Pareto_points = pp np.size(Pareto_points) Nrofrows_pareto = np.size(Pareto_points,", "1).tolist() # skapar matris med outputvärden utan ID kolonn A = np.delete(Pareto_points, 3,", "C[:, 2].min()] A_negative = [C[:, 0].max(), C[:, 1].max(), C[:, 2].max()] S_positive = np.zeros((Nrofrows,", "np.size(A, 1) Nrofrows = np.size(A, 0) sizeofA = (Nrofrows, Nrofcolumns) # Skapar matris", "sum1 = sum1 + A[i, 0] ** 2 sum2 = sum2 + A[i,", "dominatedPoints.add(tuple(rowIP)) elif dominates( normalized, row ): # Går in om utmanare är större", "C[i, 0] = B[i, 0] * my_weights[j, 0] C[i, 1] = B[i, 1]", "0) inputPoints.remove(candidateRow) rowNr = 0 nonDominated = True while len(normalizedPoints) != 0 and", "Nrofcolumns = np.size(A, 1) Nrofrows = np.size(A, 0) sizeofA = (Nrofrows, Nrofcolumns) #", "(Nrofrows, Nrofcolumns) # Skapar matris som sedan fylls med bästa lösningarnas ID IDpoints", "range(0, Nrofrows): C[i, 0] = B[i, 0] * my_weights[j, 0] C[i, 1] =", "my_weights[j, 2] # Definera ideala värden A_positive samt icke ideala värden A_negative A_positive", "Normaliserar matrisen normalizedPoints = np.array(inputPoints) / np.array(myarray_normalize) while True: candidateRow = inputPoints[candidateRowNr] normalized", "paretoPoints, dominatedPoints, dp, pp def dominates(row, normalized): # Beräknar om utmanare är bättre", "dominatedPoints = set() normalizedRowNr = 0 # 
skapar en kopia på matrisen som", "som normaliseras senare B = A.copy() sum1 = 0 sum2 = 0 sum3", "/ (S_negative[i] + S_positive[i]) Best_value = C_value.max() # ta fram vilken rad i", "( (C[i, 0] - A_positive[0]) ** 2 + (C[i, 1] - A_positive[1]) **", "# definerar storleken på matrisen som kommer som paretopoints output Nrofcolumns = np.size(A,", "problem_constants.id)) def generate_valid_points(problem_constants, crm): for (i, case_id) in enumerate(crm.list_cases()): model_case = crm.get_case(case_id) if", "[0, 0, 1]] ) # Weights used to pick points from the ParetoFront", "= normalizedPoints[normalizedRowNr] normalizedPoints = np.delete(normalizedPoints, normalizedRowNr, 0) inputPoints.remove(candidateRow) rowNr = 0 nonDominated =", "lösningarnas ID IDpoints = [] # skapar en kopia på matrisen som normaliseras", "dominatedPoints.add(tuple(candidateRow)) rowNr += 1 else: rowNr += 1 if nonDominated: # Lägg till", "and model_case.outputs[\"usability.min_max_step_depth\"][0] >= problem_constants.step_depth.lower and model_case.outputs[\"usability.min_free_height\"][0] > problem_constants.free_height_lower ): yield [ model_case.outputs[\"price_availability.total_price\"][0], model_case.outputs[\"usability.usability_penalty\"][0],", "1] ** 2 sum3 = sum3 + normalizedPoints[i, 2] ** 2 # definerar", "= 0 for i in range(0, Nrofrows): # summerar värden kolonnvis till nämnare", "= get_case_reader(data_dir, problem_constants) input_points = list(generate_valid_points(problem_constants, crm)) pareto_points, dominated_points, dp, pp = calculate(input_points,", "dominated_points, dp, pp = calculate(input_points, dominates) my_weights = np.matrix( [[1, 0, 0], [0,", "nonDominated = True while len(normalizedPoints) != 0 and rowNr < len(normalizedPoints): row =", "Beräknar om utmanare är bättre än candidate. 
return sum([row[x] >= normalized[x] for x", "dominates( row, normalized ): # Går in om candidateRow är bättre än utmanaren.", "punkter till pareto ID = int(normalized[3]) paretoPoints.add(tuple(candidateRow)) if len(normalizedPoints) == 0: # SLutar", "case_id) in enumerate(crm.list_cases()): model_case = crm.get_case(case_id) if ( model_case.outputs[\"usability.min_max_step_height\"][1] <= problem_constants.step_height.upper and model_case.outputs[\"usability.min_max_step_depth\"][0]", "inputPoints[rowNr] if dominates( row, normalized ): # Går in om candidateRow är bättre", "range(0, Nrofrows): # summerar värden kolonnvis till nämnare för normalisering sum1 = sum1", "** 2 # definerar en vektor med normaliseringsvärden myarray_normalize = [sum1 ** 0.5,", "sum4 = 0 for i in range(0, len(normalizedPoints)): # summerar värden kolonnvis till", "inputPoints.remove(candidateRow) rowNr = 0 nonDominated = True while len(normalizedPoints) != 0 and rowNr", "i, ] def calculate(inputPoints, dominates): paretoPoints = set() candidateRowNr = 0 dominatedPoints =", "candidateRow = inputPoints[candidateRowNr] normalized = normalizedPoints[normalizedRowNr] normalizedPoints = np.delete(normalizedPoints, normalizedRowNr, 0) inputPoints.remove(candidateRow) rowNr", "på matrisen som kommer som paretopoints output Nrofcolumns = np.size(A, 1) Nrofrows =", "= inputPoints[rowNr] if dominates( row, normalized ): # Går in om candidateRow är", "A_negative[2]) ** 2 ) ** 0.5 C_value[i] = S_negative[i] / (S_negative[i] + S_positive[i])", "ta fram vilken rad i C_vektorn som har det största värdet Row_best_option =", "** 0.5, sum2 ** 0.5, sum3 ** 0.5, 1] # Normaliserar matrisen normalizedPoints", "myarray_normalize = [sum1 ** 0.5, sum2 ** 0.5, sum3 ** 0.5] # Normaliserar", "input_points = list(generate_valid_points(problem_constants, crm)) pareto_points, dominated_points, dp, pp = calculate(input_points, dominates) my_weights =" ]
[ "an # \"AS IS\" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY # KIND,", "distributed on an # \"AS IS\" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY", "express or implied. See the License for the # specific language governing permissions", "# ############################################################################### # Module: alter_statement # Purpose: Represents SQL alter statements # #", "You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 #", "(the # \"License\"); you may not use this file except in compliance #", "# Purpose: Represents SQL alter statements # # Notes: # ############################################################################### import data_pipeline.sql.utils", "kwargs[const.DATA_TYPE], const.PARAMS: kwargs[const.PARAMS], const.CONSTRAINTS: kwargs[const.CONSTRAINTS] } self.add_entry(alter_entry=alter_entry) def tosql(self, applier): return applier.build_alter_sql(self) def", "License for the # specific language governing permissions and limitations # under the", "you under the Apache License, Version 2.0 (the # \"License\"); you may not", "(ASF) under one # or more contributor license agreements. See the NOTICE file", "License is distributed on an # \"AS IS\" BASIS, WITHOUT WARRANTIES OR CONDITIONS", "alter_entry = { const.OPERATION: kwargs[const.OPERATION], const.FIELD_NAME: kwargs[const.FIELD_NAME], const.DATA_TYPE: kwargs[const.DATA_TYPE], const.PARAMS: kwargs[const.PARAMS], const.CONSTRAINTS: kwargs[const.CONSTRAINTS]", "super(AlterStatement, self).__init__(table_name) self.statement_type = const.ALTER def add_entry(self, **kwargs): if const.ALTER_ENTRY in kwargs: self.entries.append(kwargs[const.ALTER_ENTRY])", "kwargs[const.PARAMS], const.CONSTRAINTS: kwargs[const.CONSTRAINTS] } self.add_entry(alter_entry=alter_entry) def tosql(self, applier): return applier.build_alter_sql(self) def __str__(self): return", "under the License. 
# ############################################################################### # Module: alter_statement # Purpose: Represents SQL alter", "Module: alter_statement # Purpose: Represents SQL alter statements # # Notes: # ###############################################################################", "this file # to you under the Apache License, Version 2.0 (the #", "# Notes: # ############################################################################### import data_pipeline.sql.utils as sql_utils import data_pipeline.constants.const as const from", "############################################################################### import data_pipeline.sql.utils as sql_utils import data_pipeline.constants.const as const from .ddl_statement import DdlStatement", "software distributed under the License is distributed on an # \"AS IS\" BASIS,", "law or agreed to in writing, # software distributed under the License is", "# # Unless required by applicable law or agreed to in writing, #", "# software distributed under the License is distributed on an # \"AS IS\"", "to you under the Apache License, Version 2.0 (the # \"License\"); you may", "kwargs[const.OPERATION], const.FIELD_NAME: kwargs[const.FIELD_NAME], const.DATA_TYPE: kwargs[const.DATA_TYPE], const.PARAMS: kwargs[const.PARAMS], const.CONSTRAINTS: kwargs[const.CONSTRAINTS] } self.add_entry(alter_entry=alter_entry) def tosql(self,", "if const.ALTER_ENTRY in kwargs: self.entries.append(kwargs[const.ALTER_ENTRY]) else: alter_entry = { const.OPERATION: kwargs[const.OPERATION], const.FIELD_NAME: kwargs[const.FIELD_NAME],", "ALTER statement\"\"\" def __init__(self, table_name): super(AlterStatement, self).__init__(table_name) self.statement_type = const.ALTER def add_entry(self, **kwargs):", "file # distributed with this work for additional information # regarding copyright ownership.", "specific language governing permissions and limitations # under the License. 
# ############################################################################### #", "of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable", "# Licensed to the Apache Software Foundation (ASF) under one # or more", "Version 2.0 (the # \"License\"); you may not use this file except in", "for the # specific language governing permissions and limitations # under the License.", "under the Apache License, Version 2.0 (the # \"License\"); you may not use", "data_pipeline.sql.utils as sql_utils import data_pipeline.constants.const as const from .ddl_statement import DdlStatement class AlterStatement(DdlStatement):", "# Module: alter_statement # Purpose: Represents SQL alter statements # # Notes: #", "SQL alter statements # # Notes: # ############################################################################### import data_pipeline.sql.utils as sql_utils import", "const.OPERATION: kwargs[const.OPERATION], const.FIELD_NAME: kwargs[const.FIELD_NAME], const.DATA_TYPE: kwargs[const.DATA_TYPE], const.PARAMS: kwargs[const.PARAMS], const.CONSTRAINTS: kwargs[const.CONSTRAINTS] } self.add_entry(alter_entry=alter_entry) def", "data_pipeline.constants.const as const from .ddl_statement import DdlStatement class AlterStatement(DdlStatement): \"\"\"Contains data necessary to", "= { const.OPERATION: kwargs[const.OPERATION], const.FIELD_NAME: kwargs[const.FIELD_NAME], const.DATA_TYPE: kwargs[const.DATA_TYPE], const.PARAMS: kwargs[const.PARAMS], const.CONSTRAINTS: kwargs[const.CONSTRAINTS] }", "DdlStatement class AlterStatement(DdlStatement): \"\"\"Contains data necessary to produce a valid SQL ALTER statement\"\"\"", "copyright ownership. The ASF licenses this file # to you under the Apache", "ownership. The ASF licenses this file # to you under the Apache License,", "License. 
You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0", "valid SQL ALTER statement\"\"\" def __init__(self, table_name): super(AlterStatement, self).__init__(table_name) self.statement_type = const.ALTER def", "under the License is distributed on an # \"AS IS\" BASIS, WITHOUT WARRANTIES", "additional information # regarding copyright ownership. The ASF licenses this file # to", "License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or", "BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY # KIND, either express or implied.", "# \"License\"); you may not use this file except in compliance # with", "OF ANY # KIND, either express or implied. See the License for the", "or implied. See the License for the # specific language governing permissions and", "sql_utils import data_pipeline.constants.const as const from .ddl_statement import DdlStatement class AlterStatement(DdlStatement): \"\"\"Contains data", "# Unless required by applicable law or agreed to in writing, # software", "############################################################################### # Module: alter_statement # Purpose: Represents SQL alter statements # # Notes:", "const.CONSTRAINTS: kwargs[const.CONSTRAINTS] } self.add_entry(alter_entry=alter_entry) def tosql(self, applier): return applier.build_alter_sql(self) def __str__(self): return sql_utils.build_alter_sql(self)", "at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed", "Licensed to the Apache Software Foundation (ASF) under one # or more contributor", "kwargs[const.FIELD_NAME], const.DATA_TYPE: kwargs[const.DATA_TYPE], const.PARAMS: kwargs[const.PARAMS], const.CONSTRAINTS: kwargs[const.CONSTRAINTS] } self.add_entry(alter_entry=alter_entry) def tosql(self, applier): return", "distributed under the License is distributed on an # \"AS IS\" BASIS, WITHOUT", "or more contributor license agreements. 
See the NOTICE file # distributed with this", "to in writing, # software distributed under the License is distributed on an", "const.FIELD_NAME: kwargs[const.FIELD_NAME], const.DATA_TYPE: kwargs[const.DATA_TYPE], const.PARAMS: kwargs[const.PARAMS], const.CONSTRAINTS: kwargs[const.CONSTRAINTS] } self.add_entry(alter_entry=alter_entry) def tosql(self, applier):", "agreed to in writing, # software distributed under the License is distributed on", "OR CONDITIONS OF ANY # KIND, either express or implied. See the License", "Foundation (ASF) under one # or more contributor license agreements. See the NOTICE", "Purpose: Represents SQL alter statements # # Notes: # ############################################################################### import data_pipeline.sql.utils as", "def __init__(self, table_name): super(AlterStatement, self).__init__(table_name) self.statement_type = const.ALTER def add_entry(self, **kwargs): if const.ALTER_ENTRY", "Apache Software Foundation (ASF) under one # or more contributor license agreements. See", "\"AS IS\" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY # KIND, either express", "table_name): super(AlterStatement, self).__init__(table_name) self.statement_type = const.ALTER def add_entry(self, **kwargs): if const.ALTER_ENTRY in kwargs:", "a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required", "limitations # under the License. 
# ############################################################################### # Module: alter_statement # Purpose: Represents", "License, Version 2.0 (the # \"License\"); you may not use this file except", "to the Apache Software Foundation (ASF) under one # or more contributor license", "# http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in", "to produce a valid SQL ALTER statement\"\"\" def __init__(self, table_name): super(AlterStatement, self).__init__(table_name) self.statement_type", "\"\"\"Contains data necessary to produce a valid SQL ALTER statement\"\"\" def __init__(self, table_name):", "this file except in compliance # with the License. You may obtain a", "class AlterStatement(DdlStatement): \"\"\"Contains data necessary to produce a valid SQL ALTER statement\"\"\" def", "in compliance # with the License. You may obtain a copy of the", "implied. See the License for the # specific language governing permissions and limitations", "may not use this file except in compliance # with the License. You", "or agreed to in writing, # software distributed under the License is distributed", "statements # # Notes: # ############################################################################### import data_pipeline.sql.utils as sql_utils import data_pipeline.constants.const as", "data necessary to produce a valid SQL ALTER statement\"\"\" def __init__(self, table_name): super(AlterStatement,", "use this file except in compliance # with the License. You may obtain", "and limitations # under the License. 
# ############################################################################### # Module: alter_statement # Purpose:", "ASF licenses this file # to you under the Apache License, Version 2.0", "kwargs: self.entries.append(kwargs[const.ALTER_ENTRY]) else: alter_entry = { const.OPERATION: kwargs[const.OPERATION], const.FIELD_NAME: kwargs[const.FIELD_NAME], const.DATA_TYPE: kwargs[const.DATA_TYPE], const.PARAMS:", "{ const.OPERATION: kwargs[const.OPERATION], const.FIELD_NAME: kwargs[const.FIELD_NAME], const.DATA_TYPE: kwargs[const.DATA_TYPE], const.PARAMS: kwargs[const.PARAMS], const.CONSTRAINTS: kwargs[const.CONSTRAINTS] } self.add_entry(alter_entry=alter_entry)", "Unless required by applicable law or agreed to in writing, # software distributed", "as const from .ddl_statement import DdlStatement class AlterStatement(DdlStatement): \"\"\"Contains data necessary to produce", "Software Foundation (ASF) under one # or more contributor license agreements. See the", "distributed with this work for additional information # regarding copyright ownership. The ASF", "# distributed with this work for additional information # regarding copyright ownership. The", "license agreements. See the NOTICE file # distributed with this work for additional", "the License is distributed on an # \"AS IS\" BASIS, WITHOUT WARRANTIES OR", "\"License\"); you may not use this file except in compliance # with the", "on an # \"AS IS\" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY #", "const.DATA_TYPE: kwargs[const.DATA_TYPE], const.PARAMS: kwargs[const.PARAMS], const.CONSTRAINTS: kwargs[const.CONSTRAINTS] } self.add_entry(alter_entry=alter_entry) def tosql(self, applier): return applier.build_alter_sql(self)", "regarding copyright ownership. 
The ASF licenses this file # to you under the", "const.PARAMS: kwargs[const.PARAMS], const.CONSTRAINTS: kwargs[const.CONSTRAINTS] } self.add_entry(alter_entry=alter_entry) def tosql(self, applier): return applier.build_alter_sql(self) def __str__(self):", "Represents SQL alter statements # # Notes: # ############################################################################### import data_pipeline.sql.utils as sql_utils", "# KIND, either express or implied. See the License for the # specific", "alter_statement # Purpose: Represents SQL alter statements # # Notes: # ############################################################################### import", "this work for additional information # regarding copyright ownership. The ASF licenses this", "with this work for additional information # regarding copyright ownership. The ASF licenses", "the # specific language governing permissions and limitations # under the License. #", "SQL ALTER statement\"\"\" def __init__(self, table_name): super(AlterStatement, self).__init__(table_name) self.statement_type = const.ALTER def add_entry(self,", "ANY # KIND, either express or implied. See the License for the #", "See the NOTICE file # distributed with this work for additional information #", "contributor license agreements. See the NOTICE file # distributed with this work for", "either express or implied. See the License for the # specific language governing", "**kwargs): if const.ALTER_ENTRY in kwargs: self.entries.append(kwargs[const.ALTER_ENTRY]) else: alter_entry = { const.OPERATION: kwargs[const.OPERATION], const.FIELD_NAME:", "the License. 
You may obtain a copy of the License at # #", "copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by", "const.ALTER def add_entry(self, **kwargs): if const.ALTER_ENTRY in kwargs: self.entries.append(kwargs[const.ALTER_ENTRY]) else: alter_entry = {", "statement\"\"\" def __init__(self, table_name): super(AlterStatement, self).__init__(table_name) self.statement_type = const.ALTER def add_entry(self, **kwargs): if", "the NOTICE file # distributed with this work for additional information # regarding", "in writing, # software distributed under the License is distributed on an #", "the Apache Software Foundation (ASF) under one # or more contributor license agreements.", "agreements. See the NOTICE file # distributed with this work for additional information", "not use this file except in compliance # with the License. You may", "import DdlStatement class AlterStatement(DdlStatement): \"\"\"Contains data necessary to produce a valid SQL ALTER", "# ############################################################################### import data_pipeline.sql.utils as sql_utils import data_pipeline.constants.const as const from .ddl_statement import", "= const.ALTER def add_entry(self, **kwargs): if const.ALTER_ENTRY in kwargs: self.entries.append(kwargs[const.ALTER_ENTRY]) else: alter_entry =", "writing, # software distributed under the License is distributed on an # \"AS", "# specific language governing permissions and limitations # under the License. # ###############################################################################", "# # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to", "# or more contributor license agreements. 
See the NOTICE file # distributed with", "const.ALTER_ENTRY in kwargs: self.entries.append(kwargs[const.ALTER_ENTRY]) else: alter_entry = { const.OPERATION: kwargs[const.OPERATION], const.FIELD_NAME: kwargs[const.FIELD_NAME], const.DATA_TYPE:", "See the License for the # specific language governing permissions and limitations #", "WARRANTIES OR CONDITIONS OF ANY # KIND, either express or implied. See the", "AlterStatement(DdlStatement): \"\"\"Contains data necessary to produce a valid SQL ALTER statement\"\"\" def __init__(self,", "NOTICE file # distributed with this work for additional information # regarding copyright", "a valid SQL ALTER statement\"\"\" def __init__(self, table_name): super(AlterStatement, self).__init__(table_name) self.statement_type = const.ALTER", "IS\" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY # KIND, either express or", "2.0 (the # \"License\"); you may not use this file except in compliance", "language governing permissions and limitations # under the License. # ############################################################################### # Module:", "the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law", "import data_pipeline.sql.utils as sql_utils import data_pipeline.constants.const as const from .ddl_statement import DdlStatement class", "__init__(self, table_name): super(AlterStatement, self).__init__(table_name) self.statement_type = const.ALTER def add_entry(self, **kwargs): if const.ALTER_ENTRY in", "# with the License. You may obtain a copy of the License at", "# under the License. # ############################################################################### # Module: alter_statement # Purpose: Represents SQL", "KIND, either express or implied. 
See the License for the # specific language", "in kwargs: self.entries.append(kwargs[const.ALTER_ENTRY]) else: alter_entry = { const.OPERATION: kwargs[const.OPERATION], const.FIELD_NAME: kwargs[const.FIELD_NAME], const.DATA_TYPE: kwargs[const.DATA_TYPE],", "obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless", "# regarding copyright ownership. The ASF licenses this file # to you under", "the Apache License, Version 2.0 (the # \"License\"); you may not use this", "Apache License, Version 2.0 (the # \"License\"); you may not use this file", "more contributor license agreements. See the NOTICE file # distributed with this work", "under one # or more contributor license agreements. See the NOTICE file #", "alter statements # # Notes: # ############################################################################### import data_pipeline.sql.utils as sql_utils import data_pipeline.constants.const", "produce a valid SQL ALTER statement\"\"\" def __init__(self, table_name): super(AlterStatement, self).__init__(table_name) self.statement_type =", "# to you under the Apache License, Version 2.0 (the # \"License\"); you", "required by applicable law or agreed to in writing, # software distributed under", "self).__init__(table_name) self.statement_type = const.ALTER def add_entry(self, **kwargs): if const.ALTER_ENTRY in kwargs: self.entries.append(kwargs[const.ALTER_ENTRY]) else:", "permissions and limitations # under the License. # ############################################################################### # Module: alter_statement #", "is distributed on an # \"AS IS\" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF", "# # Notes: # ############################################################################### import data_pipeline.sql.utils as sql_utils import data_pipeline.constants.const as const", "compliance # with the License. 
You may obtain a copy of the License", "const from .ddl_statement import DdlStatement class AlterStatement(DdlStatement): \"\"\"Contains data necessary to produce a", "governing permissions and limitations # under the License. # ############################################################################### # Module: alter_statement", "License. # ############################################################################### # Module: alter_statement # Purpose: Represents SQL alter statements #", "import data_pipeline.constants.const as const from .ddl_statement import DdlStatement class AlterStatement(DdlStatement): \"\"\"Contains data necessary", "Notes: # ############################################################################### import data_pipeline.sql.utils as sql_utils import data_pipeline.constants.const as const from .ddl_statement", "self.entries.append(kwargs[const.ALTER_ENTRY]) else: alter_entry = { const.OPERATION: kwargs[const.OPERATION], const.FIELD_NAME: kwargs[const.FIELD_NAME], const.DATA_TYPE: kwargs[const.DATA_TYPE], const.PARAMS: kwargs[const.PARAMS],", "by applicable law or agreed to in writing, # software distributed under the", ".ddl_statement import DdlStatement class AlterStatement(DdlStatement): \"\"\"Contains data necessary to produce a valid SQL", "def add_entry(self, **kwargs): if const.ALTER_ENTRY in kwargs: self.entries.append(kwargs[const.ALTER_ENTRY]) else: alter_entry = { const.OPERATION:", "for additional information # regarding copyright ownership. The ASF licenses this file #", "necessary to produce a valid SQL ALTER statement\"\"\" def __init__(self, table_name): super(AlterStatement, self).__init__(table_name)", "WITHOUT WARRANTIES OR CONDITIONS OF ANY # KIND, either express or implied. See", "with the License. You may obtain a copy of the License at #", "information # regarding copyright ownership. 
The ASF licenses this file # to you", "The ASF licenses this file # to you under the Apache License, Version", "file except in compliance # with the License. You may obtain a copy", "CONDITIONS OF ANY # KIND, either express or implied. See the License for", "the License. # ############################################################################### # Module: alter_statement # Purpose: Represents SQL alter statements", "file # to you under the Apache License, Version 2.0 (the # \"License\");", "the License for the # specific language governing permissions and limitations # under", "from .ddl_statement import DdlStatement class AlterStatement(DdlStatement): \"\"\"Contains data necessary to produce a valid", "self.statement_type = const.ALTER def add_entry(self, **kwargs): if const.ALTER_ENTRY in kwargs: self.entries.append(kwargs[const.ALTER_ENTRY]) else: alter_entry", "add_entry(self, **kwargs): if const.ALTER_ENTRY in kwargs: self.entries.append(kwargs[const.ALTER_ENTRY]) else: alter_entry = { const.OPERATION: kwargs[const.OPERATION],", "may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # #", "work for additional information # regarding copyright ownership. The ASF licenses this file", "applicable law or agreed to in writing, # software distributed under the License", "one # or more contributor license agreements. See the NOTICE file # distributed", "as sql_utils import data_pipeline.constants.const as const from .ddl_statement import DdlStatement class AlterStatement(DdlStatement): \"\"\"Contains", "except in compliance # with the License. 
You may obtain a copy of", "else: alter_entry = { const.OPERATION: kwargs[const.OPERATION], const.FIELD_NAME: kwargs[const.FIELD_NAME], const.DATA_TYPE: kwargs[const.DATA_TYPE], const.PARAMS: kwargs[const.PARAMS], const.CONSTRAINTS:", "http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing,", "you may not use this file except in compliance # with the License.", "licenses this file # to you under the Apache License, Version 2.0 (the", "# \"AS IS\" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY # KIND, either" ]
[ "to patch') parser.add_argument('input', type=argparse.FileType('r'), help='Patch data (json)') args = parser.parse_args() shader = open(args.patch,", "json import re import argparse parser = argparse.ArgumentParser(description='Patch minified shader') parser.add_argument('--patch', required=True, help='Minified", "'r').read() patch = json.load(args.input) for k, v in patch.items(): shader = shader.replace(k, v)", "re import argparse parser = argparse.ArgumentParser(description='Patch minified shader') parser.add_argument('--patch', required=True, help='Minified file to", "type=argparse.FileType('r'), help='Patch data (json)') args = parser.parse_args() shader = open(args.patch, 'r').read() patch =", "= json.load(args.input) for k, v in patch.items(): shader = shader.replace(k, v) open(args.patch, 'w').write(shader)", "patch') parser.add_argument('input', type=argparse.FileType('r'), help='Patch data (json)') args = parser.parse_args() shader = open(args.patch, 'r').read()", "data (json)') args = parser.parse_args() shader = open(args.patch, 'r').read() patch = json.load(args.input) for", "(json)') args = parser.parse_args() shader = open(args.patch, 'r').read() patch = json.load(args.input) for k,", "python3 import json import re import argparse parser = argparse.ArgumentParser(description='Patch minified shader') parser.add_argument('--patch',", "minified shader') parser.add_argument('--patch', required=True, help='Minified file to patch') parser.add_argument('input', type=argparse.FileType('r'), help='Patch data (json)')", "argparse parser = argparse.ArgumentParser(description='Patch minified shader') parser.add_argument('--patch', required=True, help='Minified file to patch') parser.add_argument('input',", "shader') parser.add_argument('--patch', required=True, help='Minified file to patch') parser.add_argument('input', type=argparse.FileType('r'), help='Patch data (json)') args", "help='Patch data (json)') args = parser.parse_args() shader = open(args.patch, 
'r').read() patch = json.load(args.input)", "shader = open(args.patch, 'r').read() patch = json.load(args.input) for k, v in patch.items(): shader", "file to patch') parser.add_argument('input', type=argparse.FileType('r'), help='Patch data (json)') args = parser.parse_args() shader =", "#!/usr/bin/env python3 import json import re import argparse parser = argparse.ArgumentParser(description='Patch minified shader')", "args = parser.parse_args() shader = open(args.patch, 'r').read() patch = json.load(args.input) for k, v", "= open(args.patch, 'r').read() patch = json.load(args.input) for k, v in patch.items(): shader =", "parser = argparse.ArgumentParser(description='Patch minified shader') parser.add_argument('--patch', required=True, help='Minified file to patch') parser.add_argument('input', type=argparse.FileType('r'),", "help='Minified file to patch') parser.add_argument('input', type=argparse.FileType('r'), help='Patch data (json)') args = parser.parse_args() shader", "parser.add_argument('--patch', required=True, help='Minified file to patch') parser.add_argument('input', type=argparse.FileType('r'), help='Patch data (json)') args =", "import argparse parser = argparse.ArgumentParser(description='Patch minified shader') parser.add_argument('--patch', required=True, help='Minified file to patch')", "open(args.patch, 'r').read() patch = json.load(args.input) for k, v in patch.items(): shader = shader.replace(k,", "import re import argparse parser = argparse.ArgumentParser(description='Patch minified shader') parser.add_argument('--patch', required=True, help='Minified file", "parser.add_argument('input', type=argparse.FileType('r'), help='Patch data (json)') args = parser.parse_args() shader = open(args.patch, 'r').read() patch", "= parser.parse_args() shader = open(args.patch, 'r').read() patch = json.load(args.input) for k, v in", "import json import re import argparse parser = argparse.ArgumentParser(description='Patch minified shader') 
parser.add_argument('--patch', required=True,", "required=True, help='Minified file to patch') parser.add_argument('input', type=argparse.FileType('r'), help='Patch data (json)') args = parser.parse_args()", "argparse.ArgumentParser(description='Patch minified shader') parser.add_argument('--patch', required=True, help='Minified file to patch') parser.add_argument('input', type=argparse.FileType('r'), help='Patch data", "= argparse.ArgumentParser(description='Patch minified shader') parser.add_argument('--patch', required=True, help='Minified file to patch') parser.add_argument('input', type=argparse.FileType('r'), help='Patch", "patch = json.load(args.input) for k, v in patch.items(): shader = shader.replace(k, v) open(args.patch,", "<filename>helper_scripts/shaderpatch.py #!/usr/bin/env python3 import json import re import argparse parser = argparse.ArgumentParser(description='Patch minified", "parser.parse_args() shader = open(args.patch, 'r').read() patch = json.load(args.input) for k, v in patch.items():" ]
[]
[ "return data def load_data(): pickle_fn = \"data/loaded_data\" data = load_object(pickle_fn) # load data", "save_object(pickle_fn, data) return data def test_load_csv(): print(\"Test loading csv\") data = load_csv(\"data/NCAATourneySeeds.csv\") print(data)", "\"\"\" Read in the data from csv files \"\"\" import pandas as pd", "# load data from pickle file if it exists obj = load_object(filename) if", "{} # load all csv files in data directory for f in glob.glob(os.path.join(\"data\",", "main(): data = load_data() print(\"Available DataSet Keys: \") for key in data.keys(): print(\"\\t\"+key)", "save_object, load_object def load_csv(filename): # load data from pickle file if it exists", "data # otherwise load from csv else: data = {} # load all", "pandas as pd import os import glob from save_data import save_object, load_object def", "save_object(filename, data) return data def load_data(): pickle_fn = \"data/loaded_data\" data = load_object(pickle_fn) #", "def load_data(): pickle_fn = \"data/loaded_data\" data = load_object(pickle_fn) # load data from pickle", "from save_data import save_object, load_object def load_csv(filename): # load data from pickle file", "# load data from pickle file if it exists if data != None:", "in the data from csv files \"\"\" import pandas as pd import os", "data != None: return data # otherwise load from csv else: data =", "it exists obj = load_object(filename) if obj != None: return obj # otherwise", "pd import os import glob from save_data import save_object, load_object def load_csv(filename): #", "= load_csv(f) save_object(pickle_fn, data) return data def test_load_csv(): print(\"Test loading csv\") data =", "glob.glob(os.path.join(\"data\", \"*.csv\")): # key based on their filename f_key = os.path.basename(f).split('.')[0] print(\"Loading:\", f_key)", "None: return obj # otherwise load from csv else: data = pd.read_csv(filename, encoding=\"latin_1\")", "test_load_csv(): print(\"Test loading csv\") data = 
load_csv(\"data/NCAATourneySeeds.csv\") print(data) def main(): data = load_data()", "csv else: data = pd.read_csv(filename, encoding=\"latin_1\") save_object(filename, data) return data def load_data(): pickle_fn", "data) return data def load_data(): pickle_fn = \"data/loaded_data\" data = load_object(pickle_fn) # load", "data def load_data(): pickle_fn = \"data/loaded_data\" data = load_object(pickle_fn) # load data from", "it exists if data != None: return data # otherwise load from csv", "data) return data def test_load_csv(): print(\"Test loading csv\") data = load_csv(\"data/NCAATourneySeeds.csv\") print(data) def", "os.path.basename(f).split('.')[0] print(\"Loading:\", f_key) data[f_key] = load_csv(f) save_object(pickle_fn, data) return data def test_load_csv(): print(\"Test", "\"data/loaded_data\" data = load_object(pickle_fn) # load data from pickle file if it exists", "print(\"Available DataSet Keys: \") for key in data.keys(): print(\"\\t\"+key) if __name__ == \"__main__\":", "file if it exists if data != None: return data # otherwise load", "import save_object, load_object def load_csv(filename): # load data from pickle file if it", "from csv else: data = {} # load all csv files in data", "f in glob.glob(os.path.join(\"data\", \"*.csv\")): # key based on their filename f_key = os.path.basename(f).split('.')[0]", "load data from pickle file if it exists obj = load_object(filename) if obj", "files \"\"\" import pandas as pd import os import glob from save_data import", "otherwise load from csv else: data = {} # load all csv files", "load_object(pickle_fn) # load data from pickle file if it exists if data !=", "files in data directory for f in glob.glob(os.path.join(\"data\", \"*.csv\")): # key based on", "# otherwise load from csv else: data = pd.read_csv(filename, encoding=\"latin_1\") save_object(filename, data) return", "import os import glob from save_data import save_object, load_object def load_csv(filename): # load", "# key based on their filename 
f_key = os.path.basename(f).split('.')[0] print(\"Loading:\", f_key) data[f_key] =", "data from csv files \"\"\" import pandas as pd import os import glob", "from pickle file if it exists if data != None: return data #", "their filename f_key = os.path.basename(f).split('.')[0] print(\"Loading:\", f_key) data[f_key] = load_csv(f) save_object(pickle_fn, data) return", "\"*.csv\")): # key based on their filename f_key = os.path.basename(f).split('.')[0] print(\"Loading:\", f_key) data[f_key]", "= pd.read_csv(filename, encoding=\"latin_1\") save_object(filename, data) return data def load_data(): pickle_fn = \"data/loaded_data\" data", "loading csv\") data = load_csv(\"data/NCAATourneySeeds.csv\") print(data) def main(): data = load_data() print(\"Available DataSet", "!= None: return obj # otherwise load from csv else: data = pd.read_csv(filename,", "data = load_data() print(\"Available DataSet Keys: \") for key in data.keys(): print(\"\\t\"+key) if", "DataSet Keys: \") for key in data.keys(): print(\"\\t\"+key) if __name__ == \"__main__\": main()", "import pandas as pd import os import glob from save_data import save_object, load_object", "f_key) data[f_key] = load_csv(f) save_object(pickle_fn, data) return data def test_load_csv(): print(\"Test loading csv\")", "Read in the data from csv files \"\"\" import pandas as pd import", "exists if data != None: return data # otherwise load from csv else:", "obj != None: return obj # otherwise load from csv else: data =", "return data def test_load_csv(): print(\"Test loading csv\") data = load_csv(\"data/NCAATourneySeeds.csv\") print(data) def main():", "= load_csv(\"data/NCAATourneySeeds.csv\") print(data) def main(): data = load_data() print(\"Available DataSet Keys: \") for", "data[f_key] = load_csv(f) save_object(pickle_fn, data) return data def test_load_csv(): print(\"Test loading csv\") data", "f_key = os.path.basename(f).split('.')[0] print(\"Loading:\", f_key) data[f_key] = load_csv(f) save_object(pickle_fn, data) 
return data def", "from csv else: data = pd.read_csv(filename, encoding=\"latin_1\") save_object(filename, data) return data def load_data():", "None: return data # otherwise load from csv else: data = {} #", "print(\"Test loading csv\") data = load_csv(\"data/NCAATourneySeeds.csv\") print(data) def main(): data = load_data() print(\"Available", "if obj != None: return obj # otherwise load from csv else: data", "= os.path.basename(f).split('.')[0] print(\"Loading:\", f_key) data[f_key] = load_csv(f) save_object(pickle_fn, data) return data def test_load_csv():", "if it exists if data != None: return data # otherwise load from", "csv else: data = {} # load all csv files in data directory", "load data from pickle file if it exists if data != None: return", "load_csv(\"data/NCAATourneySeeds.csv\") print(data) def main(): data = load_data() print(\"Available DataSet Keys: \") for key", "obj = load_object(filename) if obj != None: return obj # otherwise load from", "as pd import os import glob from save_data import save_object, load_object def load_csv(filename):", "\"\"\" import pandas as pd import os import glob from save_data import save_object,", "if it exists obj = load_object(filename) if obj != None: return obj #", "key based on their filename f_key = os.path.basename(f).split('.')[0] print(\"Loading:\", f_key) data[f_key] = load_csv(f)", "!= None: return data # otherwise load from csv else: data = {}", "load_csv(filename): # load data from pickle file if it exists obj = load_object(filename)", "= load_object(pickle_fn) # load data from pickle file if it exists if data", "exists obj = load_object(filename) if obj != None: return obj # otherwise load", "filename f_key = os.path.basename(f).split('.')[0] print(\"Loading:\", f_key) data[f_key] = load_csv(f) save_object(pickle_fn, data) return data", "data = {} # load all csv files in data directory for f", "on their filename f_key = os.path.basename(f).split('.')[0] print(\"Loading:\", f_key) data[f_key] = 
load_csv(f) save_object(pickle_fn, data)", "print(data) def main(): data = load_data() print(\"Available DataSet Keys: \") for key in", "obj # otherwise load from csv else: data = pd.read_csv(filename, encoding=\"latin_1\") save_object(filename, data)", "data from pickle file if it exists obj = load_object(filename) if obj !=", "data directory for f in glob.glob(os.path.join(\"data\", \"*.csv\")): # key based on their filename", "in glob.glob(os.path.join(\"data\", \"*.csv\")): # key based on their filename f_key = os.path.basename(f).split('.')[0] print(\"Loading:\",", "load from csv else: data = {} # load all csv files in", "= \"data/loaded_data\" data = load_object(pickle_fn) # load data from pickle file if it", "directory for f in glob.glob(os.path.join(\"data\", \"*.csv\")): # key based on their filename f_key", "the data from csv files \"\"\" import pandas as pd import os import", "= load_object(filename) if obj != None: return obj # otherwise load from csv", "else: data = {} # load all csv files in data directory for", "load all csv files in data directory for f in glob.glob(os.path.join(\"data\", \"*.csv\")): #", "load from csv else: data = pd.read_csv(filename, encoding=\"latin_1\") save_object(filename, data) return data def", "pickle_fn = \"data/loaded_data\" data = load_object(pickle_fn) # load data from pickle file if", "def main(): data = load_data() print(\"Available DataSet Keys: \") for key in data.keys():", "save_data import save_object, load_object def load_csv(filename): # load data from pickle file if", "load_object(filename) if obj != None: return obj # otherwise load from csv else:", "load_csv(f) save_object(pickle_fn, data) return data def test_load_csv(): print(\"Test loading csv\") data = load_csv(\"data/NCAATourneySeeds.csv\")", "pickle file if it exists if data != None: return data # otherwise", "= load_data() print(\"Available DataSet Keys: \") for key in data.keys(): print(\"\\t\"+key) if __name__", "all csv files in data directory for 
f in glob.glob(os.path.join(\"data\", \"*.csv\")): # key", "csv files \"\"\" import pandas as pd import os import glob from save_data", "def load_csv(filename): # load data from pickle file if it exists obj =", "pickle file if it exists obj = load_object(filename) if obj != None: return", "return obj # otherwise load from csv else: data = pd.read_csv(filename, encoding=\"latin_1\") save_object(filename,", "data def test_load_csv(): print(\"Test loading csv\") data = load_csv(\"data/NCAATourneySeeds.csv\") print(data) def main(): data", "os import glob from save_data import save_object, load_object def load_csv(filename): # load data", "import glob from save_data import save_object, load_object def load_csv(filename): # load data from", "csv\") data = load_csv(\"data/NCAATourneySeeds.csv\") print(data) def main(): data = load_data() print(\"Available DataSet Keys:", "= {} # load all csv files in data directory for f in", "data = load_csv(\"data/NCAATourneySeeds.csv\") print(data) def main(): data = load_data() print(\"Available DataSet Keys: \")", "glob from save_data import save_object, load_object def load_csv(filename): # load data from pickle", "otherwise load from csv else: data = pd.read_csv(filename, encoding=\"latin_1\") save_object(filename, data) return data", "based on their filename f_key = os.path.basename(f).split('.')[0] print(\"Loading:\", f_key) data[f_key] = load_csv(f) save_object(pickle_fn,", "data = pd.read_csv(filename, encoding=\"latin_1\") save_object(filename, data) return data def load_data(): pickle_fn = \"data/loaded_data\"", "data = load_object(pickle_fn) # load data from pickle file if it exists if", "def test_load_csv(): print(\"Test loading csv\") data = load_csv(\"data/NCAATourneySeeds.csv\") print(data) def main(): data =", "csv files in data directory for f in glob.glob(os.path.join(\"data\", \"*.csv\")): # key based", "for f in glob.glob(os.path.join(\"data\", \"*.csv\")): # key based on their filename f_key =", 
"encoding=\"latin_1\") save_object(filename, data) return data def load_data(): pickle_fn = \"data/loaded_data\" data = load_object(pickle_fn)", "data from pickle file if it exists if data != None: return data", "file if it exists obj = load_object(filename) if obj != None: return obj", "return data # otherwise load from csv else: data = {} # load", "print(\"Loading:\", f_key) data[f_key] = load_csv(f) save_object(pickle_fn, data) return data def test_load_csv(): print(\"Test loading", "from pickle file if it exists obj = load_object(filename) if obj != None:", "in data directory for f in glob.glob(os.path.join(\"data\", \"*.csv\")): # key based on their", "# load all csv files in data directory for f in glob.glob(os.path.join(\"data\", \"*.csv\")):", "load_data() print(\"Available DataSet Keys: \") for key in data.keys(): print(\"\\t\"+key) if __name__ ==", "from csv files \"\"\" import pandas as pd import os import glob from", "load_data(): pickle_fn = \"data/loaded_data\" data = load_object(pickle_fn) # load data from pickle file", "# otherwise load from csv else: data = {} # load all csv", "pd.read_csv(filename, encoding=\"latin_1\") save_object(filename, data) return data def load_data(): pickle_fn = \"data/loaded_data\" data =", "load_object def load_csv(filename): # load data from pickle file if it exists obj", "else: data = pd.read_csv(filename, encoding=\"latin_1\") save_object(filename, data) return data def load_data(): pickle_fn =", "if data != None: return data # otherwise load from csv else: data" ]
[ "out = timeit.repeat(benchmark_func, repeat=repeat, number=number) # remove warmup out = out[warmup:] # calculate", "/ len(out) std = (sum((x - mean) ** 2 for x in out)", "std = (sum((x - mean) ** 2 for x in out) / len(out))", "should be at leat 2x smaller than repeat.\" out = timeit.repeat(benchmark_func, repeat=repeat, number=number)", "timeit.repeat(benchmark_func, repeat=repeat, number=number) # remove warmup out = out[warmup:] # calculate statistics mean", "out[warmup:] # calculate statistics mean = sum(out) / len(out) std = (sum((x -", "(sum((x - mean) ** 2 for x in out) / len(out)) ** 0.5", "def benchmark_speed(benchmark_func, repeat=1000, number=1, warmup=100): assert repeat >= 2 * warmup, \"Warmup should", "warmup=100): assert repeat >= 2 * warmup, \"Warmup should be at leat 2x", "remove warmup out = out[warmup:] # calculate statistics mean = sum(out) / len(out)", "out = out[warmup:] # calculate statistics mean = sum(out) / len(out) std =", "number=number) # remove warmup out = out[warmup:] # calculate statistics mean = sum(out)", "\"Warmup should be at leat 2x smaller than repeat.\" out = timeit.repeat(benchmark_func, repeat=repeat,", "# remove warmup out = out[warmup:] # calculate statistics mean = sum(out) /", "warmup out = out[warmup:] # calculate statistics mean = sum(out) / len(out) std", "* warmup, \"Warmup should be at leat 2x smaller than repeat.\" out =", "2 for x in out) / len(out)) ** 0.5 return dict(min=min(out), max=max(out), mean=mean,", "leat 2x smaller than repeat.\" out = timeit.repeat(benchmark_func, repeat=repeat, number=number) # remove warmup", "than repeat.\" out = timeit.repeat(benchmark_func, repeat=repeat, number=number) # remove warmup out = out[warmup:]", "= sum(out) / len(out) std = (sum((x - mean) ** 2 for x", "2 * warmup, \"Warmup should be at leat 2x smaller than repeat.\" out", "assert repeat >= 2 * warmup, \"Warmup should be at leat 2x smaller", "mean) ** 2 for x in out) / len(out)) ** 0.5 return dict(min=min(out),", "at 
leat 2x smaller than repeat.\" out = timeit.repeat(benchmark_func, repeat=repeat, number=number) # remove", "be at leat 2x smaller than repeat.\" out = timeit.repeat(benchmark_func, repeat=repeat, number=number) #", "** 2 for x in out) / len(out)) ** 0.5 return dict(min=min(out), max=max(out),", "calculate statistics mean = sum(out) / len(out) std = (sum((x - mean) **", ">= 2 * warmup, \"Warmup should be at leat 2x smaller than repeat.\"", "x in out) / len(out)) ** 0.5 return dict(min=min(out), max=max(out), mean=mean, std=std, data=out)", "timeit def benchmark_speed(benchmark_func, repeat=1000, number=1, warmup=100): assert repeat >= 2 * warmup, \"Warmup", "mean = sum(out) / len(out) std = (sum((x - mean) ** 2 for", "warmup, \"Warmup should be at leat 2x smaller than repeat.\" out = timeit.repeat(benchmark_func,", "import timeit def benchmark_speed(benchmark_func, repeat=1000, number=1, warmup=100): assert repeat >= 2 * warmup,", "= timeit.repeat(benchmark_func, repeat=repeat, number=number) # remove warmup out = out[warmup:] # calculate statistics", "repeat=repeat, number=number) # remove warmup out = out[warmup:] # calculate statistics mean =", "2x smaller than repeat.\" out = timeit.repeat(benchmark_func, repeat=repeat, number=number) # remove warmup out", "smaller than repeat.\" out = timeit.repeat(benchmark_func, repeat=repeat, number=number) # remove warmup out =", "repeat.\" out = timeit.repeat(benchmark_func, repeat=repeat, number=number) # remove warmup out = out[warmup:] #", "# calculate statistics mean = sum(out) / len(out) std = (sum((x - mean)", "len(out) std = (sum((x - mean) ** 2 for x in out) /", "repeat=1000, number=1, warmup=100): assert repeat >= 2 * warmup, \"Warmup should be at", "repeat >= 2 * warmup, \"Warmup should be at leat 2x smaller than", "statistics mean = sum(out) / len(out) std = (sum((x - mean) ** 2", "number=1, warmup=100): assert repeat >= 2 * warmup, \"Warmup should be at leat", "= out[warmup:] # calculate statistics mean = 
sum(out) / len(out) std = (sum((x", "sum(out) / len(out) std = (sum((x - mean) ** 2 for x in", "for x in out) / len(out)) ** 0.5 return dict(min=min(out), max=max(out), mean=mean, std=std,", "benchmark_speed(benchmark_func, repeat=1000, number=1, warmup=100): assert repeat >= 2 * warmup, \"Warmup should be", "<reponame>ToriML/DNN-bench<filename>bench/bench.py import timeit def benchmark_speed(benchmark_func, repeat=1000, number=1, warmup=100): assert repeat >= 2 *", "- mean) ** 2 for x in out) / len(out)) ** 0.5 return", "= (sum((x - mean) ** 2 for x in out) / len(out)) **" ]
[ "Vielfachen ist 23. Finden Sie die Summe aller Vielfachen von 3 oder 5", "Aufgabe 1 aus http://projecteuler.net (Deutsche Übersetzung auf http://projekteuler.de) Wenn wir alle natürlichen Zahlen", "Stück der eigentlichen Fage an. \"\"\" summe = # Los gehts ... print(summe)", "wir 3, 5, 6 und 9. Die Summe dieser Vielfachen ist 23. Finden", "vereinfachte Aufgaben, z. B. Finde alle Vielfache von 3 unter 20. Nähere die", "erhalten wir 3, 5, 6 und 9. Die Summe dieser Vielfachen ist 23.", "sind, so erhalten wir 3, 5, 6 und 9. Die Summe dieser Vielfachen", "(Deutsche Übersetzung auf http://projekteuler.de) Wenn wir alle natürlichen Zahlen unter 10 auflisten, die", "http://projecteuler.net (Deutsche Übersetzung auf http://projekteuler.de) Wenn wir alle natürlichen Zahlen unter 10 auflisten,", "die verschiedenen Teile. Löse erst vereinfachte Aufgaben, z. B. Finde alle Vielfache von", "1000. Lösungshilfe: Zerlege die Aufgabenstellung in die verschiedenen Teile. Löse erst vereinfachte Aufgaben,", "Aufgaben, z. B. Finde alle Vielfache von 3 unter 20. Nähere die Aufgabe", "Sie die Summe aller Vielfachen von 3 oder 5 unter 1000. Lösungshilfe: Zerlege", "alle Vielfache von 3 unter 20. Nähere die Aufgabe Stück für Stück der", "z. B. Finde alle Vielfache von 3 unter 20. Nähere die Aufgabe Stück", "Summe dieser Vielfachen ist 23. Finden Sie die Summe aller Vielfachen von 3", "Summe aller Vielfachen von 3 oder 5 unter 1000. Lösungshilfe: Zerlege die Aufgabenstellung", "unter 10 auflisten, die Vielfache von 3 oder 5 sind, so erhalten wir", "Finde alle Vielfache von 3 unter 20. Nähere die Aufgabe Stück für Stück", "Vielfachen von 3 oder 5 unter 1000. 
Lösungshilfe: Zerlege die Aufgabenstellung in die", "wir alle natürlichen Zahlen unter 10 auflisten, die Vielfache von 3 oder 5", "Zahlen unter 10 auflisten, die Vielfache von 3 oder 5 sind, so erhalten", "<filename>Projekteuler/projecteuler_aufgabe001.py \"\"\" Aufgabe 1 aus http://projecteuler.net (Deutsche Übersetzung auf http://projekteuler.de) Wenn wir alle", "Die Summe dieser Vielfachen ist 23. Finden Sie die Summe aller Vielfachen von", "die Vielfache von 3 oder 5 sind, so erhalten wir 3, 5, 6", "Übersetzung auf http://projekteuler.de) Wenn wir alle natürlichen Zahlen unter 10 auflisten, die Vielfache", "auflisten, die Vielfache von 3 oder 5 sind, so erhalten wir 3, 5,", "Lösungshilfe: Zerlege die Aufgabenstellung in die verschiedenen Teile. Löse erst vereinfachte Aufgaben, z.", "von 3 oder 5 unter 1000. Lösungshilfe: Zerlege die Aufgabenstellung in die verschiedenen", "aus http://projecteuler.net (Deutsche Übersetzung auf http://projekteuler.de) Wenn wir alle natürlichen Zahlen unter 10", "3 unter 20. Nähere die Aufgabe Stück für Stück der eigentlichen Fage an.", "Zerlege die Aufgabenstellung in die verschiedenen Teile. Löse erst vereinfachte Aufgaben, z. B.", "ist 23. Finden Sie die Summe aller Vielfachen von 3 oder 5 unter", "verschiedenen Teile. Löse erst vereinfachte Aufgaben, z. B. Finde alle Vielfache von 3", "3, 5, 6 und 9. Die Summe dieser Vielfachen ist 23. Finden Sie", "natürlichen Zahlen unter 10 auflisten, die Vielfache von 3 oder 5 sind, so", "für Stück der eigentlichen Fage an. \"\"\" summe = # Los gehts ...", "auf http://projekteuler.de) Wenn wir alle natürlichen Zahlen unter 10 auflisten, die Vielfache von", "erst vereinfachte Aufgaben, z. B. Finde alle Vielfache von 3 unter 20. Nähere", "Finden Sie die Summe aller Vielfachen von 3 oder 5 unter 1000. Lösungshilfe:", "unter 1000. Lösungshilfe: Zerlege die Aufgabenstellung in die verschiedenen Teile. 
Löse erst vereinfachte", "10 auflisten, die Vielfache von 3 oder 5 sind, so erhalten wir 3,", "aller Vielfachen von 3 oder 5 unter 1000. Lösungshilfe: Zerlege die Aufgabenstellung in", "alle natürlichen Zahlen unter 10 auflisten, die Vielfache von 3 oder 5 sind,", "B. Finde alle Vielfache von 3 unter 20. Nähere die Aufgabe Stück für", "http://projekteuler.de) Wenn wir alle natürlichen Zahlen unter 10 auflisten, die Vielfache von 3", "\"\"\" Aufgabe 1 aus http://projecteuler.net (Deutsche Übersetzung auf http://projekteuler.de) Wenn wir alle natürlichen", "3 oder 5 unter 1000. Lösungshilfe: Zerlege die Aufgabenstellung in die verschiedenen Teile.", "Vielfache von 3 unter 20. Nähere die Aufgabe Stück für Stück der eigentlichen", "Teile. Löse erst vereinfachte Aufgaben, z. B. Finde alle Vielfache von 3 unter", "20. Nähere die Aufgabe Stück für Stück der eigentlichen Fage an. \"\"\" summe", "Löse erst vereinfachte Aufgaben, z. B. Finde alle Vielfache von 3 unter 20.", "23. Finden Sie die Summe aller Vielfachen von 3 oder 5 unter 1000.", "3 oder 5 sind, so erhalten wir 3, 5, 6 und 9. Die", "1 aus http://projecteuler.net (Deutsche Übersetzung auf http://projekteuler.de) Wenn wir alle natürlichen Zahlen unter", "5 unter 1000. Lösungshilfe: Zerlege die Aufgabenstellung in die verschiedenen Teile. Löse erst", "in die verschiedenen Teile. Löse erst vereinfachte Aufgaben, z. B. Finde alle Vielfache", "unter 20. Nähere die Aufgabe Stück für Stück der eigentlichen Fage an. \"\"\"", "Aufgabe Stück für Stück der eigentlichen Fage an. \"\"\" summe = # Los", "von 3 unter 20. Nähere die Aufgabe Stück für Stück der eigentlichen Fage", "oder 5 sind, so erhalten wir 3, 5, 6 und 9. Die Summe", "oder 5 unter 1000. Lösungshilfe: Zerlege die Aufgabenstellung in die verschiedenen Teile. Löse", "Stück für Stück der eigentlichen Fage an. \"\"\" summe = # Los gehts", "9. Die Summe dieser Vielfachen ist 23. 
Finden Sie die Summe aller Vielfachen", "die Aufgabe Stück für Stück der eigentlichen Fage an. \"\"\" summe = #", "Wenn wir alle natürlichen Zahlen unter 10 auflisten, die Vielfache von 3 oder", "Nähere die Aufgabe Stück für Stück der eigentlichen Fage an. \"\"\" summe =", "die Summe aller Vielfachen von 3 oder 5 unter 1000. Lösungshilfe: Zerlege die", "von 3 oder 5 sind, so erhalten wir 3, 5, 6 und 9.", "5, 6 und 9. Die Summe dieser Vielfachen ist 23. Finden Sie die", "so erhalten wir 3, 5, 6 und 9. Die Summe dieser Vielfachen ist", "Vielfache von 3 oder 5 sind, so erhalten wir 3, 5, 6 und", "dieser Vielfachen ist 23. Finden Sie die Summe aller Vielfachen von 3 oder", "und 9. Die Summe dieser Vielfachen ist 23. Finden Sie die Summe aller", "Aufgabenstellung in die verschiedenen Teile. Löse erst vereinfachte Aufgaben, z. B. Finde alle", "die Aufgabenstellung in die verschiedenen Teile. Löse erst vereinfachte Aufgaben, z. B. Finde", "6 und 9. Die Summe dieser Vielfachen ist 23. Finden Sie die Summe", "5 sind, so erhalten wir 3, 5, 6 und 9. Die Summe dieser" ]
[ "static classes. The resulting static class cannot be instantiated. If the __init__ method", "static(cls) -> Type: \"\"\" Decorator for defining static classes. The resulting static class", "defining static classes. The resulting static class cannot be instantiated. If the __init__", "static class cannot be instantiated. If the __init__ method is defined, then it", "classes. The resulting static class cannot be instantiated. If the __init__ method is", "import NoReturn, Type __all__ = [ 'static' ] def _raise_init(): raise NotImplementedError('Static classes", "def _raise_init(): raise NotImplementedError('Static classes cannot be instantiated') def static(cls) -> Type: \"\"\"", "\"\"\" Decorator for defining static classes. The resulting static class cannot be instantiated.", "instantiated') def static(cls) -> Type: \"\"\" Decorator for defining static classes. The resulting", "class cannot be instantiated. If the __init__ method is defined, then it is", "cannot be instantiated') def static(cls) -> Type: \"\"\" Decorator for defining static classes.", "be instantiated. If the __init__ method is defined, then it is invoked with", "defined, then it is invoked with None as the sole argument when the", "lambda _: None) cls.__new__ = on_init cls.__init__ = on_init cls.__call__ = on_init init_function(None)", "from typing import NoReturn, Type __all__ = [ 'static' ] def _raise_init(): raise", "NoReturn, Type __all__ = [ 'static' ] def _raise_init(): raise NotImplementedError('Static classes cannot", "Type: \"\"\" Decorator for defining static classes. The resulting static class cannot be", "] def _raise_init(): raise NotImplementedError('Static classes cannot be instantiated') def static(cls) -> Type:", "typing import NoReturn, Type __all__ = [ 'static' ] def _raise_init(): raise NotImplementedError('Static", "the __init__ method is defined, then it is invoked with None as the", "sole argument when the static class is defined. 
\"\"\" def on_init(*_args, **_kwargs) ->", "be instantiated') def static(cls) -> Type: \"\"\" Decorator for defining static classes. The", "def on_init(*_args, **_kwargs) -> NoReturn: _raise_init() init_function = getattr(cls, '__init__', lambda _: None)", "Decorator for defining static classes. The resulting static class cannot be instantiated. If", "cannot be instantiated. If the __init__ method is defined, then it is invoked", "init_function = getattr(cls, '__init__', lambda _: None) cls.__new__ = on_init cls.__init__ = on_init", "as the sole argument when the static class is defined. \"\"\" def on_init(*_args,", "defined. \"\"\" def on_init(*_args, **_kwargs) -> NoReturn: _raise_init() init_function = getattr(cls, '__init__', lambda", "Type __all__ = [ 'static' ] def _raise_init(): raise NotImplementedError('Static classes cannot be", "method is defined, then it is invoked with None as the sole argument", "None as the sole argument when the static class is defined. \"\"\" def", "the static class is defined. \"\"\" def on_init(*_args, **_kwargs) -> NoReturn: _raise_init() init_function", "instantiated. If the __init__ method is defined, then it is invoked with None", "**_kwargs) -> NoReturn: _raise_init() init_function = getattr(cls, '__init__', lambda _: None) cls.__new__ =", "_: None) cls.__new__ = on_init cls.__init__ = on_init cls.__call__ = on_init init_function(None) return", "NoReturn: _raise_init() init_function = getattr(cls, '__init__', lambda _: None) cls.__new__ = on_init cls.__init__", "resulting static class cannot be instantiated. If the __init__ method is defined, then", "with None as the sole argument when the static class is defined. \"\"\"", "__all__ = [ 'static' ] def _raise_init(): raise NotImplementedError('Static classes cannot be instantiated')", "'static' ] def _raise_init(): raise NotImplementedError('Static classes cannot be instantiated') def static(cls) ->", "is defined. 
\"\"\" def on_init(*_args, **_kwargs) -> NoReturn: _raise_init() init_function = getattr(cls, '__init__',", "__init__ method is defined, then it is invoked with None as the sole", "it is invoked with None as the sole argument when the static class", "= getattr(cls, '__init__', lambda _: None) cls.__new__ = on_init cls.__init__ = on_init cls.__call__", "is defined, then it is invoked with None as the sole argument when", "class is defined. \"\"\" def on_init(*_args, **_kwargs) -> NoReturn: _raise_init() init_function = getattr(cls,", "The resulting static class cannot be instantiated. If the __init__ method is defined,", "the sole argument when the static class is defined. \"\"\" def on_init(*_args, **_kwargs)", "argument when the static class is defined. \"\"\" def on_init(*_args, **_kwargs) -> NoReturn:", "-> NoReturn: _raise_init() init_function = getattr(cls, '__init__', lambda _: None) cls.__new__ = on_init", "getattr(cls, '__init__', lambda _: None) cls.__new__ = on_init cls.__init__ = on_init cls.__call__ =", "for defining static classes. The resulting static class cannot be instantiated. If the", "classes cannot be instantiated') def static(cls) -> Type: \"\"\" Decorator for defining static", "is invoked with None as the sole argument when the static class is", "_raise_init(): raise NotImplementedError('Static classes cannot be instantiated') def static(cls) -> Type: \"\"\" Decorator", "If the __init__ method is defined, then it is invoked with None as", "then it is invoked with None as the sole argument when the static", "-> Type: \"\"\" Decorator for defining static classes. The resulting static class cannot", "when the static class is defined. \"\"\" def on_init(*_args, **_kwargs) -> NoReturn: _raise_init()", "def static(cls) -> Type: \"\"\" Decorator for defining static classes. 
The resulting static", "NotImplementedError('Static classes cannot be instantiated') def static(cls) -> Type: \"\"\" Decorator for defining", "invoked with None as the sole argument when the static class is defined.", "static class is defined. \"\"\" def on_init(*_args, **_kwargs) -> NoReturn: _raise_init() init_function =", "\"\"\" def on_init(*_args, **_kwargs) -> NoReturn: _raise_init() init_function = getattr(cls, '__init__', lambda _:", "_raise_init() init_function = getattr(cls, '__init__', lambda _: None) cls.__new__ = on_init cls.__init__ =", "raise NotImplementedError('Static classes cannot be instantiated') def static(cls) -> Type: \"\"\" Decorator for", "= [ 'static' ] def _raise_init(): raise NotImplementedError('Static classes cannot be instantiated') def", "None) cls.__new__ = on_init cls.__init__ = on_init cls.__call__ = on_init init_function(None) return cls", "'__init__', lambda _: None) cls.__new__ = on_init cls.__init__ = on_init cls.__call__ = on_init", "on_init(*_args, **_kwargs) -> NoReturn: _raise_init() init_function = getattr(cls, '__init__', lambda _: None) cls.__new__", "[ 'static' ] def _raise_init(): raise NotImplementedError('Static classes cannot be instantiated') def static(cls)" ]
[ "chunk.headers, chunk.url chunk.close() return (headers, url) def _normalise_url(parsed, remove_frag: bool = True): d:", "UA_d = \"Mozilla/5.0 (Windows NT 10.0; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/67.0.3526.73 Safari/537.36\"", "Pixel Build/OPM2.171019.029; wv) AppleWebKit/537.36 (KHTML, like Gecko) Version/4.0 Chrome/68.0.3325.109 Mobile Safari/537.36\" UA_d =", "NT 10.0; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/67.0.3526.73 Safari/537.36\" basic_headers = { \"Accept-Encoding\":", "quote and s[-1] == quote: return s[1:-1] return s def int_or_none(i: any): if", "parsed._asdict() d[\"scheme\"] = d[\"scheme\"].lower() d[\"netloc\"] = d[\"netloc\"].lower() d[\"fragment\"] = \"\" return ParseResult(**d) def", "open(_path_join(script_dir, \"mimes.json\")) as f: mime_types = json_load(f) UA_m = \"Mozilla/5.0 (Linux; Android 8.1.0;", "d[\"scheme\"].lower() d[\"netloc\"] = d[\"netloc\"].lower() d[\"fragment\"] = \"\" return ParseResult(**d) def remove_quotes(s): if s", "Chrome/67.0.3526.73 Safari/537.36\" basic_headers = { \"Accept-Encoding\": \"gzip, deflate\", \"User-Agent\": UA_d, \"Upgrade-Insecure-Requests\": \"1\", \"Accept-Language\":", "for _ in chunk.iter_content(byte_len): headers, url = chunk.headers, chunk.url chunk.close() return (headers, url)", "= \"\" return ParseResult(**d) def remove_quotes(s): if s is None or len(s) <", "chunk.iter_content(byte_len): headers, url = chunk.headers, chunk.url chunk.close() return (headers, url) def _normalise_url(parsed, remove_frag:", "True): d: dict = parsed._asdict() d[\"scheme\"] = d[\"scheme\"].lower() d[\"netloc\"] = d[\"netloc\"].lower() d[\"fragment\"] =", "def int_or_none(i: any): if isinstance(i, int): return i try: return int(i) except: return", "WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/67.0.3526.73 Safari/537.36\" basic_headers = { \"Accept-Encoding\": \"gzip, deflate\",", "AppleWebKit/537.36 (KHTML, like Gecko) Chrome/67.0.3526.73 Safari/537.36\" basic_headers = { 
\"Accept-Encoding\": \"gzip, deflate\", \"User-Agent\":", "realpath(__file__) script_dir = dirname(script_loc) del dirname del realpath mime_types: dict with open(_path_join(script_dir, \"mimes.json\"))", "UA_m = \"Mozilla/5.0 (Linux; Android 8.1.0; Pixel Build/OPM2.171019.029; wv) AppleWebKit/537.36 (KHTML, like Gecko)", "wv) AppleWebKit/537.36 (KHTML, like Gecko) Version/4.0 Chrome/68.0.3325.109 Mobile Safari/537.36\" UA_d = \"Mozilla/5.0 (Windows", "url, headers=basic_headers, allow_redirects=True, stream=True ) as chunk: for _ in chunk.iter_content(byte_len): headers, url", "chunk.close() return (headers, url) def _normalise_url(parsed, remove_frag: bool = True): d: dict =", "for quote in ('\"', \"'\"): if s[0] == quote and s[-1] == quote:", "quote: return s[1:-1] return s def int_or_none(i: any): if isinstance(i, int): return i", "\"text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,image/apng,*/*;q=0.8\", } def _abort_request_after(url: str, byte_len: int = 1024): with requests.get( url, headers=basic_headers,", "basic_headers = { \"Accept-Encoding\": \"gzip, deflate\", \"User-Agent\": UA_d, \"Upgrade-Insecure-Requests\": \"1\", \"Accept-Language\": \"en-GB,en-US;q=0.9,en;q=0.8\", \"dnt\":", "= \"Mozilla/5.0 (Linux; Android 8.1.0; Pixel Build/OPM2.171019.029; wv) AppleWebKit/537.36 (KHTML, like Gecko) Version/4.0", "return s def int_or_none(i: any): if isinstance(i, int): return i try: return int(i)", "10.0; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/67.0.3526.73 Safari/537.36\" basic_headers = { \"Accept-Encoding\": \"gzip,", "dict = parsed._asdict() d[\"scheme\"] = d[\"scheme\"].lower() d[\"netloc\"] = d[\"netloc\"].lower() d[\"fragment\"] = \"\" return", "2: return s for quote in ('\"', \"'\"): if s[0] == quote and", "realpath, dirname, join as _path_join import requests from json import load as json_load", "< 2: return s for quote in ('\"', \"'\"): if s[0] == quote", "del realpath mime_types: dict with open(_path_join(script_dir, 
\"mimes.json\")) as f: mime_types = json_load(f) UA_m", "= dirname(script_loc) del dirname del realpath mime_types: dict with open(_path_join(script_dir, \"mimes.json\")) as f:", "requests from json import load as json_load script_loc = realpath(__file__) script_dir = dirname(script_loc)", "_abort_request_after(url: str, byte_len: int = 1024): with requests.get( url, headers=basic_headers, allow_redirects=True, stream=True )", ") as chunk: for _ in chunk.iter_content(byte_len): headers, url = chunk.headers, chunk.url chunk.close()", "\"gzip, deflate\", \"User-Agent\": UA_d, \"Upgrade-Insecure-Requests\": \"1\", \"Accept-Language\": \"en-GB,en-US;q=0.9,en;q=0.8\", \"dnt\": \"1\", \"Accept\": \"text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,image/apng,*/*;q=0.8\", }", "as f: mime_types = json_load(f) UA_m = \"Mozilla/5.0 (Linux; Android 8.1.0; Pixel Build/OPM2.171019.029;", "UA_d, \"Upgrade-Insecure-Requests\": \"1\", \"Accept-Language\": \"en-GB,en-US;q=0.9,en;q=0.8\", \"dnt\": \"1\", \"Accept\": \"text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,image/apng,*/*;q=0.8\", } def _abort_request_after(url: str,", "= chunk.headers, chunk.url chunk.close() return (headers, url) def _normalise_url(parsed, remove_frag: bool = True):", "\"Upgrade-Insecure-Requests\": \"1\", \"Accept-Language\": \"en-GB,en-US;q=0.9,en;q=0.8\", \"dnt\": \"1\", \"Accept\": \"text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,image/apng,*/*;q=0.8\", } def _abort_request_after(url: str, byte_len:", "int_or_none(i: any): if isinstance(i, int): return i try: return int(i) except: return None", "join as _path_join import requests from json import load as json_load script_loc =", "s def int_or_none(i: any): if isinstance(i, int): return i try: return int(i) except:", "def remove_quotes(s): if s is None or len(s) < 2: return s for", "_normalise_url(parsed, remove_frag: bool = True): d: dict = parsed._asdict() d[\"scheme\"] = d[\"scheme\"].lower() 
d[\"netloc\"]", "dirname, join as _path_join import requests from json import load as json_load script_loc", "Mobile Safari/537.36\" UA_d = \"Mozilla/5.0 (Windows NT 10.0; WOW64) AppleWebKit/537.36 (KHTML, like Gecko)", "= d[\"netloc\"].lower() d[\"fragment\"] = \"\" return ParseResult(**d) def remove_quotes(s): if s is None", "_path_join import requests from json import load as json_load script_loc = realpath(__file__) script_dir", "Safari/537.36\" basic_headers = { \"Accept-Encoding\": \"gzip, deflate\", \"User-Agent\": UA_d, \"Upgrade-Insecure-Requests\": \"1\", \"Accept-Language\": \"en-GB,en-US;q=0.9,en;q=0.8\",", "d[\"netloc\"] = d[\"netloc\"].lower() d[\"fragment\"] = \"\" return ParseResult(**d) def remove_quotes(s): if s is", "\"mimes.json\")) as f: mime_types = json_load(f) UA_m = \"Mozilla/5.0 (Linux; Android 8.1.0; Pixel", "8.1.0; Pixel Build/OPM2.171019.029; wv) AppleWebKit/537.36 (KHTML, like Gecko) Version/4.0 Chrome/68.0.3325.109 Mobile Safari/537.36\" UA_d", "and s[-1] == quote: return s[1:-1] return s def int_or_none(i: any): if isinstance(i,", "AppleWebKit/537.36 (KHTML, like Gecko) Version/4.0 Chrome/68.0.3325.109 Mobile Safari/537.36\" UA_d = \"Mozilla/5.0 (Windows NT", "{ \"Accept-Encoding\": \"gzip, deflate\", \"User-Agent\": UA_d, \"Upgrade-Insecure-Requests\": \"1\", \"Accept-Language\": \"en-GB,en-US;q=0.9,en;q=0.8\", \"dnt\": \"1\", \"Accept\":", "int = 1024): with requests.get( url, headers=basic_headers, allow_redirects=True, stream=True ) as chunk: for", "bool = True): d: dict = parsed._asdict() d[\"scheme\"] = d[\"scheme\"].lower() d[\"netloc\"] = d[\"netloc\"].lower()", "import load as json_load script_loc = realpath(__file__) script_dir = dirname(script_loc) del dirname del", "Chrome/68.0.3325.109 Mobile Safari/537.36\" UA_d = \"Mozilla/5.0 (Windows NT 10.0; WOW64) AppleWebKit/537.36 (KHTML, like", "\"\" return ParseResult(**d) def remove_quotes(s): if s is None or len(s) < 2:", "def _abort_request_after(url: str, byte_len: int = 
1024): with requests.get( url, headers=basic_headers, allow_redirects=True, stream=True", "mime_types: dict with open(_path_join(script_dir, \"mimes.json\")) as f: mime_types = json_load(f) UA_m = \"Mozilla/5.0", "byte_len: int = 1024): with requests.get( url, headers=basic_headers, allow_redirects=True, stream=True ) as chunk:", "\"1\", \"Accept\": \"text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,image/apng,*/*;q=0.8\", } def _abort_request_after(url: str, byte_len: int = 1024): with requests.get(", "(headers, url) def _normalise_url(parsed, remove_frag: bool = True): d: dict = parsed._asdict() d[\"scheme\"]", "= parsed._asdict() d[\"scheme\"] = d[\"scheme\"].lower() d[\"netloc\"] = d[\"netloc\"].lower() d[\"fragment\"] = \"\" return ParseResult(**d)", "== quote: return s[1:-1] return s def int_or_none(i: any): if isinstance(i, int): return", "\"Accept-Language\": \"en-GB,en-US;q=0.9,en;q=0.8\", \"dnt\": \"1\", \"Accept\": \"text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,image/apng,*/*;q=0.8\", } def _abort_request_after(url: str, byte_len: int =", "or len(s) < 2: return s for quote in ('\"', \"'\"): if s[0]", "= \"Mozilla/5.0 (Windows NT 10.0; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/67.0.3526.73 Safari/537.36\" basic_headers", "(Linux; Android 8.1.0; Pixel Build/OPM2.171019.029; wv) AppleWebKit/537.36 (KHTML, like Gecko) Version/4.0 Chrome/68.0.3325.109 Mobile", "as _path_join import requests from json import load as json_load script_loc = realpath(__file__)", "from os.path import realpath, dirname, join as _path_join import requests from json import", "\"en-GB,en-US;q=0.9,en;q=0.8\", \"dnt\": \"1\", \"Accept\": \"text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,image/apng,*/*;q=0.8\", } def _abort_request_after(url: str, byte_len: int = 1024):", "as chunk: for _ in chunk.iter_content(byte_len): headers, url = chunk.headers, chunk.url chunk.close() return", "stream=True ) as chunk: for _ in 
chunk.iter_content(byte_len): headers, url = chunk.headers, chunk.url", "realpath mime_types: dict with open(_path_join(script_dir, \"mimes.json\")) as f: mime_types = json_load(f) UA_m =", "s[1:-1] return s def int_or_none(i: any): if isinstance(i, int): return i try: return", "deflate\", \"User-Agent\": UA_d, \"Upgrade-Insecure-Requests\": \"1\", \"Accept-Language\": \"en-GB,en-US;q=0.9,en;q=0.8\", \"dnt\": \"1\", \"Accept\": \"text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,image/apng,*/*;q=0.8\", } def", "(Windows NT 10.0; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/67.0.3526.73 Safari/537.36\" basic_headers = {", "if s is None or len(s) < 2: return s for quote in", "url = chunk.headers, chunk.url chunk.close() return (headers, url) def _normalise_url(parsed, remove_frag: bool =", "d[\"netloc\"].lower() d[\"fragment\"] = \"\" return ParseResult(**d) def remove_quotes(s): if s is None or", "quote in ('\"', \"'\"): if s[0] == quote and s[-1] == quote: return", "Gecko) Chrome/67.0.3526.73 Safari/537.36\" basic_headers = { \"Accept-Encoding\": \"gzip, deflate\", \"User-Agent\": UA_d, \"Upgrade-Insecure-Requests\": \"1\",", "Version/4.0 Chrome/68.0.3325.109 Mobile Safari/537.36\" UA_d = \"Mozilla/5.0 (Windows NT 10.0; WOW64) AppleWebKit/537.36 (KHTML,", "s[0] == quote and s[-1] == quote: return s[1:-1] return s def int_or_none(i:", "chunk.url chunk.close() return (headers, url) def _normalise_url(parsed, remove_frag: bool = True): d: dict", "str, byte_len: int = 1024): with requests.get( url, headers=basic_headers, allow_redirects=True, stream=True ) as", "\"'\"): if s[0] == quote and s[-1] == quote: return s[1:-1] return s", "= { \"Accept-Encoding\": \"gzip, deflate\", \"User-Agent\": UA_d, \"Upgrade-Insecure-Requests\": \"1\", \"Accept-Language\": \"en-GB,en-US;q=0.9,en;q=0.8\", \"dnt\": \"1\",", "like Gecko) Version/4.0 Chrome/68.0.3325.109 Mobile Safari/537.36\" UA_d = \"Mozilla/5.0 (Windows NT 10.0; WOW64)", "= realpath(__file__) 
script_dir = dirname(script_loc) del dirname del realpath mime_types: dict with open(_path_join(script_dir,", "from json import load as json_load script_loc = realpath(__file__) script_dir = dirname(script_loc) del", "with open(_path_join(script_dir, \"mimes.json\")) as f: mime_types = json_load(f) UA_m = \"Mozilla/5.0 (Linux; Android", "\"dnt\": \"1\", \"Accept\": \"text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,image/apng,*/*;q=0.8\", } def _abort_request_after(url: str, byte_len: int = 1024): with", "import requests from json import load as json_load script_loc = realpath(__file__) script_dir =", "1024): with requests.get( url, headers=basic_headers, allow_redirects=True, stream=True ) as chunk: for _ in", "= d[\"scheme\"].lower() d[\"netloc\"] = d[\"netloc\"].lower() d[\"fragment\"] = \"\" return ParseResult(**d) def remove_quotes(s): if", "import ParseResult from os.path import realpath, dirname, join as _path_join import requests from", "\"Accept\": \"text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,image/apng,*/*;q=0.8\", } def _abort_request_after(url: str, byte_len: int = 1024): with requests.get( url,", "\"1\", \"Accept-Language\": \"en-GB,en-US;q=0.9,en;q=0.8\", \"dnt\": \"1\", \"Accept\": \"text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,image/apng,*/*;q=0.8\", } def _abort_request_after(url: str, byte_len: int", "in chunk.iter_content(byte_len): headers, url = chunk.headers, chunk.url chunk.close() return (headers, url) def _normalise_url(parsed,", "from urllib.parse import ParseResult from os.path import realpath, dirname, join as _path_join import", "return s for quote in ('\"', \"'\"): if s[0] == quote and s[-1]", "(KHTML, like Gecko) Version/4.0 Chrome/68.0.3325.109 Mobile Safari/537.36\" UA_d = \"Mozilla/5.0 (Windows NT 10.0;", "def _normalise_url(parsed, remove_frag: bool = True): d: dict = parsed._asdict() d[\"scheme\"] = d[\"scheme\"].lower()", "dict with open(_path_join(script_dir, 
\"mimes.json\")) as f: mime_types = json_load(f) UA_m = \"Mozilla/5.0 (Linux;", "None or len(s) < 2: return s for quote in ('\"', \"'\"): if", "Android 8.1.0; Pixel Build/OPM2.171019.029; wv) AppleWebKit/537.36 (KHTML, like Gecko) Version/4.0 Chrome/68.0.3325.109 Mobile Safari/537.36\"", "ParseResult(**d) def remove_quotes(s): if s is None or len(s) < 2: return s", "urllib.parse import ParseResult from os.path import realpath, dirname, join as _path_join import requests", "\"Mozilla/5.0 (Linux; Android 8.1.0; Pixel Build/OPM2.171019.029; wv) AppleWebKit/537.36 (KHTML, like Gecko) Version/4.0 Chrome/68.0.3325.109", "chunk: for _ in chunk.iter_content(byte_len): headers, url = chunk.headers, chunk.url chunk.close() return (headers,", "\"Mozilla/5.0 (Windows NT 10.0; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/67.0.3526.73 Safari/537.36\" basic_headers =", "url) def _normalise_url(parsed, remove_frag: bool = True): d: dict = parsed._asdict() d[\"scheme\"] =", "len(s) < 2: return s for quote in ('\"', \"'\"): if s[0] ==", "s[-1] == quote: return s[1:-1] return s def int_or_none(i: any): if isinstance(i, int):", "if s[0] == quote and s[-1] == quote: return s[1:-1] return s def", "return s[1:-1] return s def int_or_none(i: any): if isinstance(i, int): return i try:", "= True): d: dict = parsed._asdict() d[\"scheme\"] = d[\"scheme\"].lower() d[\"netloc\"] = d[\"netloc\"].lower() d[\"fragment\"]", "d[\"scheme\"] = d[\"scheme\"].lower() d[\"netloc\"] = d[\"netloc\"].lower() d[\"fragment\"] = \"\" return ParseResult(**d) def remove_quotes(s):", "script_dir = dirname(script_loc) del dirname del realpath mime_types: dict with open(_path_join(script_dir, \"mimes.json\")) as", "ParseResult from os.path import realpath, dirname, join as _path_join import requests from json", "json import load as json_load script_loc = realpath(__file__) script_dir = dirname(script_loc) del dirname", "dirname del realpath mime_types: dict with open(_path_join(script_dir, \"mimes.json\")) 
as f: mime_types = json_load(f)", "os.path import realpath, dirname, join as _path_join import requests from json import load", "\"Accept-Encoding\": \"gzip, deflate\", \"User-Agent\": UA_d, \"Upgrade-Insecure-Requests\": \"1\", \"Accept-Language\": \"en-GB,en-US;q=0.9,en;q=0.8\", \"dnt\": \"1\", \"Accept\": \"text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,image/apng,*/*;q=0.8\",", "in ('\"', \"'\"): if s[0] == quote and s[-1] == quote: return s[1:-1]", "Safari/537.36\" UA_d = \"Mozilla/5.0 (Windows NT 10.0; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/67.0.3526.73", "json_load script_loc = realpath(__file__) script_dir = dirname(script_loc) del dirname del realpath mime_types: dict", "(KHTML, like Gecko) Chrome/67.0.3526.73 Safari/537.36\" basic_headers = { \"Accept-Encoding\": \"gzip, deflate\", \"User-Agent\": UA_d,", "import realpath, dirname, join as _path_join import requests from json import load as", "Build/OPM2.171019.029; wv) AppleWebKit/537.36 (KHTML, like Gecko) Version/4.0 Chrome/68.0.3325.109 Mobile Safari/537.36\" UA_d = \"Mozilla/5.0", "Gecko) Version/4.0 Chrome/68.0.3325.109 Mobile Safari/537.36\" UA_d = \"Mozilla/5.0 (Windows NT 10.0; WOW64) AppleWebKit/537.36", "\"User-Agent\": UA_d, \"Upgrade-Insecure-Requests\": \"1\", \"Accept-Language\": \"en-GB,en-US;q=0.9,en;q=0.8\", \"dnt\": \"1\", \"Accept\": \"text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,image/apng,*/*;q=0.8\", } def _abort_request_after(url:", "return ParseResult(**d) def remove_quotes(s): if s is None or len(s) < 2: return", "del dirname del realpath mime_types: dict with open(_path_join(script_dir, \"mimes.json\")) as f: mime_types =", "like Gecko) Chrome/67.0.3526.73 Safari/537.36\" basic_headers = { \"Accept-Encoding\": \"gzip, deflate\", \"User-Agent\": UA_d, \"Upgrade-Insecure-Requests\":", "allow_redirects=True, stream=True ) as chunk: for _ in chunk.iter_content(byte_len): headers, url = chunk.headers,", "f: mime_types = json_load(f) 
UA_m = \"Mozilla/5.0 (Linux; Android 8.1.0; Pixel Build/OPM2.171019.029; wv)", "load as json_load script_loc = realpath(__file__) script_dir = dirname(script_loc) del dirname del realpath", "dirname(script_loc) del dirname del realpath mime_types: dict with open(_path_join(script_dir, \"mimes.json\")) as f: mime_types", "mime_types = json_load(f) UA_m = \"Mozilla/5.0 (Linux; Android 8.1.0; Pixel Build/OPM2.171019.029; wv) AppleWebKit/537.36", "s is None or len(s) < 2: return s for quote in ('\"',", "== quote and s[-1] == quote: return s[1:-1] return s def int_or_none(i: any):", "as json_load script_loc = realpath(__file__) script_dir = dirname(script_loc) del dirname del realpath mime_types:", "script_loc = realpath(__file__) script_dir = dirname(script_loc) del dirname del realpath mime_types: dict with", "headers, url = chunk.headers, chunk.url chunk.close() return (headers, url) def _normalise_url(parsed, remove_frag: bool", "with requests.get( url, headers=basic_headers, allow_redirects=True, stream=True ) as chunk: for _ in chunk.iter_content(byte_len):", "_ in chunk.iter_content(byte_len): headers, url = chunk.headers, chunk.url chunk.close() return (headers, url) def", "remove_frag: bool = True): d: dict = parsed._asdict() d[\"scheme\"] = d[\"scheme\"].lower() d[\"netloc\"] =", "d[\"fragment\"] = \"\" return ParseResult(**d) def remove_quotes(s): if s is None or len(s)", "('\"', \"'\"): if s[0] == quote and s[-1] == quote: return s[1:-1] return", "requests.get( url, headers=basic_headers, allow_redirects=True, stream=True ) as chunk: for _ in chunk.iter_content(byte_len): headers,", "= 1024): with requests.get( url, headers=basic_headers, allow_redirects=True, stream=True ) as chunk: for _", "return (headers, url) def _normalise_url(parsed, remove_frag: bool = True): d: dict = parsed._asdict()", "json_load(f) UA_m = \"Mozilla/5.0 (Linux; Android 8.1.0; Pixel Build/OPM2.171019.029; wv) AppleWebKit/537.36 (KHTML, like", "= json_load(f) UA_m = \"Mozilla/5.0 
(Linux; Android 8.1.0; Pixel Build/OPM2.171019.029; wv) AppleWebKit/537.36 (KHTML,", "d: dict = parsed._asdict() d[\"scheme\"] = d[\"scheme\"].lower() d[\"netloc\"] = d[\"netloc\"].lower() d[\"fragment\"] = \"\"", "s for quote in ('\"', \"'\"): if s[0] == quote and s[-1] ==", "headers=basic_headers, allow_redirects=True, stream=True ) as chunk: for _ in chunk.iter_content(byte_len): headers, url =", "} def _abort_request_after(url: str, byte_len: int = 1024): with requests.get( url, headers=basic_headers, allow_redirects=True,", "remove_quotes(s): if s is None or len(s) < 2: return s for quote", "is None or len(s) < 2: return s for quote in ('\"', \"'\"):" ]
from disnake.ext import commands
from utils.clash import client, pingToChannel, getClan
import disnake

# Shared database handles.  `client` comes from utils.clash and is
# presumably an async Mongo (motor) client exposing the `usafam`
# database — confirm in utils.clash.
usafam = client.usafam
# Tracked-clan documents (queried per guild via {"server": guild_id}).
clans = usafam.clans
# Per-guild settings documents (autoboard channels / hours / country).
server = usafam.server
real_times[lbh - 5]", "self.bot.wait_for(\"message_interaction\", check=check, timeout=600) except: await msg.edit(components=[]) break if res.author.id != ctx.author.id: await res.send(content=\"You", "= await pingToChannel(ctx, tbc) tbc = tbc.mention except: pass try: th = results.get(\"tophour\")", "f\"<t:1643263200:t>\" except: pass try: country = results.get(\"country\") except: pass embed = disnake.Embed(title=\"**Autoboard List**\",", "= disnake.ui.Select( options=options, placeholder=\"Page Navigation\", min_values=1, # the minimum number of options a", "autoboard_type == \"Clan Leaderboard\": rr = [] tracked = clans.find({\"server\": ctx.guild.id}) limit =", "in range(0, 24): t = start_time + (x * 3600) real_times.append(t) try: tbc", "leaderboard Post Time: {lbh}\\n\" f\"Clan leaderboard Country: {country}\\n\", color=disnake.Color.green()) await ctx.send(embed=embed) def setup(bot:", "description=\"Remove a server autoboard\") async def removeboard(self, ctx: disnake.ApplicationCommandInteraction, autoboard_type: str = commands.Param(choices=[\"Player", "autoboard?**\", color=disnake.Color.green()) await ctx.edit_original_message(embed=embed, components=[action_row]) def check(res: disnake.MessageInteraction): return res.message.id == msg.id country", "await server.update_one({\"server\": ctx.guild.id}, {'$set': {\"country\": None}}) await server.update_one({\"server\": ctx.guild.id}, {'$set': {\"lbhour\": None}}) embed", "from disnake.ext import commands from utils.clash import client, pingToChannel, getClan import disnake usafam", "await getClan(tag) location = str(c.location) if location not in rr: rr.append(str(location)) options =", "def setupboard(self, ctx: disnake.ApplicationCommandInteraction, channel: disnake.TextChannel, autoboard_type: str = commands.Param(choices=[\"Player Leaderboard\", \"Clan Leaderboard\"])):", "ctx.edit_original_message(embed=embed, components=[action_row]) def check(res: disnake.MessageInteraction): return 
res.message.id == msg.id country = False while", "embed = disnake.Embed(description=f\"{autoboard_type} autoboard has been removed.\", color=disnake.Color.green()) await ctx.send(embed=embed, components=[]) @autoboard.sub_command(name=\"list\", description=\"View", "color=disnake.Color.green()) await msg.edit(embed=embed) @autoboard.sub_command(name=\"remove\", description=\"Remove a server autoboard\") async def removeboard(self, ctx: disnake.ApplicationCommandInteraction,", "try: lbc = results.get(\"lbboardChannel\") lbc = await pingToChannel(ctx, lbc) lbc = lbc.mention except:", "5] lbh = f\"<t:1643263200:t>\" except: pass try: country = results.get(\"country\") except: pass embed", "lbc.mention except: pass try: lbh = results.get(\"lbhour\") lbh = real_times[lbh - 5] lbh", "lbc = await pingToChannel(ctx, lbc) lbc = lbc.mention except: pass try: lbh =", "components.\", ephemeral=True) continue country = str(res.values[0]) tex = \"\" if autoboard_type == \"Player", "the command to interact with components.\", ephemeral=True) continue country = str(res.values[0]) tex =", "= [] start_time = 1643263200 for x in range(0, 24): t = start_time", "@autoboard.sub_command(name=\"remove\", description=\"Remove a server autoboard\") async def removeboard(self, ctx: disnake.ApplicationCommandInteraction, autoboard_type: str =", "{time}\\n\" f\"Type: {autoboard_type}{tex}\", color=disnake.Color.green()) await msg.edit(embed=embed) @autoboard.sub_command(name=\"remove\", description=\"Remove a server autoboard\") async def", "= usafam.clans server = usafam.server class autoB(commands.Cog, name=\"Board Setup\"): def __init__(self, bot: commands.Bot):", "autoboard(self, ctx): pass @autoboard.sub_command(name=\"create\", description=\"Create server autoposting leaderboards\") async def setupboard(self, ctx: disnake.ApplicationCommandInteraction,", "= str(c.location) if location not in rr: rr.append(str(location)) options = [] for country", "except: pass try: country = 
results.get(\"country\") except: pass embed = disnake.Embed(title=\"**Autoboard List**\", description=f\"Player", "(x * 3600) real_times.append(t) try: tbc = results.get(\"topboardchannel\") tbc = await pingToChannel(ctx, tbc)", "f\"Player leaderboard Post Time: {th}\\n\" f\"Clan leaderboard Channel: {lbc}\\n\" f\"Clan leaderboard Post Time:", "= await getClan(tag) location = str(c.location) if location not in rr: rr.append(str(location)) options", "= real_times[th - 5] th = f\"<t:1643263200:t>\" except: pass try: lbc = results.get(\"lbboardChannel\")", "command to interact with components.\", ephemeral=True) continue country = str(res.values[0]) tex = \"\"", "clans.count_documents(filter={\"server\": ctx.guild.id}) for clan in await tracked.to_list(length=limit): tag = clan.get(\"tag\") c = await", "autoboards\") async def boardlist(self, ctx): tbc = None th = None lbc =", "await ctx.send(embed=embed) await ctx.response.defer() msg = await ctx.original_message() country = None if autoboard_type", "to have `Manage Server` permissions.\", color=disnake.Color.red()) return await ctx.send(embed=embed) if autoboard_type == \"Player", "color=disnake.Color.red()) return await ctx.send(embed=embed) await ctx.response.defer() msg = await ctx.original_message() country = None", "results.get(\"lbhour\") lbh = real_times[lbh - 5] lbh = f\"<t:1643263200:t>\" except: pass try: country", "{\"lbhour\": 5}}) tex = f\"\\nCountry: {country}\" time = f\"<t:{1643263200}:t>\" embed = disnake.Embed(title=\"**Autoboard Successfully", "server.update_one({\"server\": ctx.guild.id}, {'$set': {\"country\": None}}) await server.update_one({\"server\": ctx.guild.id}, {'$set': {\"lbhour\": None}}) embed =", "try: lbh = results.get(\"lbhour\") lbh = real_times[lbh - 5] lbh = f\"<t:1643263200:t>\" except:", "country == False: try: res: disnake.MessageInteraction = await self.bot.wait_for(\"message_interaction\", check=check, timeout=600) except: await", "ctx.guild.id}, {'$set': 
{\"topboardchannel\": None}}) await server.update_one({\"server\": ctx.guild.id}, {'$set': {\"tophour\": None}}) else: await server.update_one({\"server\":", "options=options, placeholder=\"Page Navigation\", min_values=1, # the minimum number of options a user must", "country = False while country == False: try: res: disnake.MessageInteraction = await self.bot.wait_for(\"message_interaction\",", "pass embed = disnake.Embed(title=\"**Autoboard List**\", description=f\"Player leaderboard Channel: {tbc}\\n\" f\"Player leaderboard Post Time:", "= disnake.Embed(description=\"Command requires you to have `Manage Server` permissions.\", color=disnake.Color.red()) return await ctx.send(embed=embed)", "= None country = None results = await server.find_one({\"server\": ctx.guild.id}) real_times = []", "server.update_one({\"server\": ctx.guild.id}, {'$set': {\"tophour\": None}}) else: await server.update_one({\"server\": ctx.guild.id}, {'$set': {\"lbboardChannel\": None}}) await", "number of options a user must select max_values=1 # the maximum number of", "None lbh = None country = None results = await server.find_one({\"server\": ctx.guild.id}) real_times", "disnake.Embed(description=f\"{autoboard_type} autoboard has been removed.\", color=disnake.Color.green()) await ctx.send(embed=embed, components=[]) @autoboard.sub_command(name=\"list\", description=\"View server autoboards\")", "autoboard_type == \"Player Leaderboard\": await server.update_one({\"server\": ctx.guild.id}, {'$set': {\"topboardchannel\": channel.id}}) await server.update_one({\"server\": ctx.guild.id},", "ctx.send(embed=embed, components=[]) @autoboard.sub_command(name=\"list\", description=\"View server autoboards\") async def boardlist(self, ctx): tbc = None", "lbc = results.get(\"lbboardChannel\") lbc = await pingToChannel(ctx, lbc) lbc = lbc.mention except: pass", "5] th = f\"<t:1643263200:t>\" except: pass try: lbc = results.get(\"lbboardChannel\") lbc = await", "ctx.guild.id}) for clan in await 
tracked.to_list(length=limit): tag = clan.get(\"tag\") c = await getClan(tag)", "channel.id}}) await server.update_one({\"server\": ctx.guild.id}, {'$set': {\"country\": country}}) await server.update_one({\"server\": ctx.guild.id}, {'$set': {\"lbhour\": 5}})", "ctx.send(embed=embed) if autoboard_type == \"Player Leaderboard\": await server.update_one({\"server\": ctx.guild.id}, {'$set': {\"topboardchannel\": None}}) await", "try: country = results.get(\"country\") except: pass embed = disnake.Embed(title=\"**Autoboard List**\", description=f\"Player leaderboard Channel:", "await server.update_one({\"server\": ctx.guild.id}, {'$set': {\"country\": country}}) await server.update_one({\"server\": ctx.guild.id}, {'$set': {\"lbhour\": 5}}) tex", "location not in rr: rr.append(str(location)) options = [] for country in rr: options.append(disnake.SelectOption(label=f\"{country}\",", "timeout=600) except: await msg.edit(components=[]) break if res.author.id != ctx.author.id: await res.send(content=\"You must run", "commands.Bot): self.bot = bot @commands.slash_command(name=\"autoboard\") async def autoboard(self, ctx): pass @autoboard.sub_command(name=\"create\", description=\"Create server", "t = start_time + (x * 3600) real_times.append(t) try: tbc = results.get(\"topboardchannel\") tbc", "await ctx.response.defer() msg = await ctx.original_message() country = None if autoboard_type == \"Clan", "ctx.send(embed=embed) await ctx.response.defer() msg = await ctx.original_message() country = None if autoboard_type ==", "tbc = tbc.mention except: pass try: th = results.get(\"tophour\") th = real_times[th -", "disnake.MessageInteraction): return res.message.id == msg.id country = False while country == False: try:", "country in rr: options.append(disnake.SelectOption(label=f\"{country}\", value=f\"{country}\")) select1 = disnake.ui.Select( options=options, placeholder=\"Page Navigation\", min_values=1, #", "\"\" if autoboard_type == \"Player Leaderboard\": await 
server.update_one({\"server\": ctx.guild.id}, {'$set': {\"topboardchannel\": channel.id}}) await", "== \"Player Leaderboard\": await server.update_one({\"server\": ctx.guild.id}, {'$set': {\"topboardchannel\": None}}) await server.update_one({\"server\": ctx.guild.id}, {'$set':", "leaderboards\") async def setupboard(self, ctx: disnake.ApplicationCommandInteraction, channel: disnake.TextChannel, autoboard_type: str = commands.Param(choices=[\"Player Leaderboard\",", ") action_row = disnake.ui.ActionRow() action_row.append_item(select1) embed = disnake.Embed(title=\"**For what country would you like", "action_row = disnake.ui.ActionRow() action_row.append_item(select1) embed = disnake.Embed(title=\"**For what country would you like the", "ctx.guild.id}, {'$set': {\"tophour\": None}}) else: await server.update_one({\"server\": ctx.guild.id}, {'$set': {\"lbboardChannel\": None}}) await server.update_one({\"server\":", "start_time + (x * 3600) real_times.append(t) try: tbc = results.get(\"topboardchannel\") tbc = await", "ctx.guild.id}, {'$set': {\"country\": country}}) await server.update_one({\"server\": ctx.guild.id}, {'$set': {\"lbhour\": 5}}) tex = f\"\\nCountry:", "{'$set': {\"topboardchannel\": None}}) await server.update_one({\"server\": ctx.guild.id}, {'$set': {\"tophour\": None}}) else: await server.update_one({\"server\": ctx.guild.id},", "embed = disnake.Embed(title=\"**For what country would you like the leaderboard autoboard?**\", color=disnake.Color.green()) await", "has been removed.\", color=disnake.Color.green()) await ctx.send(embed=embed, components=[]) @autoboard.sub_command(name=\"list\", description=\"View server autoboards\") async def", "{country}\" time = f\"<t:{1643263200}:t>\" embed = disnake.Embed(title=\"**Autoboard Successfully Setup**\", description=f\"Channel: {channel.mention}\\n\" f\"Time: {time}\\n\"", "{\"tophour\": None}}) else: await server.update_one({\"server\": ctx.guild.id}, {'$set': {\"lbboardChannel\": None}}) await 
server.update_one({\"server\": ctx.guild.id}, {'$set':", "= results.get(\"topboardchannel\") tbc = await pingToChannel(ctx, tbc) tbc = tbc.mention except: pass try:", "= clan.get(\"tag\") c = await getClan(tag) location = str(c.location) if location not in", "ephemeral=True) continue country = str(res.values[0]) tex = \"\" if autoboard_type == \"Player Leaderboard\":", "pass try: lbc = results.get(\"lbboardChannel\") lbc = await pingToChannel(ctx, lbc) lbc = lbc.mention", "= results.get(\"country\") except: pass embed = disnake.Embed(title=\"**Autoboard List**\", description=f\"Player leaderboard Channel: {tbc}\\n\" f\"Player", "have `Manage Server` permissions.\", color=disnake.Color.red()) return await ctx.send(embed=embed) await ctx.response.defer() msg = await", "def check(res: disnake.MessageInteraction): return res.message.id == msg.id country = False while country ==", "await server.update_one({\"server\": ctx.guild.id}, {'$set': {\"tophour\": 5}}) else: await server.update_one({\"server\": ctx.guild.id}, {'$set': {\"lbboardChannel\": channel.id}})", "the leaderboard autoboard?**\", color=disnake.Color.green()) await ctx.edit_original_message(embed=embed, components=[action_row]) def check(res: disnake.MessageInteraction): return res.message.id ==", "!= ctx.author.id: await res.send(content=\"You must run the command to interact with components.\", ephemeral=True)", "[] for country in rr: options.append(disnake.SelectOption(label=f\"{country}\", value=f\"{country}\")) select1 = disnake.ui.Select( options=options, placeholder=\"Page Navigation\",", "Server` permissions.\", color=disnake.Color.red()) return await ctx.send(embed=embed) if autoboard_type == \"Player Leaderboard\": await server.update_one({\"server\":", "number of options a user can select ) action_row = disnake.ui.ActionRow() action_row.append_item(select1) embed", "Time: {th}\\n\" f\"Clan leaderboard Channel: {lbc}\\n\" f\"Clan leaderboard Post Time: {lbh}\\n\" f\"Clan leaderboard", "server = 
usafam.server class autoB(commands.Cog, name=\"Board Setup\"): def __init__(self, bot: commands.Bot): self.bot =", "\"Player Leaderboard\": await server.update_one({\"server\": ctx.guild.id}, {'$set': {\"topboardchannel\": channel.id}}) await server.update_one({\"server\": ctx.guild.id}, {'$set': {\"tophour\":", "rr: options.append(disnake.SelectOption(label=f\"{country}\", value=f\"{country}\")) select1 = disnake.ui.Select( options=options, placeholder=\"Page Navigation\", min_values=1, # the minimum", "None}}) embed = disnake.Embed(description=f\"{autoboard_type} autoboard has been removed.\", color=disnake.Color.green()) await ctx.send(embed=embed, components=[]) @autoboard.sub_command(name=\"list\",", "@autoboard.sub_command(name=\"create\", description=\"Create server autoposting leaderboards\") async def setupboard(self, ctx: disnake.ApplicationCommandInteraction, channel: disnake.TextChannel, autoboard_type:", "{'$set': {\"lbhour\": 5}}) tex = f\"\\nCountry: {country}\" time = f\"<t:{1643263200}:t>\" embed = disnake.Embed(title=\"**Autoboard", "res: disnake.MessageInteraction = await self.bot.wait_for(\"message_interaction\", check=check, timeout=600) except: await msg.edit(components=[]) break if res.author.id", "{\"country\": country}}) await server.update_one({\"server\": ctx.guild.id}, {'$set': {\"lbhour\": 5}}) tex = f\"\\nCountry: {country}\" time", "autoboard_type: str = commands.Param(choices=[\"Player Leaderboard\", \"Clan Leaderboard\"])): perms = ctx.author.guild_permissions.manage_guild if not perms:", "= f\"<t:1643263200:t>\" except: pass try: lbc = results.get(\"lbboardChannel\") lbc = await pingToChannel(ctx, lbc)", "disnake usafam = client.usafam clans = usafam.clans server = usafam.server class autoB(commands.Cog, name=\"Board", "ctx.guild.id}, {'$set': {\"lbboardChannel\": channel.id}}) await server.update_one({\"server\": ctx.guild.id}, {'$set': {\"country\": country}}) await server.update_one({\"server\": ctx.guild.id},", "res.message.id 
== msg.id country = False while country == False: try: res: disnake.MessageInteraction", "return await ctx.send(embed=embed) if autoboard_type == \"Player Leaderboard\": await server.update_one({\"server\": ctx.guild.id}, {'$set': {\"topboardchannel\":", "= real_times[lbh - 5] lbh = f\"<t:1643263200:t>\" except: pass try: country = results.get(\"country\")", "setupboard(self, ctx: disnake.ApplicationCommandInteraction, channel: disnake.TextChannel, autoboard_type: str = commands.Param(choices=[\"Player Leaderboard\", \"Clan Leaderboard\"])): perms", "import client, pingToChannel, getClan import disnake usafam = client.usafam clans = usafam.clans server", "options = [] for country in rr: options.append(disnake.SelectOption(label=f\"{country}\", value=f\"{country}\")) select1 = disnake.ui.Select( options=options,", "real_times.append(t) try: tbc = results.get(\"topboardchannel\") tbc = await pingToChannel(ctx, tbc) tbc = tbc.mention", "except: await msg.edit(components=[]) break if res.author.id != ctx.author.id: await res.send(content=\"You must run the", "= f\"<t:1643263200:t>\" except: pass try: country = results.get(\"country\") except: pass embed = disnake.Embed(title=\"**Autoboard", "usafam = client.usafam clans = usafam.clans server = usafam.server class autoB(commands.Cog, name=\"Board Setup\"):", "Leaderboard\", \"Clan Leaderboard\"])): perms = ctx.author.guild_permissions.manage_guild if not perms: embed = disnake.Embed(description=\"Command requires", "1643263200 for x in range(0, 24): t = start_time + (x * 3600)", "pass try: th = results.get(\"tophour\") th = real_times[th - 5] th = f\"<t:1643263200:t>\"", "await server.update_one({\"server\": ctx.guild.id}, {'$set': {\"topboardchannel\": channel.id}}) await server.update_one({\"server\": ctx.guild.id}, {'$set': {\"tophour\": 5}}) else:", "country = None if autoboard_type == \"Clan Leaderboard\": rr = [] tracked =", "{'$set': {\"lbboardChannel\": channel.id}}) await server.update_one({\"server\": 
ctx.guild.id}, {'$set': {\"country\": country}}) await server.update_one({\"server\": ctx.guild.id}, {'$set':", "lbh = None country = None results = await server.find_one({\"server\": ctx.guild.id}) real_times =", "pingToChannel, getClan import disnake usafam = client.usafam clans = usafam.clans server = usafam.server", "{'$set': {\"tophour\": None}}) else: await server.update_one({\"server\": ctx.guild.id}, {'$set': {\"lbboardChannel\": None}}) await server.update_one({\"server\": ctx.guild.id},", "user can select ) action_row = disnake.ui.ActionRow() action_row.append_item(select1) embed = disnake.Embed(title=\"**For what country", "tbc = await pingToChannel(ctx, tbc) tbc = tbc.mention except: pass try: th =", "None results = await server.find_one({\"server\": ctx.guild.id}) real_times = [] start_time = 1643263200 for", "= bot @commands.slash_command(name=\"autoboard\") async def autoboard(self, ctx): pass @autoboard.sub_command(name=\"create\", description=\"Create server autoposting leaderboards\")", "leaderboard autoboard?**\", color=disnake.Color.green()) await ctx.edit_original_message(embed=embed, components=[action_row]) def check(res: disnake.MessageInteraction): return res.message.id == msg.id", "- 5] lbh = f\"<t:1643263200:t>\" except: pass try: country = results.get(\"country\") except: pass", "Navigation\", min_values=1, # the minimum number of options a user must select max_values=1", "ctx.guild.id}, {'$set': {\"lbhour\": None}}) embed = disnake.Embed(description=f\"{autoboard_type} autoboard has been removed.\", color=disnake.Color.green()) await", "maximum number of options a user can select ) action_row = disnake.ui.ActionRow() action_row.append_item(select1)", "= False while country == False: try: res: disnake.MessageInteraction = await self.bot.wait_for(\"message_interaction\", check=check,", "client.usafam clans = usafam.clans server = usafam.server class autoB(commands.Cog, name=\"Board Setup\"): def __init__(self,", 
"@autoboard.sub_command(name=\"list\", description=\"View server autoboards\") async def boardlist(self, ctx): tbc = None th =", "{'$set': {\"country\": None}}) await server.update_one({\"server\": ctx.guild.id}, {'$set': {\"lbhour\": None}}) embed = disnake.Embed(description=f\"{autoboard_type} autoboard", "utils.clash import client, pingToChannel, getClan import disnake usafam = client.usafam clans = usafam.clans", "you like the leaderboard autoboard?**\", color=disnake.Color.green()) await ctx.edit_original_message(embed=embed, components=[action_row]) def check(res: disnake.MessageInteraction): return", "description=\"View server autoboards\") async def boardlist(self, ctx): tbc = None th = None", "await server.update_one({\"server\": ctx.guild.id}, {'$set': {\"lbhour\": None}}) embed = disnake.Embed(description=f\"{autoboard_type} autoboard has been removed.\",", "== msg.id country = False while country == False: try: res: disnake.MessageInteraction =", "start_time = 1643263200 for x in range(0, 24): t = start_time + (x", "ctx.response.defer() msg = await ctx.original_message() country = None if autoboard_type == \"Clan Leaderboard\":", "a user can select ) action_row = disnake.ui.ActionRow() action_row.append_item(select1) embed = disnake.Embed(title=\"**For what", "= await self.bot.wait_for(\"message_interaction\", check=check, timeout=600) except: await msg.edit(components=[]) break if res.author.id != ctx.author.id:", "None}}) await server.update_one({\"server\": ctx.guild.id}, {'$set': {\"lbhour\": None}}) embed = disnake.Embed(description=f\"{autoboard_type} autoboard has been", "{'$set': {\"lbhour\": None}}) embed = disnake.Embed(description=f\"{autoboard_type} autoboard has been removed.\", color=disnake.Color.green()) await ctx.send(embed=embed,", "{'$set': {\"lbboardChannel\": None}}) await server.update_one({\"server\": ctx.guild.id}, {'$set': {\"country\": None}}) await server.update_one({\"server\": ctx.guild.id}, {'$set':", "ctx.guild.id}, 
{'$set': {\"tophour\": 5}}) else: await server.update_one({\"server\": ctx.guild.id}, {'$set': {\"lbboardChannel\": channel.id}}) await server.update_one({\"server\":", "await ctx.send(embed=embed) if autoboard_type == \"Player Leaderboard\": await server.update_one({\"server\": ctx.guild.id}, {'$set': {\"topboardchannel\": None}})", "rr = [] tracked = clans.find({\"server\": ctx.guild.id}) limit = await clans.count_documents(filter={\"server\": ctx.guild.id}) for", "run the command to interact with components.\", ephemeral=True) continue country = str(res.values[0]) tex", "ctx.author.guild_permissions.manage_guild if not perms: embed = disnake.Embed(description=\"Command requires you to have `Manage Server`", "range(0, 24): t = start_time + (x * 3600) real_times.append(t) try: tbc =", "color=disnake.Color.green()) await ctx.edit_original_message(embed=embed, components=[action_row]) def check(res: disnake.MessageInteraction): return res.message.id == msg.id country =", "check=check, timeout=600) except: await msg.edit(components=[]) break if res.author.id != ctx.author.id: await res.send(content=\"You must", "color=disnake.Color.red()) return await ctx.send(embed=embed) if autoboard_type == \"Player Leaderboard\": await server.update_one({\"server\": ctx.guild.id}, {'$set':", "None}}) await server.update_one({\"server\": ctx.guild.id}, {'$set': {\"country\": None}}) await server.update_one({\"server\": ctx.guild.id}, {'$set': {\"lbhour\": None}})", "for x in range(0, 24): t = start_time + (x * 3600) real_times.append(t)", "disnake.ApplicationCommandInteraction, channel: disnake.TextChannel, autoboard_type: str = commands.Param(choices=[\"Player Leaderboard\", \"Clan Leaderboard\"])): perms = ctx.author.guild_permissions.manage_guild", "you to have `Manage Server` permissions.\", color=disnake.Color.red()) return await ctx.send(embed=embed) await ctx.response.defer() msg", "min_values=1, # the minimum number of options a user must select max_values=1 #", "th = 
f\"<t:1643263200:t>\" except: pass try: lbc = results.get(\"lbboardChannel\") lbc = await pingToChannel(ctx,", "would you like the leaderboard autoboard?**\", color=disnake.Color.green()) await ctx.edit_original_message(embed=embed, components=[action_row]) def check(res: disnake.MessageInteraction):", "5}}) tex = f\"\\nCountry: {country}\" time = f\"<t:{1643263200}:t>\" embed = disnake.Embed(title=\"**Autoboard Successfully Setup**\",", "`Manage Server` permissions.\", color=disnake.Color.red()) return await ctx.send(embed=embed) await ctx.response.defer() msg = await ctx.original_message()", "await ctx.edit_original_message(embed=embed, components=[action_row]) def check(res: disnake.MessageInteraction): return res.message.id == msg.id country = False", "removed.\", color=disnake.Color.green()) await ctx.send(embed=embed, components=[]) @autoboard.sub_command(name=\"list\", description=\"View server autoboards\") async def boardlist(self, ctx):", "disnake.MessageInteraction = await self.bot.wait_for(\"message_interaction\", check=check, timeout=600) except: await msg.edit(components=[]) break if res.author.id !=", "= disnake.Embed(title=\"**Autoboard List**\", description=f\"Player leaderboard Channel: {tbc}\\n\" f\"Player leaderboard Post Time: {th}\\n\" f\"Clan", "= results.get(\"lbboardChannel\") lbc = await pingToChannel(ctx, lbc) lbc = lbc.mention except: pass try:", "channel.id}}) await server.update_one({\"server\": ctx.guild.id}, {'$set': {\"tophour\": 5}}) else: await server.update_one({\"server\": ctx.guild.id}, {'$set': {\"lbboardChannel\":", "disnake.Embed(description=\"Command requires you to have `Manage Server` permissions.\", color=disnake.Color.red()) return await ctx.send(embed=embed) if", "to interact with components.\", ephemeral=True) continue country = str(res.values[0]) tex = \"\" if", "if not perms: embed = disnake.Embed(description=\"Command requires you to have `Manage Server` permissions.\",", "{\"country\": None}}) await 
server.update_one({\"server\": ctx.guild.id}, {'$set': {\"lbhour\": None}}) embed = disnake.Embed(description=f\"{autoboard_type} autoboard has", "permissions.\", color=disnake.Color.red()) return await ctx.send(embed=embed) await ctx.response.defer() msg = await ctx.original_message() country =", "= tbc.mention except: pass try: th = results.get(\"tophour\") th = real_times[th - 5]", "{\"lbboardChannel\": channel.id}}) await server.update_one({\"server\": ctx.guild.id}, {'$set': {\"country\": country}}) await server.update_one({\"server\": ctx.guild.id}, {'$set': {\"lbhour\":", "== \"Clan Leaderboard\": rr = [] tracked = clans.find({\"server\": ctx.guild.id}) limit = await", "with components.\", ephemeral=True) continue country = str(res.values[0]) tex = \"\" if autoboard_type ==", "= None results = await server.find_one({\"server\": ctx.guild.id}) real_times = [] start_time = 1643263200", "tex = f\"\\nCountry: {country}\" time = f\"<t:{1643263200}:t>\" embed = disnake.Embed(title=\"**Autoboard Successfully Setup**\", description=f\"Channel:", "leaderboard Channel: {lbc}\\n\" f\"Clan leaderboard Post Time: {lbh}\\n\" f\"Clan leaderboard Country: {country}\\n\", color=disnake.Color.green())", "lbc = None lbh = None country = None results = await server.find_one({\"server\":", "client, pingToChannel, getClan import disnake usafam = client.usafam clans = usafam.clans server =", "= commands.Param(choices=[\"Player Leaderboard\", \"Clan Leaderboard\"])): perms = ctx.author.guild_permissions.manage_guild if not perms: embed =", "else: await server.update_one({\"server\": ctx.guild.id}, {'$set': {\"lbboardChannel\": None}}) await server.update_one({\"server\": ctx.guild.id}, {'$set': {\"country\": None}})", "th = real_times[th - 5] th = f\"<t:1643263200:t>\" except: pass try: lbc =", "\"Clan Leaderboard\"])): perms = ctx.author.guild_permissions.manage_guild if not perms: embed = disnake.Embed(description=\"Command requires you", "max_values=1 # the maximum number of 
options a user can select ) action_row", "ctx.guild.id}, {'$set': {\"topboardchannel\": channel.id}}) await server.update_one({\"server\": ctx.guild.id}, {'$set': {\"tophour\": 5}}) else: await server.update_one({\"server\":", "server autoboards\") async def boardlist(self, ctx): tbc = None th = None lbc", "{'$set': {\"tophour\": 5}}) else: await server.update_one({\"server\": ctx.guild.id}, {'$set': {\"lbboardChannel\": channel.id}}) await server.update_one({\"server\": ctx.guild.id},", "country would you like the leaderboard autoboard?**\", color=disnake.Color.green()) await ctx.edit_original_message(embed=embed, components=[action_row]) def check(res:", "server.find_one({\"server\": ctx.guild.id}) real_times = [] start_time = 1643263200 for x in range(0, 24):", "of options a user must select max_values=1 # the maximum number of options", "country = str(res.values[0]) tex = \"\" if autoboard_type == \"Player Leaderboard\": await server.update_one({\"server\":", "@commands.slash_command(name=\"autoboard\") async def autoboard(self, ctx): pass @autoboard.sub_command(name=\"create\", description=\"Create server autoposting leaderboards\") async def", "removeboard(self, ctx: disnake.ApplicationCommandInteraction, autoboard_type: str = commands.Param(choices=[\"Player Leaderboard\", \"Clan Leaderboard\"])): perms = ctx.author.guild_permissions.manage_guild", "except: pass embed = disnake.Embed(title=\"**Autoboard List**\", description=f\"Player leaderboard Channel: {tbc}\\n\" f\"Player leaderboard Post", "have `Manage Server` permissions.\", color=disnake.Color.red()) return await ctx.send(embed=embed) if autoboard_type == \"Player Leaderboard\":", "else: await server.update_one({\"server\": ctx.guild.id}, {'$set': {\"lbboardChannel\": channel.id}}) await server.update_one({\"server\": ctx.guild.id}, {'$set': {\"country\": country}})", "clan in await tracked.to_list(length=limit): tag = clan.get(\"tag\") c = await getClan(tag) location =", "disnake.ui.Select( 
options=options, placeholder=\"Page Navigation\", min_values=1, # the minimum number of options a user", "await pingToChannel(ctx, lbc) lbc = lbc.mention except: pass try: lbh = results.get(\"lbhour\") lbh", "like the leaderboard autoboard?**\", color=disnake.Color.green()) await ctx.edit_original_message(embed=embed, components=[action_row]) def check(res: disnake.MessageInteraction): return res.message.id", "Setup\"): def __init__(self, bot: commands.Bot): self.bot = bot @commands.slash_command(name=\"autoboard\") async def autoboard(self, ctx):", "= f\"<t:{1643263200}:t>\" embed = disnake.Embed(title=\"**Autoboard Successfully Setup**\", description=f\"Channel: {channel.mention}\\n\" f\"Time: {time}\\n\" f\"Type: {autoboard_type}{tex}\",", "except: pass try: lbh = results.get(\"lbhour\") lbh = real_times[lbh - 5] lbh =", "= disnake.Embed(description=f\"{autoboard_type} autoboard has been removed.\", color=disnake.Color.green()) await ctx.send(embed=embed, components=[]) @autoboard.sub_command(name=\"list\", description=\"View server", "ctx: disnake.ApplicationCommandInteraction, autoboard_type: str = commands.Param(choices=[\"Player Leaderboard\", \"Clan Leaderboard\"])): perms = ctx.author.guild_permissions.manage_guild if", "msg.edit(components=[]) break if res.author.id != ctx.author.id: await res.send(content=\"You must run the command to", "= await server.find_one({\"server\": ctx.guild.id}) real_times = [] start_time = 1643263200 for x in", "options a user can select ) action_row = disnake.ui.ActionRow() action_row.append_item(select1) embed = disnake.Embed(title=\"**For", "msg.id country = False while country == False: try: res: disnake.MessageInteraction = await", "usafam.server class autoB(commands.Cog, name=\"Board Setup\"): def __init__(self, bot: commands.Bot): self.bot = bot @commands.slash_command(name=\"autoboard\")", "bot: commands.Bot): self.bot = bot @commands.slash_command(name=\"autoboard\") async def autoboard(self, ctx): pass 
@autoboard.sub_command(name=\"create\", description=\"Create", "continue country = str(res.values[0]) tex = \"\" if autoboard_type == \"Player Leaderboard\": await", "def boardlist(self, ctx): tbc = None th = None lbc = None lbh", "placeholder=\"Page Navigation\", min_values=1, # the minimum number of options a user must select", "False: try: res: disnake.MessageInteraction = await self.bot.wait_for(\"message_interaction\", check=check, timeout=600) except: await msg.edit(components=[]) break", "def __init__(self, bot: commands.Bot): self.bot = bot @commands.slash_command(name=\"autoboard\") async def autoboard(self, ctx): pass", "results.get(\"country\") except: pass embed = disnake.Embed(title=\"**Autoboard List**\", description=f\"Player leaderboard Channel: {tbc}\\n\" f\"Player leaderboard", "Leaderboard\": rr = [] tracked = clans.find({\"server\": ctx.guild.id}) limit = await clans.count_documents(filter={\"server\": ctx.guild.id})", "except: pass try: th = results.get(\"tophour\") th = real_times[th - 5] th =", "rr.append(str(location)) options = [] for country in rr: options.append(disnake.SelectOption(label=f\"{country}\", value=f\"{country}\")) select1 = disnake.ui.Select(", "{autoboard_type}{tex}\", color=disnake.Color.green()) await msg.edit(embed=embed) @autoboard.sub_command(name=\"remove\", description=\"Remove a server autoboard\") async def removeboard(self, ctx:", "= results.get(\"lbhour\") lbh = real_times[lbh - 5] lbh = f\"<t:1643263200:t>\" except: pass try:", "= disnake.Embed(title=\"**For what country would you like the leaderboard autoboard?**\", color=disnake.Color.green()) await ctx.edit_original_message(embed=embed,", "options a user must select max_values=1 # the maximum number of options a", "{\"topboardchannel\": None}}) await server.update_one({\"server\": ctx.guild.id}, {'$set': {\"tophour\": None}}) else: await server.update_one({\"server\": ctx.guild.id}, {'$set':", "disnake.Embed(title=\"**Autoboard Successfully Setup**\", 
description=f\"Channel: {channel.mention}\\n\" f\"Time: {time}\\n\" f\"Type: {autoboard_type}{tex}\", color=disnake.Color.green()) await msg.edit(embed=embed) @autoboard.sub_command(name=\"remove\",", "{\"lbboardChannel\": None}}) await server.update_one({\"server\": ctx.guild.id}, {'$set': {\"country\": None}}) await server.update_one({\"server\": ctx.guild.id}, {'$set': {\"lbhour\":", "Leaderboard\": await server.update_one({\"server\": ctx.guild.id}, {'$set': {\"topboardchannel\": channel.id}}) await server.update_one({\"server\": ctx.guild.id}, {'$set': {\"tophour\": 5}})", "time = f\"<t:{1643263200}:t>\" embed = disnake.Embed(title=\"**Autoboard Successfully Setup**\", description=f\"Channel: {channel.mention}\\n\" f\"Time: {time}\\n\" f\"Type:", "None}}) else: await server.update_one({\"server\": ctx.guild.id}, {'$set': {\"lbboardChannel\": None}}) await server.update_one({\"server\": ctx.guild.id}, {'$set': {\"country\":", "results = await server.find_one({\"server\": ctx.guild.id}) real_times = [] start_time = 1643263200 for x", "real_times[lbh - 5] lbh = f\"<t:1643263200:t>\" except: pass try: country = results.get(\"country\") except:", "components=[action_row]) def check(res: disnake.MessageInteraction): return res.message.id == msg.id country = False while country", "ctx): tbc = None th = None lbc = None lbh = None", "permissions.\", color=disnake.Color.red()) return await ctx.send(embed=embed) if autoboard_type == \"Player Leaderboard\": await server.update_one({\"server\": ctx.guild.id},", "a server autoboard\") async def removeboard(self, ctx: disnake.ApplicationCommandInteraction, autoboard_type: str = commands.Param(choices=[\"Player Leaderboard\",", "lbc = lbc.mention except: pass try: lbh = results.get(\"lbhour\") lbh = real_times[lbh -", "await pingToChannel(ctx, tbc) tbc = tbc.mention except: pass try: th = results.get(\"tophour\") th", "= client.usafam clans = usafam.clans server = usafam.server class autoB(commands.Cog, name=\"Board Setup\"): 
def", "import commands from utils.clash import client, pingToChannel, getClan import disnake usafam = client.usafam", "ctx.guild.id}, {'$set': {\"country\": None}}) await server.update_one({\"server\": ctx.guild.id}, {'$set': {\"lbhour\": None}}) embed = disnake.Embed(description=f\"{autoboard_type}", "not perms: embed = disnake.Embed(description=\"Command requires you to have `Manage Server` permissions.\", color=disnake.Color.red())", "`Manage Server` permissions.\", color=disnake.Color.red()) return await ctx.send(embed=embed) if autoboard_type == \"Player Leaderboard\": await", "async def boardlist(self, ctx): tbc = None th = None lbc = None", "ctx.guild.id}) limit = await clans.count_documents(filter={\"server\": ctx.guild.id}) for clan in await tracked.to_list(length=limit): tag =", "embed = disnake.Embed(title=\"**Autoboard List**\", description=f\"Player leaderboard Channel: {tbc}\\n\" f\"Player leaderboard Post Time: {th}\\n\"", "server.update_one({\"server\": ctx.guild.id}, {'$set': {\"lbboardChannel\": channel.id}}) await server.update_one({\"server\": ctx.guild.id}, {'$set': {\"country\": country}}) await server.update_one({\"server\":", "perms: embed = disnake.Embed(description=\"Command requires you to have `Manage Server` permissions.\", color=disnake.Color.red()) return", "await server.update_one({\"server\": ctx.guild.id}, {'$set': {\"lbboardChannel\": channel.id}}) await server.update_one({\"server\": ctx.guild.id}, {'$set': {\"country\": country}}) await", "{\"topboardchannel\": channel.id}}) await server.update_one({\"server\": ctx.guild.id}, {'$set': {\"tophour\": 5}}) else: await server.update_one({\"server\": ctx.guild.id}, {'$set':", "ctx.guild.id}) real_times = [] start_time = 1643263200 for x in range(0, 24): t", "interact with components.\", ephemeral=True) continue country = str(res.values[0]) tex = \"\" if autoboard_type", "the maximum number of options a user can select ) action_row = disnake.ui.ActionRow()", "{lbc}\\n\" f\"Clan 
leaderboard Post Time: {lbh}\\n\" f\"Clan leaderboard Country: {country}\\n\", color=disnake.Color.green()) await ctx.send(embed=embed)", "options.append(disnake.SelectOption(label=f\"{country}\", value=f\"{country}\")) select1 = disnake.ui.Select( options=options, placeholder=\"Page Navigation\", min_values=1, # the minimum number", "Successfully Setup**\", description=f\"Channel: {channel.mention}\\n\" f\"Time: {time}\\n\" f\"Type: {autoboard_type}{tex}\", color=disnake.Color.green()) await msg.edit(embed=embed) @autoboard.sub_command(name=\"remove\", description=\"Remove", "{\"tophour\": 5}}) else: await server.update_one({\"server\": ctx.guild.id}, {'$set': {\"lbboardChannel\": channel.id}}) await server.update_one({\"server\": ctx.guild.id}, {'$set':", "for clan in await tracked.to_list(length=limit): tag = clan.get(\"tag\") c = await getClan(tag) location", "bot @commands.slash_command(name=\"autoboard\") async def autoboard(self, ctx): pass @autoboard.sub_command(name=\"create\", description=\"Create server autoposting leaderboards\") async", "f\"<t:{1643263200}:t>\" embed = disnake.Embed(title=\"**Autoboard Successfully Setup**\", description=f\"Channel: {channel.mention}\\n\" f\"Time: {time}\\n\" f\"Type: {autoboard_type}{tex}\", color=disnake.Color.green())", "tbc = results.get(\"topboardchannel\") tbc = await pingToChannel(ctx, tbc) tbc = tbc.mention except: pass", "await msg.edit(components=[]) break if res.author.id != ctx.author.id: await res.send(content=\"You must run the command", "\"Player Leaderboard\": await server.update_one({\"server\": ctx.guild.id}, {'$set': {\"topboardchannel\": None}}) await server.update_one({\"server\": ctx.guild.id}, {'$set': {\"tophour\":", "[] start_time = 1643263200 for x in range(0, 24): t = start_time +", "await server.update_one({\"server\": ctx.guild.id}, {'$set': {\"lbboardChannel\": None}}) await server.update_one({\"server\": ctx.guild.id}, {'$set': {\"country\": None}}) await", "channel: disnake.TextChannel, 
autoboard_type: str = commands.Param(choices=[\"Player Leaderboard\", \"Clan Leaderboard\"])): perms = ctx.author.guild_permissions.manage_guild if", "str(c.location) if location not in rr: rr.append(str(location)) options = [] for country in", "not in rr: rr.append(str(location)) options = [] for country in rr: options.append(disnake.SelectOption(label=f\"{country}\", value=f\"{country}\"))", "= \"\" if autoboard_type == \"Player Leaderboard\": await server.update_one({\"server\": ctx.guild.id}, {'$set': {\"topboardchannel\": channel.id}})", "= await ctx.original_message() country = None if autoboard_type == \"Clan Leaderboard\": rr =", "msg = await ctx.original_message() country = None if autoboard_type == \"Clan Leaderboard\": rr", "res.send(content=\"You must run the command to interact with components.\", ephemeral=True) continue country =", "tracked.to_list(length=limit): tag = clan.get(\"tag\") c = await getClan(tag) location = str(c.location) if location", "== False: try: res: disnake.MessageInteraction = await self.bot.wait_for(\"message_interaction\", check=check, timeout=600) except: await msg.edit(components=[])", "= None lbh = None country = None results = await server.find_one({\"server\": ctx.guild.id})", "f\"Clan leaderboard Post Time: {lbh}\\n\" f\"Clan leaderboard Country: {country}\\n\", color=disnake.Color.green()) await ctx.send(embed=embed) def", "th = results.get(\"tophour\") th = real_times[th - 5] th = f\"<t:1643263200:t>\" except: pass", "of options a user can select ) action_row = disnake.ui.ActionRow() action_row.append_item(select1) embed =", "return res.message.id == msg.id country = False while country == False: try: res:", "= ctx.author.guild_permissions.manage_guild if not perms: embed = disnake.Embed(description=\"Command requires you to have `Manage", "disnake.TextChannel, autoboard_type: str = commands.Param(choices=[\"Player Leaderboard\", \"Clan Leaderboard\"])): perms = ctx.author.guild_permissions.manage_guild if not", "c = 
await getClan(tag) location = str(c.location) if location not in rr: rr.append(str(location))", "= [] tracked = clans.find({\"server\": ctx.guild.id}) limit = await clans.count_documents(filter={\"server\": ctx.guild.id}) for clan", "ctx.guild.id}, {'$set': {\"lbboardChannel\": None}}) await server.update_one({\"server\": ctx.guild.id}, {'$set': {\"country\": None}}) await server.update_one({\"server\": ctx.guild.id},", "been removed.\", color=disnake.Color.green()) await ctx.send(embed=embed, components=[]) @autoboard.sub_command(name=\"list\", description=\"View server autoboards\") async def boardlist(self,", "await clans.count_documents(filter={\"server\": ctx.guild.id}) for clan in await tracked.to_list(length=limit): tag = clan.get(\"tag\") c =", "tbc = None th = None lbc = None lbh = None country", "= clans.find({\"server\": ctx.guild.id}) limit = await clans.count_documents(filter={\"server\": ctx.guild.id}) for clan in await tracked.to_list(length=limit):", "server autoboard\") async def removeboard(self, ctx: disnake.ApplicationCommandInteraction, autoboard_type: str = commands.Param(choices=[\"Player Leaderboard\", \"Clan", "must select max_values=1 # the maximum number of options a user can select", "usafam.clans server = usafam.server class autoB(commands.Cog, name=\"Board Setup\"): def __init__(self, bot: commands.Bot): self.bot", "tag = clan.get(\"tag\") c = await getClan(tag) location = str(c.location) if location not", "Post Time: {th}\\n\" f\"Clan leaderboard Channel: {lbc}\\n\" f\"Clan leaderboard Post Time: {lbh}\\n\" f\"Clan", "try: th = results.get(\"tophour\") th = real_times[th - 5] th = f\"<t:1643263200:t>\" except:", "None country = None results = await server.find_one({\"server\": ctx.guild.id}) real_times = [] start_time", "in rr: options.append(disnake.SelectOption(label=f\"{country}\", value=f\"{country}\")) select1 = disnake.ui.Select( options=options, placeholder=\"Page Navigation\", min_values=1, # the", "the minimum number of 
options a user must select max_values=1 # the maximum", "{th}\\n\" f\"Clan leaderboard Channel: {lbc}\\n\" f\"Clan leaderboard Post Time: {lbh}\\n\" f\"Clan leaderboard Country:", "from utils.clash import client, pingToChannel, getClan import disnake usafam = client.usafam clans =", "= usafam.server class autoB(commands.Cog, name=\"Board Setup\"): def __init__(self, bot: commands.Bot): self.bot = bot", "lbc) lbc = lbc.mention except: pass try: lbh = results.get(\"lbhour\") lbh = real_times[lbh", "components=[]) @autoboard.sub_command(name=\"list\", description=\"View server autoboards\") async def boardlist(self, ctx): tbc = None th", "name=\"Board Setup\"): def __init__(self, bot: commands.Bot): self.bot = bot @commands.slash_command(name=\"autoboard\") async def autoboard(self,", "clans.find({\"server\": ctx.guild.id}) limit = await clans.count_documents(filter={\"server\": ctx.guild.id}) for clan in await tracked.to_list(length=limit): tag", "def autoboard(self, ctx): pass @autoboard.sub_command(name=\"create\", description=\"Create server autoposting leaderboards\") async def setupboard(self, ctx:", "== \"Player Leaderboard\": await server.update_one({\"server\": ctx.guild.id}, {'$set': {\"topboardchannel\": channel.id}}) await server.update_one({\"server\": ctx.guild.id}, {'$set':", "real_times[th - 5] th = f\"<t:1643263200:t>\" except: pass try: lbc = results.get(\"lbboardChannel\") lbc", "Leaderboard\": await server.update_one({\"server\": ctx.guild.id}, {'$set': {\"topboardchannel\": None}}) await server.update_one({\"server\": ctx.guild.id}, {'$set': {\"tophour\": None}})", "description=f\"Player leaderboard Channel: {tbc}\\n\" f\"Player leaderboard Post Time: {th}\\n\" f\"Clan leaderboard Channel: {lbc}\\n\"", "perms = ctx.author.guild_permissions.manage_guild if not perms: embed = disnake.Embed(description=\"Command requires you to have", "autoboard_type == \"Player Leaderboard\": await server.update_one({\"server\": ctx.guild.id}, {'$set': 
{\"topboardchannel\": None}}) await server.update_one({\"server\": ctx.guild.id},", "limit = await clans.count_documents(filter={\"server\": ctx.guild.id}) for clan in await tracked.to_list(length=limit): tag = clan.get(\"tag\")", "# the maximum number of options a user can select ) action_row =", "3600) real_times.append(t) try: tbc = results.get(\"topboardchannel\") tbc = await pingToChannel(ctx, tbc) tbc =", "5}}) else: await server.update_one({\"server\": ctx.guild.id}, {'$set': {\"lbboardChannel\": channel.id}}) await server.update_one({\"server\": ctx.guild.id}, {'$set': {\"country\":", "# the minimum number of options a user must select max_values=1 # the", "results.get(\"lbboardChannel\") lbc = await pingToChannel(ctx, lbc) lbc = lbc.mention except: pass try: lbh", "leaderboard Post Time: {th}\\n\" f\"Clan leaderboard Channel: {lbc}\\n\" f\"Clan leaderboard Post Time: {lbh}\\n\"", "location = str(c.location) if location not in rr: rr.append(str(location)) options = [] for", "check(res: disnake.MessageInteraction): return res.message.id == msg.id country = False while country == False:", "f\"Type: {autoboard_type}{tex}\", color=disnake.Color.green()) await msg.edit(embed=embed) @autoboard.sub_command(name=\"remove\", description=\"Remove a server autoboard\") async def removeboard(self,", "ctx): pass @autoboard.sub_command(name=\"create\", description=\"Create server autoposting leaderboards\") async def setupboard(self, ctx: disnake.ApplicationCommandInteraction, channel:", "= [] for country in rr: options.append(disnake.SelectOption(label=f\"{country}\", value=f\"{country}\")) select1 = disnake.ui.Select( options=options, placeholder=\"Page", "await server.find_one({\"server\": ctx.guild.id}) real_times = [] start_time = 1643263200 for x in range(0,", "if autoboard_type == \"Player Leaderboard\": await server.update_one({\"server\": ctx.guild.id}, {'$set': {\"topboardchannel\": None}}) await server.update_one({\"server\":", "= 
disnake.Embed(title=\"**Autoboard Successfully Setup**\", description=f\"Channel: {channel.mention}\\n\" f\"Time: {time}\\n\" f\"Type: {autoboard_type}{tex}\", color=disnake.Color.green()) await msg.edit(embed=embed)", "if res.author.id != ctx.author.id: await res.send(content=\"You must run the command to interact with", "country = results.get(\"country\") except: pass embed = disnake.Embed(title=\"**Autoboard List**\", description=f\"Player leaderboard Channel: {tbc}\\n\"", "embed = disnake.Embed(title=\"**Autoboard Successfully Setup**\", description=f\"Channel: {channel.mention}\\n\" f\"Time: {time}\\n\" f\"Type: {autoboard_type}{tex}\", color=disnake.Color.green()) await", "action_row.append_item(select1) embed = disnake.Embed(title=\"**For what country would you like the leaderboard autoboard?**\", color=disnake.Color.green())", "= await pingToChannel(ctx, lbc) lbc = lbc.mention except: pass try: lbh = results.get(\"lbhour\")", "getClan(tag) location = str(c.location) if location not in rr: rr.append(str(location)) options = []", "select max_values=1 # the maximum number of options a user can select )", "server.update_one({\"server\": ctx.guild.id}, {'$set': {\"tophour\": 5}}) else: await server.update_one({\"server\": ctx.guild.id}, {'$set': {\"lbboardChannel\": channel.id}}) await", "server.update_one({\"server\": ctx.guild.id}, {'$set': {\"lbboardChannel\": None}}) await server.update_one({\"server\": ctx.guild.id}, {'$set': {\"country\": None}}) await server.update_one({\"server\":", "None if autoboard_type == \"Clan Leaderboard\": rr = [] tracked = clans.find({\"server\": ctx.guild.id})", "results.get(\"topboardchannel\") tbc = await pingToChannel(ctx, tbc) tbc = tbc.mention except: pass try: th", "try: res: disnake.MessageInteraction = await self.bot.wait_for(\"message_interaction\", check=check, timeout=600) except: await msg.edit(components=[]) break if", "{'$set': {\"topboardchannel\": channel.id}}) await server.update_one({\"server\": ctx.guild.id}, 
{'$set': {\"tophour\": 5}}) else: await server.update_one({\"server\": ctx.guild.id},", "{tbc}\\n\" f\"Player leaderboard Post Time: {th}\\n\" f\"Clan leaderboard Channel: {lbc}\\n\" f\"Clan leaderboard Post", "autoB(commands.Cog, name=\"Board Setup\"): def __init__(self, bot: commands.Bot): self.bot = bot @commands.slash_command(name=\"autoboard\") async def", "what country would you like the leaderboard autoboard?**\", color=disnake.Color.green()) await ctx.edit_original_message(embed=embed, components=[action_row]) def", "value=f\"{country}\")) select1 = disnake.ui.Select( options=options, placeholder=\"Page Navigation\", min_values=1, # the minimum number of", "None lbc = None lbh = None country = None results = await", "+ (x * 3600) real_times.append(t) try: tbc = results.get(\"topboardchannel\") tbc = await pingToChannel(ctx,", "List**\", description=f\"Player leaderboard Channel: {tbc}\\n\" f\"Player leaderboard Post Time: {th}\\n\" f\"Clan leaderboard Channel:", "* 3600) real_times.append(t) try: tbc = results.get(\"topboardchannel\") tbc = await pingToChannel(ctx, tbc) tbc", "disnake.ext import commands from utils.clash import client, pingToChannel, getClan import disnake usafam =", "await msg.edit(embed=embed) @autoboard.sub_command(name=\"remove\", description=\"Remove a server autoboard\") async def removeboard(self, ctx: disnake.ApplicationCommandInteraction, autoboard_type:", "{'$set': {\"country\": country}}) await server.update_one({\"server\": ctx.guild.id}, {'$set': {\"lbhour\": 5}}) tex = f\"\\nCountry: {country}\"", "24): t = start_time + (x * 3600) real_times.append(t) try: tbc = results.get(\"topboardchannel\")", "clans = usafam.clans server = usafam.server class autoB(commands.Cog, name=\"Board Setup\"): def __init__(self, bot:", "await self.bot.wait_for(\"message_interaction\", check=check, timeout=600) except: await msg.edit(components=[]) break if res.author.id != ctx.author.id: await", "await server.update_one({\"server\": ctx.guild.id}, 
{'$set': {\"lbhour\": 5}}) tex = f\"\\nCountry: {country}\" time = f\"<t:{1643263200}:t>\"", "pingToChannel(ctx, lbc) lbc = lbc.mention except: pass try: lbh = results.get(\"lbhour\") lbh =", "msg.edit(embed=embed) @autoboard.sub_command(name=\"remove\", description=\"Remove a server autoboard\") async def removeboard(self, ctx: disnake.ApplicationCommandInteraction, autoboard_type: str", "None th = None lbc = None lbh = None country = None", "async def autoboard(self, ctx): pass @autoboard.sub_command(name=\"create\", description=\"Create server autoposting leaderboards\") async def setupboard(self,", "disnake.Embed(title=\"**For what country would you like the leaderboard autoboard?**\", color=disnake.Color.green()) await ctx.edit_original_message(embed=embed, components=[action_row])", "to have `Manage Server` permissions.\", color=disnake.Color.red()) return await ctx.send(embed=embed) await ctx.response.defer() msg =", "Post Time: {lbh}\\n\" f\"Clan leaderboard Country: {country}\\n\", color=disnake.Color.green()) await ctx.send(embed=embed) def setup(bot: commands.Bot):", "f\"Time: {time}\\n\" f\"Type: {autoboard_type}{tex}\", color=disnake.Color.green()) await msg.edit(embed=embed) @autoboard.sub_command(name=\"remove\", description=\"Remove a server autoboard\") async", "select1 = disnake.ui.Select( options=options, placeholder=\"Page Navigation\", min_values=1, # the minimum number of options", "getClan import disnake usafam = client.usafam clans = usafam.clans server = usafam.server class", "lbh = f\"<t:1643263200:t>\" except: pass try: country = results.get(\"country\") except: pass embed =", "x in range(0, 24): t = start_time + (x * 3600) real_times.append(t) try:", "return await ctx.send(embed=embed) await ctx.response.defer() msg = await ctx.original_message() country = None if", "pass @autoboard.sub_command(name=\"create\", description=\"Create server autoposting leaderboards\") async def setupboard(self, ctx: disnake.ApplicationCommandInteraction, 
channel: disnake.TextChannel,", "color=disnake.Color.green()) await ctx.send(embed=embed, components=[]) @autoboard.sub_command(name=\"list\", description=\"View server autoboards\") async def boardlist(self, ctx): tbc", "await ctx.send(embed=embed, components=[]) @autoboard.sub_command(name=\"list\", description=\"View server autoboards\") async def boardlist(self, ctx): tbc =", "except: pass try: lbc = results.get(\"lbboardChannel\") lbc = await pingToChannel(ctx, lbc) lbc =", "ctx.author.id: await res.send(content=\"You must run the command to interact with components.\", ephemeral=True) continue", "= disnake.ui.ActionRow() action_row.append_item(select1) embed = disnake.Embed(title=\"**For what country would you like the leaderboard", "{\"lbhour\": None}}) embed = disnake.Embed(description=f\"{autoboard_type} autoboard has been removed.\", color=disnake.Color.green()) await ctx.send(embed=embed, components=[])", "if location not in rr: rr.append(str(location)) options = [] for country in rr:", "autoboard\") async def removeboard(self, ctx: disnake.ApplicationCommandInteraction, autoboard_type: str = commands.Param(choices=[\"Player Leaderboard\", \"Clan Leaderboard\"])):", "res.author.id != ctx.author.id: await res.send(content=\"You must run the command to interact with components.\",", "Leaderboard\"])): perms = ctx.author.guild_permissions.manage_guild if not perms: embed = disnake.Embed(description=\"Command requires you to", "Channel: {tbc}\\n\" f\"Player leaderboard Post Time: {th}\\n\" f\"Clan leaderboard Channel: {lbc}\\n\" f\"Clan leaderboard", "commands from utils.clash import client, pingToChannel, getClan import disnake usafam = client.usafam clans", "await ctx.original_message() country = None if autoboard_type == \"Clan Leaderboard\": rr = []", "real_times = [] start_time = 1643263200 for x in range(0, 24): t =", "country}}) await server.update_one({\"server\": ctx.guild.id}, {'$set': {\"lbhour\": 5}}) tex = f\"\\nCountry: {country}\" time =", 
"async def setupboard(self, ctx: disnake.ApplicationCommandInteraction, channel: disnake.TextChannel, autoboard_type: str = commands.Param(choices=[\"Player Leaderboard\", \"Clan", "requires you to have `Manage Server` permissions.\", color=disnake.Color.red()) return await ctx.send(embed=embed) if autoboard_type", "server.update_one({\"server\": ctx.guild.id}, {'$set': {\"lbhour\": 5}}) tex = f\"\\nCountry: {country}\" time = f\"<t:{1643263200}:t>\" embed", "try: tbc = results.get(\"topboardchannel\") tbc = await pingToChannel(ctx, tbc) tbc = tbc.mention except:", "tbc.mention except: pass try: th = results.get(\"tophour\") th = real_times[th - 5] th", "th = None lbc = None lbh = None country = None results", "if autoboard_type == \"Player Leaderboard\": await server.update_one({\"server\": ctx.guild.id}, {'$set': {\"topboardchannel\": channel.id}}) await server.update_one({\"server\":", "Time: {lbh}\\n\" f\"Clan leaderboard Country: {country}\\n\", color=disnake.Color.green()) await ctx.send(embed=embed) def setup(bot: commands.Bot): bot.add_cog(autoB(bot))", "commands.Param(choices=[\"Player Leaderboard\", \"Clan Leaderboard\"])): perms = ctx.author.guild_permissions.manage_guild if not perms: embed = disnake.Embed(description=\"Command", "ctx.guild.id}, {'$set': {\"lbhour\": 5}}) tex = f\"\\nCountry: {country}\" time = f\"<t:{1643263200}:t>\" embed =", "async def removeboard(self, ctx: disnake.ApplicationCommandInteraction, autoboard_type: str = commands.Param(choices=[\"Player Leaderboard\", \"Clan Leaderboard\"])): perms", "in rr: rr.append(str(location)) options = [] for country in rr: options.append(disnake.SelectOption(label=f\"{country}\", value=f\"{country}\")) select1", "country = None results = await server.find_one({\"server\": ctx.guild.id}) real_times = [] start_time =", "{channel.mention}\\n\" f\"Time: {time}\\n\" f\"Type: {autoboard_type}{tex}\", color=disnake.Color.green()) await msg.edit(embed=embed) @autoboard.sub_command(name=\"remove\", 
description=\"Remove a server autoboard\")", "= results.get(\"tophour\") th = real_times[th - 5] th = f\"<t:1643263200:t>\" except: pass try:", "autoposting leaderboards\") async def setupboard(self, ctx: disnake.ApplicationCommandInteraction, channel: disnake.TextChannel, autoboard_type: str = commands.Param(choices=[\"Player", "ctx: disnake.ApplicationCommandInteraction, channel: disnake.TextChannel, autoboard_type: str = commands.Param(choices=[\"Player Leaderboard\", \"Clan Leaderboard\"])): perms =", "disnake.ui.ActionRow() action_row.append_item(select1) embed = disnake.Embed(title=\"**For what country would you like the leaderboard autoboard?**\",", "await tracked.to_list(length=limit): tag = clan.get(\"tag\") c = await getClan(tag) location = str(c.location) if", "can select ) action_row = disnake.ui.ActionRow() action_row.append_item(select1) embed = disnake.Embed(title=\"**For what country would", "await server.update_one({\"server\": ctx.guild.id}, {'$set': {\"topboardchannel\": None}}) await server.update_one({\"server\": ctx.guild.id}, {'$set': {\"tophour\": None}}) else:", "leaderboard Channel: {tbc}\\n\" f\"Player leaderboard Post Time: {th}\\n\" f\"Clan leaderboard Channel: {lbc}\\n\" f\"Clan", "= await clans.count_documents(filter={\"server\": ctx.guild.id}) for clan in await tracked.to_list(length=limit): tag = clan.get(\"tag\") c", "for country in rr: options.append(disnake.SelectOption(label=f\"{country}\", value=f\"{country}\")) select1 = disnake.ui.Select( options=options, placeholder=\"Page Navigation\", min_values=1,", "[] tracked = clans.find({\"server\": ctx.guild.id}) limit = await clans.count_documents(filter={\"server\": ctx.guild.id}) for clan in", "class autoB(commands.Cog, name=\"Board Setup\"): def __init__(self, bot: commands.Bot): self.bot = bot @commands.slash_command(name=\"autoboard\") async", "clan.get(\"tag\") c = await getClan(tag) location = str(c.location) if location not in rr:", "rr: rr.append(str(location)) options = [] 
for country in rr: options.append(disnake.SelectOption(label=f\"{country}\", value=f\"{country}\")) select1 =", "description=f\"Channel: {channel.mention}\\n\" f\"Time: {time}\\n\" f\"Type: {autoboard_type}{tex}\", color=disnake.Color.green()) await msg.edit(embed=embed) @autoboard.sub_command(name=\"remove\", description=\"Remove a server", "Channel: {lbc}\\n\" f\"Clan leaderboard Post Time: {lbh}\\n\" f\"Clan leaderboard Country: {country}\\n\", color=disnake.Color.green()) await", "import disnake usafam = client.usafam clans = usafam.clans server = usafam.server class autoB(commands.Cog,", "pass try: country = results.get(\"country\") except: pass embed = disnake.Embed(title=\"**Autoboard List**\", description=f\"Player leaderboard", "server.update_one({\"server\": ctx.guild.id}, {'$set': {\"country\": country}}) await server.update_one({\"server\": ctx.guild.id}, {'$set': {\"lbhour\": 5}}) tex =", "minimum number of options a user must select max_values=1 # the maximum number", "= None lbc = None lbh = None country = None results =", "= start_time + (x * 3600) real_times.append(t) try: tbc = results.get(\"topboardchannel\") tbc =", "= 1643263200 for x in range(0, 24): t = start_time + (x *", "autoboard has been removed.\", color=disnake.Color.green()) await ctx.send(embed=embed, components=[]) @autoboard.sub_command(name=\"list\", description=\"View server autoboards\") async", "in await tracked.to_list(length=limit): tag = clan.get(\"tag\") c = await getClan(tag) location = str(c.location)", "self.bot = bot @commands.slash_command(name=\"autoboard\") async def autoboard(self, ctx): pass @autoboard.sub_command(name=\"create\", description=\"Create server autoposting", "server.update_one({\"server\": ctx.guild.id}, {'$set': {\"topboardchannel\": channel.id}}) await server.update_one({\"server\": ctx.guild.id}, {'$set': {\"tophour\": 5}}) else: await", "user must select max_values=1 # the maximum number of options a user can", "= str(res.values[0]) tex = \"\" if 
autoboard_type == \"Player Leaderboard\": await server.update_one({\"server\": ctx.guild.id},", "a user must select max_values=1 # the maximum number of options a user", "False while country == False: try: res: disnake.MessageInteraction = await self.bot.wait_for(\"message_interaction\", check=check, timeout=600)", "__init__(self, bot: commands.Bot): self.bot = bot @commands.slash_command(name=\"autoboard\") async def autoboard(self, ctx): pass @autoboard.sub_command(name=\"create\",", "while country == False: try: res: disnake.MessageInteraction = await self.bot.wait_for(\"message_interaction\", check=check, timeout=600) except:", "tex = \"\" if autoboard_type == \"Player Leaderboard\": await server.update_one({\"server\": ctx.guild.id}, {'$set': {\"topboardchannel\":", "disnake.Embed(title=\"**Autoboard List**\", description=f\"Player leaderboard Channel: {tbc}\\n\" f\"Player leaderboard Post Time: {th}\\n\" f\"Clan leaderboard", "Setup**\", description=f\"Channel: {channel.mention}\\n\" f\"Time: {time}\\n\" f\"Type: {autoboard_type}{tex}\", color=disnake.Color.green()) await msg.edit(embed=embed) @autoboard.sub_command(name=\"remove\", description=\"Remove a", "f\"Clan leaderboard Channel: {lbc}\\n\" f\"Clan leaderboard Post Time: {lbh}\\n\" f\"Clan leaderboard Country: {country}\\n\",", "Server` permissions.\", color=disnake.Color.red()) return await ctx.send(embed=embed) await ctx.response.defer() msg = await ctx.original_message() country", "server autoposting leaderboards\") async def setupboard(self, ctx: disnake.ApplicationCommandInteraction, channel: disnake.TextChannel, autoboard_type: str =", "ctx.original_message() country = None if autoboard_type == \"Clan Leaderboard\": rr = [] tracked", "- 5] th = f\"<t:1643263200:t>\" except: pass try: lbc = results.get(\"lbboardChannel\") lbc =", "you to have `Manage Server` permissions.\", color=disnake.Color.red()) return await ctx.send(embed=embed) if autoboard_type ==", "await server.update_one({\"server\": 
ctx.guild.id}, {'$set': {\"tophour\": None}}) else: await server.update_one({\"server\": ctx.guild.id}, {'$set': {\"lbboardChannel\": None}})", "lbh = real_times[lbh - 5] lbh = f\"<t:1643263200:t>\" except: pass try: country =", "\"Clan Leaderboard\": rr = [] tracked = clans.find({\"server\": ctx.guild.id}) limit = await clans.count_documents(filter={\"server\":", "tracked = clans.find({\"server\": ctx.guild.id}) limit = await clans.count_documents(filter={\"server\": ctx.guild.id}) for clan in await", "str(res.values[0]) tex = \"\" if autoboard_type == \"Player Leaderboard\": await server.update_one({\"server\": ctx.guild.id}, {'$set':", "description=\"Create server autoposting leaderboards\") async def setupboard(self, ctx: disnake.ApplicationCommandInteraction, channel: disnake.TextChannel, autoboard_type: str", "tbc) tbc = tbc.mention except: pass try: th = results.get(\"tophour\") th = real_times[th", "must run the command to interact with components.\", ephemeral=True) continue country = str(res.values[0])", "boardlist(self, ctx): tbc = None th = None lbc = None lbh =", "disnake.Embed(description=\"Command requires you to have `Manage Server` permissions.\", color=disnake.Color.red()) return await ctx.send(embed=embed) await", "f\"<t:1643263200:t>\" except: pass try: lbc = results.get(\"lbboardChannel\") lbc = await pingToChannel(ctx, lbc) lbc", "results.get(\"tophour\") th = real_times[th - 5] th = f\"<t:1643263200:t>\" except: pass try: lbc", "requires you to have `Manage Server` permissions.\", color=disnake.Color.red()) return await ctx.send(embed=embed) await ctx.response.defer()", "pingToChannel(ctx, tbc) tbc = tbc.mention except: pass try: th = results.get(\"tophour\") th =", "if autoboard_type == \"Clan Leaderboard\": rr = [] tracked = clans.find({\"server\": ctx.guild.id}) limit", "select ) action_row = disnake.ui.ActionRow() action_row.append_item(select1) embed = disnake.Embed(title=\"**For what country would you", "break if res.author.id != 
ctx.author.id: await res.send(content=\"You must run the command to interact", "await res.send(content=\"You must run the command to interact with components.\", ephemeral=True) continue country" ]
[ "i.e. these rules expose flavors for named AMD modules and ECMAScript module output.", "provider should be collected. Includes data runfiles needed for the default outputs from", "== \"JSNamedModuleInfo\": return JSNamedModuleInfo elif name == \"JSEcmaScriptModuleInfo\": return JSEcmaScriptModuleInfo fail(\"Unexpected JavaScript module", "of the extract_js_module_output rule.\"\"\" def _extract_js_module_output_impl(ctx): js_module_provider = _name_to_js_module_provider(ctx.attr.provider) depsets = [] for", "if ctx.attr.include_declarations and DeclarationInfo in dep: depsets.append(dep[DeclarationInfo].transitive_declarations) # Based on whether default files", "collected, extract direct # and transitive declaration files using the `DeclarationInfo` provider. if", "flavors (which are distinguishable by the JavaScript module providers as imported from `providers.bzl`).", "== \"JSEcmaScriptModuleInfo\": return JSEcmaScriptModuleInfo fail(\"Unexpected JavaScript module provider.\") \"\"\"Implementation of the extract_js_module_output rule.\"\"\"", "For reference: https://github.com/bazelbuild/rules_nodejs/blob/stable/packages/typescript/internal/build_defs.bzl#L334-L337 \"\"\" extract_js_module_output = rule( implementation = _extract_js_module_output_impl, attrs = {", "\"include_default_files\": attr.bool( mandatory = True, doc = \"\"\" Whether files from the `DefaultInfo`", "\"provider\": attr.string( doc = \"JavaScript module info provider that is used for collecting", "example only the `JSModule` outputs are of interest. As an example: This rule", "(including transitive outputs) based on the # configured JavaScript module provider. 
if js_module_provider", "defined for a target while for example only the `JSModule` outputs are of", "for a target while for example only the `JSModule` outputs are of interest.", "instance.\"\"\" def _name_to_js_module_provider(name): if name == \"JSModuleInfo\": return JSModuleInfo elif name == \"JSNamedModuleInfo\":", "are needed for the current target. # https://docs.bazel.build/versions/main/skylark/lib/DefaultInfo.html#data_runfiles if ctx.attr.include_default_files and DefaultInfo in", "files should be collected, extract direct # files which are exposed using the", "example: This rule is helpful in combination with `ts_library` and `ng_module` as those", "doc = \"Whether declaration files should be collected from the dependencies.\", ), \"include_default_files\":", "name == \"JSNamedModuleInfo\": return JSNamedModuleInfo elif name == \"JSEcmaScriptModuleInfo\": return JSEcmaScriptModuleInfo fail(\"Unexpected JavaScript", "attrs = { \"deps\": attr.label_list( allow_files = True, ), \"provider\": attr.string( doc =", "== \"JSModuleInfo\": return JSModuleInfo elif name == \"JSNamedModuleInfo\": return JSNamedModuleInfo elif name ==", "depsets.append(dep[DeclarationInfo].transitive_declarations) # Based on whether default files should be collected, extract direct #", "configurable JavaScript module provider. The extracted outputs are exposed within the `DefaultInfo` provider.", "the # configured JavaScript module provider. if js_module_provider in dep: depsets.append(dep[js_module_provider].sources) # Based", "Also include # data runfiles which are needed for the current target. 
#", "\"\"\"Implementation of the extract_js_module_output rule.\"\"\" def _extract_js_module_output_impl(ctx): js_module_provider = _name_to_js_module_provider(ctx.attr.provider) depsets = []", "JavaScript sources, or if there are multiple JavaScript output variants defined for a", "an example: This rule is helpful in combination with `ts_library` and `ng_module` as", "dependencies.\", ), \"include_default_files\": attr.bool( mandatory = True, doc = \"\"\" Whether files from", "extract direct # and transitive declaration files using the `DeclarationInfo` provider. if ctx.attr.include_declarations", "True, doc = \"Whether declaration files should be collected from the dependencies.\", ),", "Targets defined using this rule can be used as input for rules that", "can be used as input for rules that require JavaScript sources, or if", "DefaultInfo in dep: depsets.append(dep[DefaultInfo].files) depsets.append(dep[DefaultInfo].data_runfiles.files) sources = depset(transitive = depsets) return [DefaultInfo(files =", "Based on whether declarations should be collected, extract direct # and transitive declaration", "there are multiple JavaScript output variants defined for a target while for example", "in ctx.attr.deps: # Include JavaScript sources (including transitive outputs) based on the #", "those rule expose multiple output flavors (which are distinguishable by the JavaScript module", "based on a configurable JavaScript module provider. The extracted outputs are exposed within", "_extract_js_module_output_impl(ctx): js_module_provider = _name_to_js_module_provider(ctx.attr.provider) depsets = [] for dep in ctx.attr.deps: # Include", "JSModuleInfo elif name == \"JSNamedModuleInfo\": return JSNamedModuleInfo elif name == \"JSEcmaScriptModuleInfo\": return JSEcmaScriptModuleInfo", "declaration files should be collected from the dependencies.\", ), \"include_default_files\": attr.bool( mandatory =", "from `providers.bzl`). i.e. 
these rules expose flavors for named AMD modules and ECMAScript", "helpful in combination with `ts_library` and `ng_module` as those rule expose multiple output", "Rule that collects declared JavaScript module output files from a list of dependencies", "module output files from a list of dependencies based on a configurable JavaScript", "imported from `providers.bzl`). i.e. these rules expose flavors for named AMD modules and", "outputs) based on the # configured JavaScript module provider. if js_module_provider in dep:", "provider. Targets defined using this rule can be used as input for rules", "depset(transitive = depsets) return [DefaultInfo(files = sources)] \"\"\" Rule that collects declared JavaScript", "`JSModule` outputs are of interest. As an example: This rule is helpful in", "info provider that is used for collecting sources from the dependencies.\", mandatory =", "and DeclarationInfo in dep: depsets.append(dep[DeclarationInfo].transitive_declarations) # Based on whether default files should be", "for rules that require JavaScript sources, or if there are multiple JavaScript output", "outputs are exposed within the `DefaultInfo` provider. Targets defined using this rule can", "declared JavaScript module output files from a list of dependencies based on a", "for dep in ctx.attr.deps: # Include JavaScript sources (including transitive outputs) based on", "files which are exposed using the `DefaultInfo` provider. Also include # data runfiles", "rule expose multiple output flavors (which are distinguishable by the JavaScript module providers", "the `DefaultInfo` provider should be collected. Includes data runfiles needed for the default", "exposed using the `DefaultInfo` provider. 
Also include # data runfiles which are needed", "This rule is helpful in combination with `ts_library` and `ng_module` as those rule", "https://github.com/bazelbuild/rules_nodejs/blob/stable/packages/typescript/internal/build_defs.bzl#L334-L337 \"\"\" extract_js_module_output = rule( implementation = _extract_js_module_output_impl, attrs = { \"deps\": attr.label_list(", "flavors for named AMD modules and ECMAScript module output. For reference: https://github.com/bazelbuild/rules_nodejs/blob/stable/packages/typescript/internal/build_defs.bzl#L334-L337 \"\"\"", "`DeclarationInfo` provider. if ctx.attr.include_declarations and DeclarationInfo in dep: depsets.append(dep[DeclarationInfo].transitive_declarations) # Based on whether", "JavaScript output variants defined for a target while for example only the `JSModule`", "for example only the `JSModule` outputs are of interest. As an example: This", "rule( implementation = _extract_js_module_output_impl, attrs = { \"deps\": attr.label_list( allow_files = True, ),", "needed for the current target. 
# https://docs.bazel.build/versions/main/skylark/lib/DefaultInfo.html#data_runfiles if ctx.attr.include_default_files and DefaultInfo in dep:", "= True, doc = \"\"\" Whether files from the `DefaultInfo` provider should be", "used for collecting sources from the dependencies.\", mandatory = True, values = [\"JSModuleInfo\",", "name == \"JSModuleInfo\": return JSModuleInfo elif name == \"JSNamedModuleInfo\": return JSNamedModuleInfo elif name", "\"JSNamedModuleInfo\": return JSNamedModuleInfo elif name == \"JSEcmaScriptModuleInfo\": return JSEcmaScriptModuleInfo fail(\"Unexpected JavaScript module provider.\")", "multiple JavaScript output variants defined for a target while for example only the", "input for rules that require JavaScript sources, or if there are multiple JavaScript", "JavaScript sources (including transitive outputs) based on the # configured JavaScript module provider.", "named AMD modules and ECMAScript module output. For reference: https://github.com/bazelbuild/rules_nodejs/blob/stable/packages/typescript/internal/build_defs.bzl#L334-L337 \"\"\" extract_js_module_output =", "direct # and transitive declaration files using the `DeclarationInfo` provider. if ctx.attr.include_declarations and", "AMD modules and ECMAScript module output. 
For reference: https://github.com/bazelbuild/rules_nodejs/blob/stable/packages/typescript/internal/build_defs.bzl#L334-L337 \"\"\" extract_js_module_output = rule(", "JSNamedModuleInfo elif name == \"JSEcmaScriptModuleInfo\": return JSEcmaScriptModuleInfo fail(\"Unexpected JavaScript module provider.\") \"\"\"Implementation of", "fail(\"Unexpected JavaScript module provider.\") \"\"\"Implementation of the extract_js_module_output rule.\"\"\" def _extract_js_module_output_impl(ctx): js_module_provider =", "JSEcmaScriptModuleInfo fail(\"Unexpected JavaScript module provider.\") \"\"\"Implementation of the extract_js_module_output rule.\"\"\" def _extract_js_module_output_impl(ctx): js_module_provider", "`DefaultInfo` provider. Also include # data runfiles which are needed for the current", "\"JSModuleInfo\", \"JSNamedModuleInfo\") \"\"\"Converts a provider name to its actually Starlark provider instance.\"\"\" def", "\"JavaScript module info provider that is used for collecting sources from the dependencies.\",", "from the `DefaultInfo` provider should be collected. Includes data runfiles needed for the", "outputs are of interest. As an example: This rule is helpful in combination", "dep: depsets.append(dep[js_module_provider].sources) # Based on whether declarations should be collected, extract direct #", "depsets) return [DefaultInfo(files = sources)] \"\"\" Rule that collects declared JavaScript module output", "# data runfiles which are needed for the current target. # https://docs.bazel.build/versions/main/skylark/lib/DefaultInfo.html#data_runfiles if", "whether declarations should be collected, extract direct # and transitive declaration files using", "def _extract_js_module_output_impl(ctx): js_module_provider = _name_to_js_module_provider(ctx.attr.provider) depsets = [] for dep in ctx.attr.deps: #", "by the JavaScript module providers as imported from `providers.bzl`). i.e. 
these rules expose", "js_module_provider in dep: depsets.append(dep[js_module_provider].sources) # Based on whether declarations should be collected, extract", "\"\"\" Whether files from the `DefaultInfo` provider should be collected. Includes data runfiles", "to its actually Starlark provider instance.\"\"\" def _name_to_js_module_provider(name): if name == \"JSModuleInfo\": return", "# and transitive declaration files using the `DeclarationInfo` provider. if ctx.attr.include_declarations and DeclarationInfo", "data runfiles which are needed for the current target. # https://docs.bazel.build/versions/main/skylark/lib/DefaultInfo.html#data_runfiles if ctx.attr.include_default_files", "`providers.bzl`). i.e. these rules expose flavors for named AMD modules and ECMAScript module", "should be collected. Includes data runfiles needed for the default outputs from dependencies.", "output. For reference: https://github.com/bazelbuild/rules_nodejs/blob/stable/packages/typescript/internal/build_defs.bzl#L334-L337 \"\"\" extract_js_module_output = rule( implementation = _extract_js_module_output_impl, attrs =", "ctx.attr.include_default_files and DefaultInfo in dep: depsets.append(dep[DefaultInfo].files) depsets.append(dep[DefaultInfo].data_runfiles.files) sources = depset(transitive = depsets) return", "[] for dep in ctx.attr.deps: # Include JavaScript sources (including transitive outputs) based", "direct # files which are exposed using the `DefaultInfo` provider. Also include #", "the current target. # https://docs.bazel.build/versions/main/skylark/lib/DefaultInfo.html#data_runfiles if ctx.attr.include_default_files and DefaultInfo in dep: depsets.append(dep[DefaultInfo].files) depsets.append(dep[DefaultInfo].data_runfiles.files)", "are of interest. 
As an example: This rule is helpful in combination with", "= rule( implementation = _extract_js_module_output_impl, attrs = { \"deps\": attr.label_list( allow_files = True,", "extracted outputs are exposed within the `DefaultInfo` provider. Targets defined using this rule", "the extract_js_module_output rule.\"\"\" def _extract_js_module_output_impl(ctx): js_module_provider = _name_to_js_module_provider(ctx.attr.provider) depsets = [] for dep", "# files which are exposed using the `DefaultInfo` provider. Also include # data", "True, values = [\"JSModuleInfo\", \"JSNamedModuleInfo\", \"JSEcmaScriptModuleInfo\"], ), \"include_declarations\": attr.bool( mandatory = True, doc", "_name_to_js_module_provider(name): if name == \"JSModuleInfo\": return JSModuleInfo elif name == \"JSNamedModuleInfo\": return JSNamedModuleInfo", "should be collected, extract direct # files which are exposed using the `DefaultInfo`", "attr.string( doc = \"JavaScript module info provider that is used for collecting sources", "this rule can be used as input for rules that require JavaScript sources,", "files from a list of dependencies based on a configurable JavaScript module provider.", "using the `DeclarationInfo` provider. if ctx.attr.include_declarations and DeclarationInfo in dep: depsets.append(dep[DeclarationInfo].transitive_declarations) # Based", "from the dependencies.\", ), \"include_default_files\": attr.bool( mandatory = True, doc = \"\"\" Whether", "\"JSEcmaScriptModuleInfo\", \"JSModuleInfo\", \"JSNamedModuleInfo\") \"\"\"Converts a provider name to its actually Starlark provider instance.\"\"\"", "module output. For reference: https://github.com/bazelbuild/rules_nodejs/blob/stable/packages/typescript/internal/build_defs.bzl#L334-L337 \"\"\" extract_js_module_output = rule( implementation = _extract_js_module_output_impl, attrs", "provider. 
if js_module_provider in dep: depsets.append(dep[js_module_provider].sources) # Based on whether declarations should be", "= sources)] \"\"\" Rule that collects declared JavaScript module output files from a", "attr.bool( mandatory = True, doc = \"Whether declaration files should be collected from", "\"deps\": attr.label_list( allow_files = True, ), \"provider\": attr.string( doc = \"JavaScript module info", "as input for rules that require JavaScript sources, or if there are multiple", "in combination with `ts_library` and `ng_module` as those rule expose multiple output flavors", "= \"\"\" Whether files from the `DefaultInfo` provider should be collected. Includes data", "list of dependencies based on a configurable JavaScript module provider. The extracted outputs", "if js_module_provider in dep: depsets.append(dep[js_module_provider].sources) # Based on whether declarations should be collected,", "name to its actually Starlark provider instance.\"\"\" def _name_to_js_module_provider(name): if name == \"JSModuleInfo\":", "\"\"\" Rule that collects declared JavaScript module output files from a list of", "for named AMD modules and ECMAScript module output. For reference: https://github.com/bazelbuild/rules_nodejs/blob/stable/packages/typescript/internal/build_defs.bzl#L334-L337 \"\"\" extract_js_module_output", "\"JSNamedModuleInfo\", \"JSEcmaScriptModuleInfo\"], ), \"include_declarations\": attr.bool( mandatory = True, doc = \"Whether declaration files", "interest. As an example: This rule is helpful in combination with `ts_library` and", "expose flavors for named AMD modules and ECMAScript module output. 
For reference: https://github.com/bazelbuild/rules_nodejs/blob/stable/packages/typescript/internal/build_defs.bzl#L334-L337", "depsets.append(dep[DefaultInfo].files) depsets.append(dep[DefaultInfo].data_runfiles.files) sources = depset(transitive = depsets) return [DefaultInfo(files = sources)] \"\"\" Rule", "attr.label_list( allow_files = True, ), \"provider\": attr.string( doc = \"JavaScript module info provider", "the `DefaultInfo` provider. Also include # data runfiles which are needed for the", "variants defined for a target while for example only the `JSModule` outputs are", "a target while for example only the `JSModule` outputs are of interest. As", "which are needed for the current target. # https://docs.bazel.build/versions/main/skylark/lib/DefaultInfo.html#data_runfiles if ctx.attr.include_default_files and DefaultInfo", "are exposed using the `DefaultInfo` provider. Also include # data runfiles which are", "= { \"deps\": attr.label_list( allow_files = True, ), \"provider\": attr.string( doc = \"JavaScript", "), \"provider\": attr.string( doc = \"JavaScript module info provider that is used for", "provider name to its actually Starlark provider instance.\"\"\" def _name_to_js_module_provider(name): if name ==", "rule can be used as input for rules that require JavaScript sources, or", "these rules expose flavors for named AMD modules and ECMAScript module output. For", "ECMAScript module output. For reference: https://github.com/bazelbuild/rules_nodejs/blob/stable/packages/typescript/internal/build_defs.bzl#L334-L337 \"\"\" extract_js_module_output = rule( implementation = _extract_js_module_output_impl,", "= [\"JSModuleInfo\", \"JSNamedModuleInfo\", \"JSEcmaScriptModuleInfo\"], ), \"include_declarations\": attr.bool( mandatory = True, doc = \"Whether", "include # data runfiles which are needed for the current target. 
# https://docs.bazel.build/versions/main/skylark/lib/DefaultInfo.html#data_runfiles", "attr.bool( mandatory = True, doc = \"\"\" Whether files from the `DefaultInfo` provider", "for collecting sources from the dependencies.\", mandatory = True, values = [\"JSModuleInfo\", \"JSNamedModuleInfo\",", "target. # https://docs.bazel.build/versions/main/skylark/lib/DefaultInfo.html#data_runfiles if ctx.attr.include_default_files and DefaultInfo in dep: depsets.append(dep[DefaultInfo].files) depsets.append(dep[DefaultInfo].data_runfiles.files) sources =", "output flavors (which are distinguishable by the JavaScript module providers as imported from", "collected from the dependencies.\", ), \"include_default_files\": attr.bool( mandatory = True, doc = \"\"\"", "is helpful in combination with `ts_library` and `ng_module` as those rule expose multiple", "collecting sources from the dependencies.\", mandatory = True, values = [\"JSModuleInfo\", \"JSNamedModuleInfo\", \"JSEcmaScriptModuleInfo\"],", "the JavaScript module providers as imported from `providers.bzl`). i.e. these rules expose flavors", "\"include_declarations\": attr.bool( mandatory = True, doc = \"Whether declaration files should be collected", "ctx.attr.deps: # Include JavaScript sources (including transitive outputs) based on the # configured", "# Based on whether default files should be collected, extract direct # files", "multiple output flavors (which are distinguishable by the JavaScript module providers as imported", "`DefaultInfo` provider should be collected. Includes data runfiles needed for the default outputs", "configured JavaScript module provider. if js_module_provider in dep: depsets.append(dep[js_module_provider].sources) # Based on whether", "a configurable JavaScript module provider. 
The extracted outputs are exposed within the `DefaultInfo`", "using this rule can be used as input for rules that require JavaScript", "whether default files should be collected, extract direct # files which are exposed", "as imported from `providers.bzl`). i.e. these rules expose flavors for named AMD modules", "be collected, extract direct # and transitive declaration files using the `DeclarationInfo` provider.", "\"Whether declaration files should be collected from the dependencies.\", ), \"include_default_files\": attr.bool( mandatory", "exposed within the `DefaultInfo` provider. Targets defined using this rule can be used", "provider. The extracted outputs are exposed within the `DefaultInfo` provider. Targets defined using", "or if there are multiple JavaScript output variants defined for a target while", "\"JSEcmaScriptModuleInfo\": return JSEcmaScriptModuleInfo fail(\"Unexpected JavaScript module provider.\") \"\"\"Implementation of the extract_js_module_output rule.\"\"\" def", "from the dependencies.\", mandatory = True, values = [\"JSModuleInfo\", \"JSNamedModuleInfo\", \"JSEcmaScriptModuleInfo\"], ), \"include_declarations\":", "= depsets) return [DefaultInfo(files = sources)] \"\"\" Rule that collects declared JavaScript module", "as those rule expose multiple output flavors (which are distinguishable by the JavaScript", "be used as input for rules that require JavaScript sources, or if there", "elif name == \"JSEcmaScriptModuleInfo\": return JSEcmaScriptModuleInfo fail(\"Unexpected JavaScript module provider.\") \"\"\"Implementation of the", "be collected, extract direct # files which are exposed using the `DefaultInfo` provider.", "modules and ECMAScript module output. 
For reference: https://github.com/bazelbuild/rules_nodejs/blob/stable/packages/typescript/internal/build_defs.bzl#L334-L337 \"\"\" extract_js_module_output = rule( implementation", "is used for collecting sources from the dependencies.\", mandatory = True, values =", "be collected from the dependencies.\", ), \"include_default_files\": attr.bool( mandatory = True, doc =", "doc = \"\"\" Whether files from the `DefaultInfo` provider should be collected. Includes", "_extract_js_module_output_impl, attrs = { \"deps\": attr.label_list( allow_files = True, ), \"provider\": attr.string( doc", "depsets.append(dep[DefaultInfo].data_runfiles.files) sources = depset(transitive = depsets) return [DefaultInfo(files = sources)] \"\"\" Rule that", "on whether default files should be collected, extract direct # files which are", "= True, doc = \"Whether declaration files should be collected from the dependencies.\",", "providers as imported from `providers.bzl`). i.e. these rules expose flavors for named AMD", "while for example only the `JSModule` outputs are of interest. As an example:", "provider. if ctx.attr.include_declarations and DeclarationInfo in dep: depsets.append(dep[DeclarationInfo].transitive_declarations) # Based on whether default", "provider instance.\"\"\" def _name_to_js_module_provider(name): if name == \"JSModuleInfo\": return JSModuleInfo elif name ==", "files should be collected from the dependencies.\", ), \"include_default_files\": attr.bool( mandatory = True,", "\"JSEcmaScriptModuleInfo\"], ), \"include_declarations\": attr.bool( mandatory = True, doc = \"Whether declaration files should", "return [DefaultInfo(files = sources)] \"\"\" Rule that collects declared JavaScript module output files", "provider that is used for collecting sources from the dependencies.\", mandatory = True,", "= _name_to_js_module_provider(ctx.attr.provider) depsets = [] for dep in ctx.attr.deps: # Include JavaScript sources", "runfiles which are needed for the current target. 
# https://docs.bazel.build/versions/main/skylark/lib/DefaultInfo.html#data_runfiles if ctx.attr.include_default_files and", "sources = depset(transitive = depsets) return [DefaultInfo(files = sources)] \"\"\" Rule that collects", "depsets.append(dep[js_module_provider].sources) # Based on whether declarations should be collected, extract direct # and", "rules that require JavaScript sources, or if there are multiple JavaScript output variants", "JavaScript module output files from a list of dependencies based on a configurable", "True, doc = \"\"\" Whether files from the `DefaultInfo` provider should be collected.", "js_module_provider = _name_to_js_module_provider(ctx.attr.provider) depsets = [] for dep in ctx.attr.deps: # Include JavaScript", "return JSModuleInfo elif name == \"JSNamedModuleInfo\": return JSNamedModuleInfo elif name == \"JSEcmaScriptModuleInfo\": return", "# configured JavaScript module provider. if js_module_provider in dep: depsets.append(dep[js_module_provider].sources) # Based on", "elif name == \"JSNamedModuleInfo\": return JSNamedModuleInfo elif name == \"JSEcmaScriptModuleInfo\": return JSEcmaScriptModuleInfo fail(\"Unexpected", "with `ts_library` and `ng_module` as those rule expose multiple output flavors (which are", "within the `DefaultInfo` provider. Targets defined using this rule can be used as", "distinguishable by the JavaScript module providers as imported from `providers.bzl`). i.e. these rules", "extract_js_module_output = rule( implementation = _extract_js_module_output_impl, attrs = { \"deps\": attr.label_list( allow_files =", "values = [\"JSModuleInfo\", \"JSNamedModuleInfo\", \"JSEcmaScriptModuleInfo\"], ), \"include_declarations\": attr.bool( mandatory = True, doc =", "and transitive declaration files using the `DeclarationInfo` provider. 
if ctx.attr.include_declarations and DeclarationInfo in", "# https://docs.bazel.build/versions/main/skylark/lib/DefaultInfo.html#data_runfiles if ctx.attr.include_default_files and DefaultInfo in dep: depsets.append(dep[DefaultInfo].files) depsets.append(dep[DefaultInfo].data_runfiles.files) sources = depset(transitive", "sources (including transitive outputs) based on the # configured JavaScript module provider. if", "= \"JavaScript module info provider that is used for collecting sources from the", "As an example: This rule is helpful in combination with `ts_library` and `ng_module`", "sources, or if there are multiple JavaScript output variants defined for a target", "in dep: depsets.append(dep[js_module_provider].sources) # Based on whether declarations should be collected, extract direct", "the `DeclarationInfo` provider. if ctx.attr.include_declarations and DeclarationInfo in dep: depsets.append(dep[DeclarationInfo].transitive_declarations) # Based on", "which are exposed using the `DefaultInfo` provider. 
Also include # data runfiles which", "rule is helpful in combination with `ts_library` and `ng_module` as those rule expose", "name == \"JSEcmaScriptModuleInfo\": return JSEcmaScriptModuleInfo fail(\"Unexpected JavaScript module provider.\") \"\"\"Implementation of the extract_js_module_output", "[\"JSModuleInfo\", \"JSNamedModuleInfo\", \"JSEcmaScriptModuleInfo\"], ), \"include_declarations\": attr.bool( mandatory = True, doc = \"Whether declaration", "expose multiple output flavors (which are distinguishable by the JavaScript module providers as", "\"DeclarationInfo\", \"JSEcmaScriptModuleInfo\", \"JSModuleInfo\", \"JSNamedModuleInfo\") \"\"\"Converts a provider name to its actually Starlark provider", "actually Starlark provider instance.\"\"\" def _name_to_js_module_provider(name): if name == \"JSModuleInfo\": return JSModuleInfo elif", "dependencies.\", mandatory = True, values = [\"JSModuleInfo\", \"JSNamedModuleInfo\", \"JSEcmaScriptModuleInfo\"], ), \"include_declarations\": attr.bool( mandatory", "should be collected, extract direct # and transitive declaration files using the `DeclarationInfo`", "are distinguishable by the JavaScript module providers as imported from `providers.bzl`). i.e. these", "doc = \"JavaScript module info provider that is used for collecting sources from", "combination with `ts_library` and `ng_module` as those rule expose multiple output flavors (which", "= depset(transitive = depsets) return [DefaultInfo(files = sources)] \"\"\" Rule that collects declared", "default files should be collected, extract direct # files which are exposed using", "transitive outputs) based on the # configured JavaScript module provider. if js_module_provider in", "should be collected from the dependencies.\", ), \"include_default_files\": attr.bool( mandatory = True, doc", "are multiple JavaScript output variants defined for a target while for example only", "files from the `DefaultInfo` provider should be collected. 
Includes data runfiles needed for", "current target. # https://docs.bazel.build/versions/main/skylark/lib/DefaultInfo.html#data_runfiles if ctx.attr.include_default_files and DefaultInfo in dep: depsets.append(dep[DefaultInfo].files) depsets.append(dep[DefaultInfo].data_runfiles.files) sources", "Include JavaScript sources (including transitive outputs) based on the # configured JavaScript module", "Whether files from the `DefaultInfo` provider should be collected. Includes data runfiles needed", "`ng_module` as those rule expose multiple output flavors (which are distinguishable by the", "collected, extract direct # files which are exposed using the `DefaultInfo` provider. Also", "declarations should be collected, extract direct # and transitive declaration files using the", "JavaScript module provider.\") \"\"\"Implementation of the extract_js_module_output rule.\"\"\" def _extract_js_module_output_impl(ctx): js_module_provider = _name_to_js_module_provider(ctx.attr.provider)", "extract_js_module_output rule.\"\"\" def _extract_js_module_output_impl(ctx): js_module_provider = _name_to_js_module_provider(ctx.attr.provider) depsets = [] for dep in", "in dep: depsets.append(dep[DeclarationInfo].transitive_declarations) # Based on whether default files should be collected, extract", "dependencies based on a configurable JavaScript module provider. The extracted outputs are exposed", "in dep: depsets.append(dep[DefaultInfo].files) depsets.append(dep[DefaultInfo].data_runfiles.files) sources = depset(transitive = depsets) return [DefaultInfo(files = sources)]", "sources from the dependencies.\", mandatory = True, values = [\"JSModuleInfo\", \"JSNamedModuleInfo\", \"JSEcmaScriptModuleInfo\"], ),", "def _name_to_js_module_provider(name): if name == \"JSModuleInfo\": return JSModuleInfo elif name == \"JSNamedModuleInfo\": return", "JavaScript module providers as imported from `providers.bzl`). i.e. 
these rules expose flavors for", "ctx.attr.include_declarations and DeclarationInfo in dep: depsets.append(dep[DeclarationInfo].transitive_declarations) # Based on whether default files should", "and DefaultInfo in dep: depsets.append(dep[DefaultInfo].files) depsets.append(dep[DefaultInfo].data_runfiles.files) sources = depset(transitive = depsets) return [DefaultInfo(files", "return JSNamedModuleInfo elif name == \"JSEcmaScriptModuleInfo\": return JSEcmaScriptModuleInfo fail(\"Unexpected JavaScript module provider.\") \"\"\"Implementation", "and ECMAScript module output. For reference: https://github.com/bazelbuild/rules_nodejs/blob/stable/packages/typescript/internal/build_defs.bzl#L334-L337 \"\"\" extract_js_module_output = rule( implementation =", "dep: depsets.append(dep[DefaultInfo].files) depsets.append(dep[DefaultInfo].data_runfiles.files) sources = depset(transitive = depsets) return [DefaultInfo(files = sources)] \"\"\"", "a provider name to its actually Starlark provider instance.\"\"\" def _name_to_js_module_provider(name): if name", "DeclarationInfo in dep: depsets.append(dep[DeclarationInfo].transitive_declarations) # Based on whether default files should be collected,", "True, ), \"provider\": attr.string( doc = \"JavaScript module info provider that is used", "{ \"deps\": attr.label_list( allow_files = True, ), \"provider\": attr.string( doc = \"JavaScript module", "provider. Also include # data runfiles which are needed for the current target.", "on the # configured JavaScript module provider. if js_module_provider in dep: depsets.append(dep[js_module_provider].sources) #", "of interest. 
As an example: This rule is helpful in combination with `ts_library`", "), \"include_default_files\": attr.bool( mandatory = True, doc = \"\"\" Whether files from the", "used as input for rules that require JavaScript sources, or if there are", "the dependencies.\", ), \"include_default_files\": attr.bool( mandatory = True, doc = \"\"\" Whether files", "files using the `DeclarationInfo` provider. if ctx.attr.include_declarations and DeclarationInfo in dep: depsets.append(dep[DeclarationInfo].transitive_declarations) #", "if ctx.attr.include_default_files and DefaultInfo in dep: depsets.append(dep[DefaultInfo].files) depsets.append(dep[DefaultInfo].data_runfiles.files) sources = depset(transitive = depsets)", "only the `JSModule` outputs are of interest. As an example: This rule is", "rule.\"\"\" def _extract_js_module_output_impl(ctx): js_module_provider = _name_to_js_module_provider(ctx.attr.provider) depsets = [] for dep in ctx.attr.deps:", "of dependencies based on a configurable JavaScript module provider. The extracted outputs are", "`DefaultInfo` provider. Targets defined using this rule can be used as input for", "the dependencies.\", mandatory = True, values = [\"JSModuleInfo\", \"JSNamedModuleInfo\", \"JSEcmaScriptModuleInfo\"], ), \"include_declarations\": attr.bool(", "using the `DefaultInfo` provider. Also include # data runfiles which are needed for", "rules expose flavors for named AMD modules and ECMAScript module output. For reference:", "be collected. Includes data runfiles needed for the default outputs from dependencies. 
\"\"\",", "<filename>dev-infra/bazel/extract_js_module_output.bzl load(\"@build_bazel_rules_nodejs//:providers.bzl\", \"DeclarationInfo\", \"JSEcmaScriptModuleInfo\", \"JSModuleInfo\", \"JSNamedModuleInfo\") \"\"\"Converts a provider name to its actually", "= \"Whether declaration files should be collected from the dependencies.\", ), \"include_default_files\": attr.bool(", "(which are distinguishable by the JavaScript module providers as imported from `providers.bzl`). i.e.", "its actually Starlark provider instance.\"\"\" def _name_to_js_module_provider(name): if name == \"JSModuleInfo\": return JSModuleInfo", "\"JSNamedModuleInfo\") \"\"\"Converts a provider name to its actually Starlark provider instance.\"\"\" def _name_to_js_module_provider(name):", "implementation = _extract_js_module_output_impl, attrs = { \"deps\": attr.label_list( allow_files = True, ), \"provider\":", "provider.\") \"\"\"Implementation of the extract_js_module_output rule.\"\"\" def _extract_js_module_output_impl(ctx): js_module_provider = _name_to_js_module_provider(ctx.attr.provider) depsets =", "# Based on whether declarations should be collected, extract direct # and transitive", "target while for example only the `JSModule` outputs are of interest. As an", "mandatory = True, values = [\"JSModuleInfo\", \"JSNamedModuleInfo\", \"JSEcmaScriptModuleInfo\"], ), \"include_declarations\": attr.bool( mandatory =", "Based on whether default files should be collected, extract direct # files which", "module provider. if js_module_provider in dep: depsets.append(dep[js_module_provider].sources) # Based on whether declarations should", "mandatory = True, doc = \"\"\" Whether files from the `DefaultInfo` provider should", "\"\"\"Converts a provider name to its actually Starlark provider instance.\"\"\" def _name_to_js_module_provider(name): if", "declaration files using the `DeclarationInfo` provider. 
if ctx.attr.include_declarations and DeclarationInfo in dep: depsets.append(dep[DeclarationInfo].transitive_declarations)", "if there are multiple JavaScript output variants defined for a target while for", "are exposed within the `DefaultInfo` provider. Targets defined using this rule can be", "https://docs.bazel.build/versions/main/skylark/lib/DefaultInfo.html#data_runfiles if ctx.attr.include_default_files and DefaultInfo in dep: depsets.append(dep[DefaultInfo].files) depsets.append(dep[DefaultInfo].data_runfiles.files) sources = depset(transitive =", "dep in ctx.attr.deps: # Include JavaScript sources (including transitive outputs) based on the", "depsets = [] for dep in ctx.attr.deps: # Include JavaScript sources (including transitive", "sources)] \"\"\" Rule that collects declared JavaScript module output files from a list", "Starlark provider instance.\"\"\" def _name_to_js_module_provider(name): if name == \"JSModuleInfo\": return JSModuleInfo elif name", "JavaScript module provider. if js_module_provider in dep: depsets.append(dep[js_module_provider].sources) # Based on whether declarations", "on whether declarations should be collected, extract direct # and transitive declaration files", "on a configurable JavaScript module provider. The extracted outputs are exposed within the", "[DefaultInfo(files = sources)] \"\"\" Rule that collects declared JavaScript module output files from", "module provider. The extracted outputs are exposed within the `DefaultInfo` provider. Targets defined", "= _extract_js_module_output_impl, attrs = { \"deps\": attr.label_list( allow_files = True, ), \"provider\": attr.string(", "that is used for collecting sources from the dependencies.\", mandatory = True, values", "allow_files = True, ), \"provider\": attr.string( doc = \"JavaScript module info provider that", "# Include JavaScript sources (including transitive outputs) based on the # configured JavaScript", "for the current target. 
# https://docs.bazel.build/versions/main/skylark/lib/DefaultInfo.html#data_runfiles if ctx.attr.include_default_files and DefaultInfo in dep: depsets.append(dep[DefaultInfo].files)", "the `JSModule` outputs are of interest. As an example: This rule is helpful", "defined using this rule can be used as input for rules that require", "Includes data runfiles needed for the default outputs from dependencies. \"\"\", ), },", "_name_to_js_module_provider(ctx.attr.provider) depsets = [] for dep in ctx.attr.deps: # Include JavaScript sources (including", "module providers as imported from `providers.bzl`). i.e. these rules expose flavors for named", "data runfiles needed for the default outputs from dependencies. \"\"\", ), }, )", "The extracted outputs are exposed within the `DefaultInfo` provider. Targets defined using this", "module info provider that is used for collecting sources from the dependencies.\", mandatory", "reference: https://github.com/bazelbuild/rules_nodejs/blob/stable/packages/typescript/internal/build_defs.bzl#L334-L337 \"\"\" extract_js_module_output = rule( implementation = _extract_js_module_output_impl, attrs = { \"deps\":", "= True, ), \"provider\": attr.string( doc = \"JavaScript module info provider that is", "), \"include_declarations\": attr.bool( mandatory = True, doc = \"Whether declaration files should be", "transitive declaration files using the `DeclarationInfo` provider. if ctx.attr.include_declarations and DeclarationInfo in dep:", "the `DefaultInfo` provider. Targets defined using this rule can be used as input", "collected. Includes data runfiles needed for the default outputs from dependencies. 
\"\"\", ),", "collects declared JavaScript module output files from a list of dependencies based on", "that require JavaScript sources, or if there are multiple JavaScript output variants defined", "load(\"@build_bazel_rules_nodejs//:providers.bzl\", \"DeclarationInfo\", \"JSEcmaScriptModuleInfo\", \"JSModuleInfo\", \"JSNamedModuleInfo\") \"\"\"Converts a provider name to its actually Starlark", "extract direct # files which are exposed using the `DefaultInfo` provider. Also include", "`ts_library` and `ng_module` as those rule expose multiple output flavors (which are distinguishable", "mandatory = True, doc = \"Whether declaration files should be collected from the", "module provider.\") \"\"\"Implementation of the extract_js_module_output rule.\"\"\" def _extract_js_module_output_impl(ctx): js_module_provider = _name_to_js_module_provider(ctx.attr.provider) depsets", "require JavaScript sources, or if there are multiple JavaScript output variants defined for", "if name == \"JSModuleInfo\": return JSModuleInfo elif name == \"JSNamedModuleInfo\": return JSNamedModuleInfo elif", "a list of dependencies based on a configurable JavaScript module provider. The extracted", "output variants defined for a target while for example only the `JSModule` outputs", "that collects declared JavaScript module output files from a list of dependencies based", "= [] for dep in ctx.attr.deps: # Include JavaScript sources (including transitive outputs)", "\"\"\" extract_js_module_output = rule( implementation = _extract_js_module_output_impl, attrs = { \"deps\": attr.label_list( allow_files", "based on the # configured JavaScript module provider. 
if js_module_provider in dep: depsets.append(dep[js_module_provider].sources)", "and `ng_module` as those rule expose multiple output flavors (which are distinguishable by", "dep: depsets.append(dep[DeclarationInfo].transitive_declarations) # Based on whether default files should be collected, extract direct", "return JSEcmaScriptModuleInfo fail(\"Unexpected JavaScript module provider.\") \"\"\"Implementation of the extract_js_module_output rule.\"\"\" def _extract_js_module_output_impl(ctx):", "\"JSModuleInfo\": return JSModuleInfo elif name == \"JSNamedModuleInfo\": return JSNamedModuleInfo elif name == \"JSEcmaScriptModuleInfo\":", "from a list of dependencies based on a configurable JavaScript module provider. The", "output files from a list of dependencies based on a configurable JavaScript module", "JavaScript module provider. The extracted outputs are exposed within the `DefaultInfo` provider. Targets", "= True, values = [\"JSModuleInfo\", \"JSNamedModuleInfo\", \"JSEcmaScriptModuleInfo\"], ), \"include_declarations\": attr.bool( mandatory = True," ]
[ "TransmuteAttributes(**kwargs) def decorator(fnc): if hasattr(fnc, \"transmute\"): fnc.transmute = fnc.transmute | attrs else: fnc.transmute", "transmute_core.function import TransmuteAttributes def describe_add_route(blueprint, **kwargs): # if we have a single method,", "hasattr(fnc, \"transmute\"): fnc.transmute = fnc.transmute | attrs else: fnc.transmute = attrs add_route(blueprint, fnc)", "Party Libraries from sanic_transmute import add_route from transmute_core.compat import string_type from transmute_core.function import", "\"transmute\"): fnc.transmute = fnc.transmute | attrs else: fnc.transmute = attrs add_route(blueprint, fnc) return", "add_route from transmute_core.compat import string_type from transmute_core.function import TransmuteAttributes def describe_add_route(blueprint, **kwargs): #", "= fnc.transmute | attrs else: fnc.transmute = attrs add_route(blueprint, fnc) return fnc return", "sanic_transmute import add_route from transmute_core.compat import string_type from transmute_core.function import TransmuteAttributes def describe_add_route(blueprint,", "utf-8 # Third Party Libraries from sanic_transmute import add_route from transmute_core.compat import string_type", "isinstance(kwargs.get(\"paths\"), string_type): kwargs[\"paths\"] = [kwargs[\"paths\"]] if isinstance(kwargs.get(\"methods\"), string_type): kwargs[\"methods\"] = [kwargs[\"methods\"]] attrs =", "it a list. if isinstance(kwargs.get(\"paths\"), string_type): kwargs[\"paths\"] = [kwargs[\"paths\"]] if isinstance(kwargs.get(\"methods\"), string_type): kwargs[\"methods\"]", "if isinstance(kwargs.get(\"methods\"), string_type): kwargs[\"methods\"] = [kwargs[\"methods\"]] attrs = TransmuteAttributes(**kwargs) def decorator(fnc): if hasattr(fnc,", "# if we have a single method, make it a list. if isinstance(kwargs.get(\"paths\"),", "single method, make it a list. 
if isinstance(kwargs.get(\"paths\"), string_type): kwargs[\"paths\"] = [kwargs[\"paths\"]] if", "import string_type from transmute_core.function import TransmuteAttributes def describe_add_route(blueprint, **kwargs): # if we have", "Third Party Libraries from sanic_transmute import add_route from transmute_core.compat import string_type from transmute_core.function", "a list. if isinstance(kwargs.get(\"paths\"), string_type): kwargs[\"paths\"] = [kwargs[\"paths\"]] if isinstance(kwargs.get(\"methods\"), string_type): kwargs[\"methods\"] =", "decorator(fnc): if hasattr(fnc, \"transmute\"): fnc.transmute = fnc.transmute | attrs else: fnc.transmute = attrs", "have a single method, make it a list. if isinstance(kwargs.get(\"paths\"), string_type): kwargs[\"paths\"] =", "from transmute_core.compat import string_type from transmute_core.function import TransmuteAttributes def describe_add_route(blueprint, **kwargs): # if", "if hasattr(fnc, \"transmute\"): fnc.transmute = fnc.transmute | attrs else: fnc.transmute = attrs add_route(blueprint,", "coding: utf-8 # Third Party Libraries from sanic_transmute import add_route from transmute_core.compat import", "if isinstance(kwargs.get(\"paths\"), string_type): kwargs[\"paths\"] = [kwargs[\"paths\"]] if isinstance(kwargs.get(\"methods\"), string_type): kwargs[\"methods\"] = [kwargs[\"methods\"]] attrs", "fnc.transmute = fnc.transmute | attrs else: fnc.transmute = attrs add_route(blueprint, fnc) return fnc", "# Third Party Libraries from sanic_transmute import add_route from transmute_core.compat import string_type from", "[kwargs[\"paths\"]] if isinstance(kwargs.get(\"methods\"), string_type): kwargs[\"methods\"] = [kwargs[\"methods\"]] attrs = TransmuteAttributes(**kwargs) def decorator(fnc): if", "import add_route from transmute_core.compat import string_type from transmute_core.function import TransmuteAttributes def describe_add_route(blueprint, **kwargs):", "fnc.transmute | attrs else: fnc.transmute = attrs 
add_route(blueprint, fnc) return fnc return decorator", "Libraries from sanic_transmute import add_route from transmute_core.compat import string_type from transmute_core.function import TransmuteAttributes", "import TransmuteAttributes def describe_add_route(blueprint, **kwargs): # if we have a single method, make", "list. if isinstance(kwargs.get(\"paths\"), string_type): kwargs[\"paths\"] = [kwargs[\"paths\"]] if isinstance(kwargs.get(\"methods\"), string_type): kwargs[\"methods\"] = [kwargs[\"methods\"]]", "transmute_core.compat import string_type from transmute_core.function import TransmuteAttributes def describe_add_route(blueprint, **kwargs): # if we", "kwargs[\"paths\"] = [kwargs[\"paths\"]] if isinstance(kwargs.get(\"methods\"), string_type): kwargs[\"methods\"] = [kwargs[\"methods\"]] attrs = TransmuteAttributes(**kwargs) def", "a single method, make it a list. if isinstance(kwargs.get(\"paths\"), string_type): kwargs[\"paths\"] = [kwargs[\"paths\"]]", "= [kwargs[\"paths\"]] if isinstance(kwargs.get(\"methods\"), string_type): kwargs[\"methods\"] = [kwargs[\"methods\"]] attrs = TransmuteAttributes(**kwargs) def decorator(fnc):", "**kwargs): # if we have a single method, make it a list. 
if", "isinstance(kwargs.get(\"methods\"), string_type): kwargs[\"methods\"] = [kwargs[\"methods\"]] attrs = TransmuteAttributes(**kwargs) def decorator(fnc): if hasattr(fnc, \"transmute\"):", "TransmuteAttributes def describe_add_route(blueprint, **kwargs): # if we have a single method, make it", "from sanic_transmute import add_route from transmute_core.compat import string_type from transmute_core.function import TransmuteAttributes def", "= [kwargs[\"methods\"]] attrs = TransmuteAttributes(**kwargs) def decorator(fnc): if hasattr(fnc, \"transmute\"): fnc.transmute = fnc.transmute", "attrs = TransmuteAttributes(**kwargs) def decorator(fnc): if hasattr(fnc, \"transmute\"): fnc.transmute = fnc.transmute | attrs", "describe_add_route(blueprint, **kwargs): # if we have a single method, make it a list.", "kwargs[\"methods\"] = [kwargs[\"methods\"]] attrs = TransmuteAttributes(**kwargs) def decorator(fnc): if hasattr(fnc, \"transmute\"): fnc.transmute =", "method, make it a list. if isinstance(kwargs.get(\"paths\"), string_type): kwargs[\"paths\"] = [kwargs[\"paths\"]] if isinstance(kwargs.get(\"methods\"),", "string_type from transmute_core.function import TransmuteAttributes def describe_add_route(blueprint, **kwargs): # if we have a", "if we have a single method, make it a list. if isinstance(kwargs.get(\"paths\"), string_type):", "= TransmuteAttributes(**kwargs) def decorator(fnc): if hasattr(fnc, \"transmute\"): fnc.transmute = fnc.transmute | attrs else:", "string_type): kwargs[\"paths\"] = [kwargs[\"paths\"]] if isinstance(kwargs.get(\"methods\"), string_type): kwargs[\"methods\"] = [kwargs[\"methods\"]] attrs = TransmuteAttributes(**kwargs)", "string_type): kwargs[\"methods\"] = [kwargs[\"methods\"]] attrs = TransmuteAttributes(**kwargs) def decorator(fnc): if hasattr(fnc, \"transmute\"): fnc.transmute", "def describe_add_route(blueprint, **kwargs): # if we have a single method, make it a", "make it a list. 
if isinstance(kwargs.get(\"paths\"), string_type): kwargs[\"paths\"] = [kwargs[\"paths\"]] if isinstance(kwargs.get(\"methods\"), string_type):", "we have a single method, make it a list. if isinstance(kwargs.get(\"paths\"), string_type): kwargs[\"paths\"]", "# coding: utf-8 # Third Party Libraries from sanic_transmute import add_route from transmute_core.compat", "def decorator(fnc): if hasattr(fnc, \"transmute\"): fnc.transmute = fnc.transmute | attrs else: fnc.transmute =", "[kwargs[\"methods\"]] attrs = TransmuteAttributes(**kwargs) def decorator(fnc): if hasattr(fnc, \"transmute\"): fnc.transmute = fnc.transmute |", "from transmute_core.function import TransmuteAttributes def describe_add_route(blueprint, **kwargs): # if we have a single" ]
[ "\"\"\"[This class makes request to GBIF] \"\"\" def __init__(self, taxon_key: int, species_name: str):", "configs from easy_sdm.utils import logger from typing import Dict, Optional from pathlib import", "os from typing import Dict from abc import ABC from easy_sdm.data import ShapefileRegion", "self.__refact_dict(result) df = df.append( { \"SCIENTIFIC_NAME\": result[\"scientificName\"], \"LONGITUDE\": result[\"decimalLongitude\"], \"LATITUDE\": result[\"decimalLatitude\"], \"COUNTRY\": result[\"country\"],", "requests have a limit of 300 row for request]. Defaults to 0. Returns:", "gdf.to_file(output_path) def get_species_gdf(self): if not (self.__gdf_memory is None): gdf = self.__gdf_memory else: df", "\"offset\": offset, } r = requests.get(self.base_url, params=params) status_code = r.status_code if r.status_code !=", "[description] \"\"\" # Double check to certify there is no empty lat/long data", "df return df def __request_species_df(self): \"\"\"[Organizes GBIF information in a dataframe considering offsets", "] ) for result in request[\"results\"]: result = self.__refact_dict(result) df = df.append( {", "None def save_species_gdf(self, output_path: Path): if not str(output_path).endswith(\".shp\"): raise TypeError(\"output_path must ends with", "requests.get(self.base_url, params=params) status_code = r.status_code if r.status_code != 200: logger.logging.info( f\"API call failed", "TypeError(\"output_path must ends with shp\") output_path.parent.mkdir(parents=True, exist_ok=True) gdf = self.get_species_gdf() gdf.to_file(output_path) def get_species_gdf(self):", "species as DataFrame\"\"\" if self.__df_memory: df = self.__df_memory else: df = self.__request_species_df() df", "if d_col not in columns: result[d_col] = None return result def __clean_species_df(self, df:", "import Dict from abc import ABC from easy_sdm.data import ShapefileRegion import geopandas as", "dataframe considering offsets ]\"\"\" end_of_records = False offset = 0 status = 200", 
"result.keys() desired_columns = [ \"scientificName\", \"decimalLongitude\", \"decimalLatitude\", \"country\", \"stateProvince\", \"eventDate\", \"day\", \"month\", \"year\",", "\"LATITUDE\": result[\"decimalLatitude\"], \"COUNTRY\": result[\"country\"], \"STATE_PROVINCE\": result[\"stateProvince\"], \"IDENTIFICATION_DATE\": result[\"eventDate\"], \"DAY\": result[\"day\"], \"MONTH\": result[\"month\"], \"YEAR\":", "coordinates = self.get_coordinates() return coordinates[:, 1] def get_latitudes(self,): coordinates = self.get_coordinates() return coordinates[:,", "= self.get_coordinates() return coordinates[:, 1] def get_latitudes(self,): coordinates = self.get_coordinates() return coordinates[:, 0]", "def __build_species_df(self, request, df=None): \"\"\"[Create species dataframe with the request data] Args: df", "request data] Args: df ([type]): [description] request ([type]): [description] Returns: [df]: [description] \"\"\"", "considering offsets ]\"\"\" end_of_records = False offset = 0 status = 200 df", "ABC from easy_sdm.data import ShapefileRegion import geopandas as gpd import numpy as np", "from pathlib import Path class GBIFOccurencesRequester: \"\"\"[This class makes request to GBIF] \"\"\"", "of {r.status_code}.\" ) end_of_records = True else: r = r.json() end_of_records = r[\"endOfRecords\"]", "if configs[\"gbif\"][\"drop_duplicates\"] else df ) # Sorting Data by STATE_PROVINCE df.sort_values(\"STATE_PROVINCE\", inplace=True, ignore_index=True)", "as DataFrame\"\"\" if self.__df_memory: df = self.__df_memory else: df = self.__request_species_df() df =", "Args: offset (int, optional): [Offsset is a parameter to where starting the request", "species.taxon_key, species.name ) self.__df_memory = None def get_specie_df(self): \"\"\"Get species as DataFrame\"\"\" if", "\"LONGITUDE\", \"LATITUDE\", \"COUNTRY\", \"STATE_PROVINCE\", \"IDENTIFICATION_DATE\", \"DAY\", \"MONTH\", \"YEAR\", ] ) for result in", "self.name = name def __str__(self) -> str: return 
\"Species {self.name} with taxon key", "end_of_records = False offset = 0 status = 200 df = None while", "status_code = r.status_code if r.status_code != 200: logger.logging.info( f\"API call failed at offset", ") for result in request[\"results\"]: result = self.__refact_dict(result) df = df.append( { \"SCIENTIFIC_NAME\":", "limit of 300 row for request]. Defaults to 0. Returns: [type]: [int] \"\"\"", "self.species_geodataframe = species_geodataframe def get_coordinates(self,): coordinates = np.array( ( np.array(self.species_geodataframe[\"LATITUDE\"]), np.array(self.species_geodataframe[\"LONGITUDE\"]), ) ).T", "\"YEAR\": result[\"year\"], }, ignore_index=True, ) return df def __refact_dict(self, result: Dict): \"\"\"Refact dict", "def __init__(self, taxon_key: int, name: str): self.taxon_key = taxon_key self.name = name def", "Double check to certify there is no empty lat/long data df = df[pd.notnull(df[\"LATITUDE\"])]", "import logger from typing import Dict, Optional from pathlib import Path class GBIFOccurencesRequester:", "failed at offset {offset} with a status code of {r.status_code}.\" ) end_of_records =", "str(self.taxon_key), \"limit\": gbif_configs[\"one_request_limit\"], \"hasCoordinate\": True, \"year\": f\"{gbif_configs['low_year']},{gbif_configs['up_year']}\", \"country\": gbif_configs[\"country\"], \"offset\": offset, } r", "__clean_species_df(self, df: pd.DataFrame): \"\"\"[Cleaning Gbif Data] Args: df ([pd.DaraFrame]): [description] Returns: [pd.DaraFrame]: [description]", "geodataframes] \"\"\" def __init__( self, species: Species, proposed_region: Optional[ShapefileRegion] = None ): super().__init__(species)", "empty cells\"\"\" columns = result.keys() desired_columns = [ \"scientificName\", \"decimalLongitude\", \"decimalLatitude\", \"country\", \"stateProvince\",", "placing None in empty cells\"\"\" columns = result.keys() desired_columns = [ \"scientificName\", \"decimalLongitude\",", "a dataframe considering offsets ]\"\"\" end_of_records 
= False offset = 0 status =", "makes request to GBIF] \"\"\" def __init__(self, taxon_key: int, species_name: str): self.taxon_key =", "\"\"\"[Create species dataframe with the request data] Args: df ([type]): [description] request ([type]):", "with taxon key {self.taxon_key}\" class SpeciesDFBuilder: \"\"\"[This class organize data requested to GBIF", "Wrapper to extract relevant information from spescies geodataframes] \"\"\" def __init__(self, species_geodataframe: gpd.GeoDataFrame)", "in GBIF databse, since the requests have a limit of 300 row for", "self.species_name = species_name self.base_url = \"http://api.gbif.org/v1/occurrence/search\" def request(self, offset: int = 0): \"\"\"[", "\"\"\" gbif_configs = configs[\"gbif\"] params = { \"taxonKey\": str(self.taxon_key), \"limit\": gbif_configs[\"one_request_limit\"], \"hasCoordinate\": True,", "[description] \"\"\" if df is None: df = pd.DataFrame( columns=[ \"SCIENTIFIC_NAME\", \"LONGITUDE\", \"LATITUDE\",", "species_geodataframe def get_coordinates(self,): coordinates = np.array( ( np.array(self.species_geodataframe[\"LATITUDE\"]), np.array(self.species_geodataframe[\"LONGITUDE\"]), ) ).T return coordinates", "([type]): [description] request ([type]): [description] Returns: [df]: [description] \"\"\" if df is None:", "SpeciesGDFBuilder(SpeciesDFBuilder): \"\"\"[This class organize data requested to GBIF into geopandas geodataframes] \"\"\" def", "into pandas dataframes] \"\"\" def __init__(self, species: Species): self.gbif_occ_requester = GBIFOccurencesRequester( species.taxon_key, species.name", "-> None: self.species_geodataframe = species_geodataframe def get_coordinates(self,): coordinates = np.array( ( np.array(self.species_geodataframe[\"LATITUDE\"]), np.array(self.species_geodataframe[\"LONGITUDE\"]),", "= None def save_species_gdf(self, output_path: Path): if not str(output_path).endswith(\".shp\"): raise TypeError(\"output_path must ends", "easy_sdm.utils import logger from typing import Dict, 
Optional from pathlib import Path class", "df[pd.notnull(df[\"LONGITUDE\"])] # Removing duplicate data df = ( df.drop_duplicates(ignore_index=True) if configs[\"gbif\"][\"drop_duplicates\"] else df", "get_species_gdf(self): if not (self.__gdf_memory is None): gdf = self.__gdf_memory else: df = self.get_specie_df()", "output_path: Path): if not str(output_path).endswith(\".shp\"): raise TypeError(\"output_path must ends with shp\") output_path.parent.mkdir(parents=True, exist_ok=True)", "\"occurrenceRemarks\", ] for d_col in desired_columns: if d_col not in columns: result[d_col] =", "configs[\"gbif\"][\"drop_duplicates\"] else df ) # Sorting Data by STATE_PROVINCE df.sort_values(\"STATE_PROVINCE\", inplace=True, ignore_index=True) return", "__build_species_df(self, request, df=None): \"\"\"[Create species dataframe with the request data] Args: df ([type]):", "True, \"year\": f\"{gbif_configs['low_year']},{gbif_configs['up_year']}\", \"country\": gbif_configs[\"country\"], \"offset\": offset, } r = requests.get(self.base_url, params=params) status_code", "parameter to where starting the request in GBIF databse, since the requests have", "df: pd.DataFrame): \"\"\"[Cleaning Gbif Data] Args: df ([pd.DaraFrame]): [description] Returns: [pd.DaraFrame]: [description] \"\"\"", "\"\"\"Get species as DataFrame\"\"\" if self.__df_memory: df = self.__df_memory else: df = self.__request_species_df()", "df.append( { \"SCIENTIFIC_NAME\": result[\"scientificName\"], \"LONGITUDE\": result[\"decimalLongitude\"], \"LATITUDE\": result[\"decimalLatitude\"], \"COUNTRY\": result[\"country\"], \"STATE_PROVINCE\": result[\"stateProvince\"], \"IDENTIFICATION_DATE\":", "extract relevant information from spescies geodataframes] \"\"\" def __init__(self, species_geodataframe: gpd.GeoDataFrame) -> None:", "= self.__gdf_memory else: df = self.get_specie_df() gdf = gpd.GeoDataFrame( df, geometry=gpd.points_from_xy(df.LONGITUDE, df.LATITUDE) )", ") self.__df_memory = None def get_specie_df(self): 
\"\"\"Get species as DataFrame\"\"\" if self.__df_memory: df", "get_coordinates(self,): coordinates = np.array( ( np.array(self.species_geodataframe[\"LATITUDE\"]), np.array(self.species_geodataframe[\"LONGITUDE\"]), ) ).T return coordinates def get_longitudes(self,):", "= taxon_key self.name = name def __str__(self) -> str: return \"Species {self.name} with", "numpy as np import pandas as pd import requests from easy_sdm.configs import configs", "200 df = None while end_of_records == False and status == 200: r,", "check to certify there is no empty lat/long data df = df[pd.notnull(df[\"LATITUDE\"])] df", "Removing duplicate data df = ( df.drop_duplicates(ignore_index=True) if configs[\"gbif\"][\"drop_duplicates\"] else df ) #", "= species_geodataframe def get_coordinates(self,): coordinates = np.array( ( np.array(self.species_geodataframe[\"LATITUDE\"]), np.array(self.species_geodataframe[\"LONGITUDE\"]), ) ).T return", "gpd.GeoDataFrame( df, geometry=gpd.points_from_xy(df.LONGITUDE, df.LATITUDE) ) gdf = gdf.set_crs(f\"EPSG:{configs['maps']['default_epsg']}\") gdf = ( self.__filter_species_in_region(gdf) if", "= self.__refact_dict(result) df = df.append( { \"SCIENTIFIC_NAME\": result[\"scientificName\"], \"LONGITUDE\": result[\"decimalLongitude\"], \"LATITUDE\": result[\"decimalLatitude\"], \"COUNTRY\":", "df[pd.notnull(df[\"LATITUDE\"])] df = df[pd.notnull(df[\"LONGITUDE\"])] # Removing duplicate data df = ( df.drop_duplicates(ignore_index=True) if", "else gdf ) self.__gdf_memory = gdf return gdf def __filter_species_in_region(self, gdf: gpd.GeoDataFrame): return", "= r[\"endOfRecords\"] return r, end_of_records, status_code class Species: def __init__(self, taxon_key: int, name:", "gdf ) self.__gdf_memory = gdf return gdf def __filter_species_in_region(self, gdf: gpd.GeoDataFrame): return self.proposed_region.get_points_inside(gdf)", "np.array( ( np.array(self.species_geodataframe[\"LATITUDE\"]), np.array(self.species_geodataframe[\"LONGITUDE\"]), ) ).T return 
coordinates def get_longitudes(self,): coordinates = self.get_coordinates()", "result in request[\"results\"]: result = self.__refact_dict(result) df = df.append( { \"SCIENTIFIC_NAME\": result[\"scientificName\"], \"LONGITUDE\":", "organize data requested to GBIF into geopandas geodataframes] \"\"\" def __init__( self, species:", "gbif_configs = configs[\"gbif\"] params = { \"taxonKey\": str(self.taxon_key), \"limit\": gbif_configs[\"one_request_limit\"], \"hasCoordinate\": True, \"year\":", "result[\"eventDate\"], \"DAY\": result[\"day\"], \"MONTH\": result[\"month\"], \"YEAR\": result[\"year\"], }, ignore_index=True, ) return df def", "[Offsset is a parameter to where starting the request in GBIF databse, since", "STATE_PROVINCE df.sort_values(\"STATE_PROVINCE\", inplace=True, ignore_index=True) return df class SpeciesGDFBuilder(SpeciesDFBuilder): \"\"\"[This class organize data requested", "\"\"\"[This class organize data requested to GBIF into pandas dataframes] \"\"\" def __init__(self,", "information in a dataframe considering offsets ]\"\"\" end_of_records = False offset = 0", "taxon_key self.species_name = species_name self.base_url = \"http://api.gbif.org/v1/occurrence/search\" def request(self, offset: int = 0):", "request to GBIF] \"\"\" def __init__(self, taxon_key: int, species_name: str): self.taxon_key = taxon_key", "= species_name self.base_url = \"http://api.gbif.org/v1/occurrence/search\" def request(self, offset: int = 0): \"\"\"[ Request", "self.__df_memory: df = self.__df_memory else: df = self.__request_species_df() df = self.__clean_species_df(df) self.__df_memory =", "as np import pandas as pd import requests from easy_sdm.configs import configs from", "from typing import Dict, Optional from pathlib import Path class GBIFOccurencesRequester: \"\"\"[This class", "return df class SpeciesGDFBuilder(SpeciesDFBuilder): \"\"\"[This class organize data requested to GBIF into geopandas", "as gpd import numpy as np import pandas as pd import requests 
from", "\"hasCoordinate\": True, \"year\": f\"{gbif_configs['low_year']},{gbif_configs['up_year']}\", \"country\": gbif_configs[\"country\"], \"offset\": offset, } r = requests.get(self.base_url, params=params)", "cells\"\"\" columns = result.keys() desired_columns = [ \"scientificName\", \"decimalLongitude\", \"decimalLatitude\", \"country\", \"stateProvince\", \"eventDate\",", "self.__gdf_memory else: df = self.get_specie_df() gdf = gpd.GeoDataFrame( df, geometry=gpd.points_from_xy(df.LONGITUDE, df.LATITUDE) ) gdf", "= proposed_region self.__gdf_memory = None def save_species_gdf(self, output_path: Path): if not str(output_path).endswith(\".shp\"): raise", "else: r = r.json() end_of_records = r[\"endOfRecords\"] return r, end_of_records, status_code class Species:", "r.json() end_of_records = r[\"endOfRecords\"] return r, end_of_records, status_code class Species: def __init__(self, taxon_key:", "offset = len(df) + 1 self.__clean_species_df(df) return df def __build_species_df(self, request, df=None): \"\"\"[Create", "Request GBIF information about an species] Args: offset (int, optional): [Offsset is a", "params = { \"taxonKey\": str(self.taxon_key), \"limit\": gbif_configs[\"one_request_limit\"], \"hasCoordinate\": True, \"year\": f\"{gbif_configs['low_year']},{gbif_configs['up_year']}\", \"country\": gbif_configs[\"country\"],", "= df[pd.notnull(df[\"LATITUDE\"])] df = df[pd.notnull(df[\"LONGITUDE\"])] # Removing duplicate data df = ( df.drop_duplicates(ignore_index=True)", "= requests.get(self.base_url, params=params) status_code = r.status_code if r.status_code != 200: logger.logging.info( f\"API call", "result[\"country\"], \"STATE_PROVINCE\": result[\"stateProvince\"], \"IDENTIFICATION_DATE\": result[\"eventDate\"], \"DAY\": result[\"day\"], \"MONTH\": result[\"month\"], \"YEAR\": result[\"year\"], }, ignore_index=True,", "columns: result[d_col] = None return result def __clean_species_df(self, df: pd.DataFrame): \"\"\"[Cleaning Gbif Data]", "= None def 
get_specie_df(self): \"\"\"Get species as DataFrame\"\"\" if self.__df_memory: df = self.__df_memory", "= pd.DataFrame( columns=[ \"SCIENTIFIC_NAME\", \"LONGITUDE\", \"LATITUDE\", \"COUNTRY\", \"STATE_PROVINCE\", \"IDENTIFICATION_DATE\", \"DAY\", \"MONTH\", \"YEAR\", ]", "\"\"\" def __init__(self, species_geodataframe: gpd.GeoDataFrame) -> None: self.species_geodataframe = species_geodataframe def get_coordinates(self,): coordinates", "import pandas as pd import requests from easy_sdm.configs import configs from easy_sdm.utils import", "300 row for request]. Defaults to 0. Returns: [type]: [int] \"\"\" gbif_configs =", "in request[\"results\"]: result = self.__refact_dict(result) df = df.append( { \"SCIENTIFIC_NAME\": result[\"scientificName\"], \"LONGITUDE\": result[\"decimalLongitude\"],", "Optional from pathlib import Path class GBIFOccurencesRequester: \"\"\"[This class makes request to GBIF]", "\"IDENTIFICATION_DATE\", \"DAY\", \"MONTH\", \"YEAR\", ] ) for result in request[\"results\"]: result = self.__refact_dict(result)", "\"\"\"[A Wrapper to extract relevant information from spescies geodataframes] \"\"\" def __init__(self, species_geodataframe:", "dict placing None in empty cells\"\"\" columns = result.keys() desired_columns = [ \"scientificName\",", "to GBIF into pandas dataframes] \"\"\" def __init__(self, species: Species): self.gbif_occ_requester = GBIFOccurencesRequester(", "data requested to GBIF into geopandas geodataframes] \"\"\" def __init__( self, species: Species,", "else: df = self.get_specie_df() gdf = gpd.GeoDataFrame( df, geometry=gpd.points_from_xy(df.LONGITUDE, df.LATITUDE) ) gdf =", "for result in request[\"results\"]: result = self.__refact_dict(result) df = df.append( { \"SCIENTIFIC_NAME\": result[\"scientificName\"],", "def get_species_gdf(self): if not (self.__gdf_memory is None): gdf = self.__gdf_memory else: df =", "end_of_records = r[\"endOfRecords\"] return r, end_of_records, status_code class Species: def __init__(self, 
taxon_key: int,", "__init__(self, species_geodataframe: gpd.GeoDataFrame) -> None: self.species_geodataframe = species_geodataframe def get_coordinates(self,): coordinates = np.array(", "( self.__filter_species_in_region(gdf) if not (self.proposed_region is None) else gdf ) self.__gdf_memory = gdf", "typing import Dict from abc import ABC from easy_sdm.data import ShapefileRegion import geopandas", "species] Args: offset (int, optional): [Offsset is a parameter to where starting the", "to GBIF into geopandas geodataframes] \"\"\" def __init__( self, species: Species, proposed_region: Optional[ShapefileRegion]", "\"COUNTRY\": result[\"country\"], \"STATE_PROVINCE\": result[\"stateProvince\"], \"IDENTIFICATION_DATE\": result[\"eventDate\"], \"DAY\": result[\"day\"], \"MONTH\": result[\"month\"], \"YEAR\": result[\"year\"], },", "= self.get_specie_df() gdf = gpd.GeoDataFrame( df, geometry=gpd.points_from_xy(df.LONGITUDE, df.LATITUDE) ) gdf = gdf.set_crs(f\"EPSG:{configs['maps']['default_epsg']}\") gdf", "r.status_code != 200: logger.logging.info( f\"API call failed at offset {offset} with a status", "200: r, end_of_records, status = self.gbif_occ_requester.request(offset) df = self.__build_species_df(r, df) offset = len(df)", "class GBIFOccurencesRequester: \"\"\"[This class makes request to GBIF] \"\"\" def __init__(self, taxon_key: int,", "Dict, Optional from pathlib import Path class GBIFOccurencesRequester: \"\"\"[This class makes request to", "Species): self.gbif_occ_requester = GBIFOccurencesRequester( species.taxon_key, species.name ) self.__df_memory = None def get_specie_df(self): \"\"\"Get", "have a limit of 300 row for request]. Defaults to 0. 
Returns: [type]:", "return gdf def __filter_species_in_region(self, gdf: gpd.GeoDataFrame): return self.proposed_region.get_points_inside(gdf) class SpeciesInfoExtractor: \"\"\"[A Wrapper to", "Args: df ([pd.DaraFrame]): [description] Returns: [pd.DaraFrame]: [description] \"\"\" # Double check to certify", "None in empty cells\"\"\" columns = result.keys() desired_columns = [ \"scientificName\", \"decimalLongitude\", \"decimalLatitude\",", "of 300 row for request]. Defaults to 0. Returns: [type]: [int] \"\"\" gbif_configs", "easy_sdm.configs import configs from easy_sdm.utils import logger from typing import Dict, Optional from", "gdf = ( self.__filter_species_in_region(gdf) if not (self.proposed_region is None) else gdf ) self.__gdf_memory", "if r.status_code != 200: logger.logging.info( f\"API call failed at offset {offset} with a", "spescies geodataframes] \"\"\" def __init__(self, species_geodataframe: gpd.GeoDataFrame) -> None: self.species_geodataframe = species_geodataframe def", "None return result def __clean_species_df(self, df: pd.DataFrame): \"\"\"[Cleaning Gbif Data] Args: df ([pd.DaraFrame]):", "df = self.__clean_species_df(df) self.__df_memory = df return df def __request_species_df(self): \"\"\"[Organizes GBIF information", "None) else gdf ) self.__gdf_memory = gdf return gdf def __filter_species_in_region(self, gdf: gpd.GeoDataFrame):", "= [ \"scientificName\", \"decimalLongitude\", \"decimalLatitude\", \"country\", \"stateProvince\", \"eventDate\", \"day\", \"month\", \"year\", \"occurrenceRemarks\", ]", "self.get_specie_df() gdf = gpd.GeoDataFrame( df, geometry=gpd.points_from_xy(df.LONGITUDE, df.LATITUDE) ) gdf = gdf.set_crs(f\"EPSG:{configs['maps']['default_epsg']}\") gdf =", "Species, proposed_region: Optional[ShapefileRegion] = None ): super().__init__(species) self.proposed_region = proposed_region self.__gdf_memory = None", "into geopandas geodataframes] \"\"\" def __init__( self, species: Species, proposed_region: 
Optional[ShapefileRegion] = None", "\"STATE_PROVINCE\", \"IDENTIFICATION_DATE\", \"DAY\", \"MONTH\", \"YEAR\", ] ) for result in request[\"results\"]: result =", "str): self.taxon_key = taxon_key self.species_name = species_name self.base_url = \"http://api.gbif.org/v1/occurrence/search\" def request(self, offset:", ") self.__gdf_memory = gdf return gdf def __filter_species_in_region(self, gdf: gpd.GeoDataFrame): return self.proposed_region.get_points_inside(gdf) class", "df def __build_species_df(self, request, df=None): \"\"\"[Create species dataframe with the request data] Args:", "\"decimalLatitude\", \"country\", \"stateProvince\", \"eventDate\", \"day\", \"month\", \"year\", \"occurrenceRemarks\", ] for d_col in desired_columns:", "GBIFOccurencesRequester( species.taxon_key, species.name ) self.__df_memory = None def get_specie_df(self): \"\"\"Get species as DataFrame\"\"\"", "end_of_records == False and status == 200: r, end_of_records, status = self.gbif_occ_requester.request(offset) df", "offset: int = 0): \"\"\"[ Request GBIF information about an species] Args: offset", "= self.__df_memory else: df = self.__request_species_df() df = self.__clean_species_df(df) self.__df_memory = df return", "= gdf.set_crs(f\"EPSG:{configs['maps']['default_epsg']}\") gdf = ( self.__filter_species_in_region(gdf) if not (self.proposed_region is None) else gdf", "def __refact_dict(self, result: Dict): \"\"\"Refact dict placing None in empty cells\"\"\" columns =", "df = self.__df_memory else: df = self.__request_species_df() df = self.__clean_species_df(df) self.__df_memory = df", "class SpeciesGDFBuilder(SpeciesDFBuilder): \"\"\"[This class organize data requested to GBIF into geopandas geodataframes] \"\"\"", "Data] Args: df ([pd.DaraFrame]): [description] Returns: [pd.DaraFrame]: [description] \"\"\" # Double check to", "self.__df_memory = df return df def __request_species_df(self): \"\"\"[Organizes GBIF information in a dataframe", "taxon_key self.name = name def 
__str__(self) -> str: return \"Species {self.name} with taxon", "gpd import numpy as np import pandas as pd import requests from easy_sdm.configs", "self.get_species_gdf() gdf.to_file(output_path) def get_species_gdf(self): if not (self.__gdf_memory is None): gdf = self.__gdf_memory else:", "an species] Args: offset (int, optional): [Offsset is a parameter to where starting", "to 0. Returns: [type]: [int] \"\"\" gbif_configs = configs[\"gbif\"] params = { \"taxonKey\":", "is None): gdf = self.__gdf_memory else: df = self.get_specie_df() gdf = gpd.GeoDataFrame( df,", "len(df) + 1 self.__clean_species_df(df) return df def __build_species_df(self, request, df=None): \"\"\"[Create species dataframe", "information about an species] Args: offset (int, optional): [Offsset is a parameter to", "\"http://api.gbif.org/v1/occurrence/search\" def request(self, offset: int = 0): \"\"\"[ Request GBIF information about an", "r, end_of_records, status = self.gbif_occ_requester.request(offset) df = self.__build_species_df(r, df) offset = len(df) +", "a status code of {r.status_code}.\" ) end_of_records = True else: r = r.json()", "\"month\", \"year\", \"occurrenceRemarks\", ] for d_col in desired_columns: if d_col not in columns:", "import ABC from easy_sdm.data import ShapefileRegion import geopandas as gpd import numpy as", "\"SCIENTIFIC_NAME\": result[\"scientificName\"], \"LONGITUDE\": result[\"decimalLongitude\"], \"LATITUDE\": result[\"decimalLatitude\"], \"COUNTRY\": result[\"country\"], \"STATE_PROVINCE\": result[\"stateProvince\"], \"IDENTIFICATION_DATE\": result[\"eventDate\"], \"DAY\":", "import Path class GBIFOccurencesRequester: \"\"\"[This class makes request to GBIF] \"\"\" def __init__(self,", "return df def __build_species_df(self, request, df=None): \"\"\"[Create species dataframe with the request data]", "\"taxonKey\": str(self.taxon_key), \"limit\": gbif_configs[\"one_request_limit\"], \"hasCoordinate\": True, \"year\": 
f\"{gbif_configs['low_year']},{gbif_configs['up_year']}\", \"country\": gbif_configs[\"country\"], \"offset\": offset, }", "for request]. Defaults to 0. Returns: [type]: [int] \"\"\" gbif_configs = configs[\"gbif\"] params", "end_of_records, status = self.gbif_occ_requester.request(offset) df = self.__build_species_df(r, df) offset = len(df) + 1", "return df def __request_species_df(self): \"\"\"[Organizes GBIF information in a dataframe considering offsets ]\"\"\"", "status code of {r.status_code}.\" ) end_of_records = True else: r = r.json() end_of_records", "in empty cells\"\"\" columns = result.keys() desired_columns = [ \"scientificName\", \"decimalLongitude\", \"decimalLatitude\", \"country\",", "in desired_columns: if d_col not in columns: result[d_col] = None return result def", "= gdf return gdf def __filter_species_in_region(self, gdf: gpd.GeoDataFrame): return self.proposed_region.get_points_inside(gdf) class SpeciesInfoExtractor: \"\"\"[A", "else: df = self.__request_species_df() df = self.__clean_species_df(df) self.__df_memory = df return df def", "None def get_specie_df(self): \"\"\"Get species as DataFrame\"\"\" if self.__df_memory: df = self.__df_memory else:", "pandas as pd import requests from easy_sdm.configs import configs from easy_sdm.utils import logger", "import ShapefileRegion import geopandas as gpd import numpy as np import pandas as", "taxon key {self.taxon_key}\" class SpeciesDFBuilder: \"\"\"[This class organize data requested to GBIF into", "+ 1 self.__clean_species_df(df) return df def __build_species_df(self, request, df=None): \"\"\"[Create species dataframe with", "offsets ]\"\"\" end_of_records = False offset = 0 status = 200 df =", "0 status = 200 df = None while end_of_records == False and status", "relevant information from spescies geodataframes] \"\"\" def __init__(self, species_geodataframe: gpd.GeoDataFrame) -> None: self.species_geodataframe", "\"\"\" def __init__( self, species: Species, proposed_region: 
Optional[ShapefileRegion] = None ): super().__init__(species) self.proposed_region", "1 self.__clean_species_df(df) return df def __build_species_df(self, request, df=None): \"\"\"[Create species dataframe with the", ") # Sorting Data by STATE_PROVINCE df.sort_values(\"STATE_PROVINCE\", inplace=True, ignore_index=True) return df class SpeciesGDFBuilder(SpeciesDFBuilder):", "\"DAY\", \"MONTH\", \"YEAR\", ] ) for result in request[\"results\"]: result = self.__refact_dict(result) df", "__init__( self, species: Species, proposed_region: Optional[ShapefileRegion] = None ): super().__init__(species) self.proposed_region = proposed_region", "__filter_species_in_region(self, gdf: gpd.GeoDataFrame): return self.proposed_region.get_points_inside(gdf) class SpeciesInfoExtractor: \"\"\"[A Wrapper to extract relevant information", "return df def __refact_dict(self, result: Dict): \"\"\"Refact dict placing None in empty cells\"\"\"", "\"\"\" # Double check to certify there is no empty lat/long data df", "return \"Species {self.name} with taxon key {self.taxon_key}\" class SpeciesDFBuilder: \"\"\"[This class organize data", "self.__clean_species_df(df) return df def __build_species_df(self, request, df=None): \"\"\"[Create species dataframe with the request", "geodataframes] \"\"\" def __init__(self, species_geodataframe: gpd.GeoDataFrame) -> None: self.species_geodataframe = species_geodataframe def get_coordinates(self,):", "gdf def __filter_species_in_region(self, gdf: gpd.GeoDataFrame): return self.proposed_region.get_points_inside(gdf) class SpeciesInfoExtractor: \"\"\"[A Wrapper to extract", "requests from easy_sdm.configs import configs from easy_sdm.utils import logger from typing import Dict,", "\"country\": gbif_configs[\"country\"], \"offset\": offset, } r = requests.get(self.base_url, params=params) status_code = r.status_code if", "__str__(self) -> str: return \"Species {self.name} with taxon key {self.taxon_key}\" class SpeciesDFBuilder: \"\"\"[This", "dataframes] 
\"\"\" def __init__(self, species: Species): self.gbif_occ_requester = GBIFOccurencesRequester( species.taxon_key, species.name ) self.__df_memory", "class SpeciesDFBuilder: \"\"\"[This class organize data requested to GBIF into pandas dataframes] \"\"\"", "\"\"\"Refact dict placing None in empty cells\"\"\" columns = result.keys() desired_columns = [", "df) offset = len(df) + 1 self.__clean_species_df(df) return df def __build_species_df(self, request, df=None):", "must ends with shp\") output_path.parent.mkdir(parents=True, exist_ok=True) gdf = self.get_species_gdf() gdf.to_file(output_path) def get_species_gdf(self): if", "gpd.GeoDataFrame): return self.proposed_region.get_points_inside(gdf) class SpeciesInfoExtractor: \"\"\"[A Wrapper to extract relevant information from spescies", "request(self, offset: int = 0): \"\"\"[ Request GBIF information about an species] Args:", "class Species: def __init__(self, taxon_key: int, name: str): self.taxon_key = taxon_key self.name =", "gdf = self.__gdf_memory else: df = self.get_specie_df() gdf = gpd.GeoDataFrame( df, geometry=gpd.points_from_xy(df.LONGITUDE, df.LATITUDE)", "__refact_dict(self, result: Dict): \"\"\"Refact dict placing None in empty cells\"\"\" columns = result.keys()", "( np.array(self.species_geodataframe[\"LATITUDE\"]), np.array(self.species_geodataframe[\"LONGITUDE\"]), ) ).T return coordinates def get_longitudes(self,): coordinates = self.get_coordinates() return", "= result.keys() desired_columns = [ \"scientificName\", \"decimalLongitude\", \"decimalLatitude\", \"country\", \"stateProvince\", \"eventDate\", \"day\", \"month\",", "df, geometry=gpd.points_from_xy(df.LONGITUDE, df.LATITUDE) ) gdf = gdf.set_crs(f\"EPSG:{configs['maps']['default_epsg']}\") gdf = ( self.__filter_species_in_region(gdf) if not", "df def __refact_dict(self, result: Dict): \"\"\"Refact dict placing None in empty cells\"\"\" columns", "True else: r = r.json() end_of_records = r[\"endOfRecords\"] return r, end_of_records, 
status_code class", "request[\"results\"]: result = self.__refact_dict(result) df = df.append( { \"SCIENTIFIC_NAME\": result[\"scientificName\"], \"LONGITUDE\": result[\"decimalLongitude\"], \"LATITUDE\":", "\"scientificName\", \"decimalLongitude\", \"decimalLatitude\", \"country\", \"stateProvince\", \"eventDate\", \"day\", \"month\", \"year\", \"occurrenceRemarks\", ] for d_col", "request ([type]): [description] Returns: [df]: [description] \"\"\" if df is None: df =", "str: return \"Species {self.name} with taxon key {self.taxon_key}\" class SpeciesDFBuilder: \"\"\"[This class organize", "Dict from abc import ABC from easy_sdm.data import ShapefileRegion import geopandas as gpd", "]\"\"\" end_of_records = False offset = 0 status = 200 df = None", "self.taxon_key = taxon_key self.name = name def __str__(self) -> str: return \"Species {self.name}", "\"country\", \"stateProvince\", \"eventDate\", \"day\", \"month\", \"year\", \"occurrenceRemarks\", ] for d_col in desired_columns: if", "pd import requests from easy_sdm.configs import configs from easy_sdm.utils import logger from typing", "__init__(self, taxon_key: int, name: str): self.taxon_key = taxon_key self.name = name def __str__(self)", "self.__filter_species_in_region(gdf) if not (self.proposed_region is None) else gdf ) self.__gdf_memory = gdf return", "GBIF information about an species] Args: offset (int, optional): [Offsset is a parameter", "Optional[ShapefileRegion] = None ): super().__init__(species) self.proposed_region = proposed_region self.__gdf_memory = None def save_species_gdf(self,", "Data by STATE_PROVINCE df.sort_values(\"STATE_PROVINCE\", inplace=True, ignore_index=True) return df class SpeciesGDFBuilder(SpeciesDFBuilder): \"\"\"[This class organize", "gbif_configs[\"one_request_limit\"], \"hasCoordinate\": True, \"year\": f\"{gbif_configs['low_year']},{gbif_configs['up_year']}\", \"country\": gbif_configs[\"country\"], \"offset\": offset, } r = requests.get(self.base_url,", "from abc import 
ABC from easy_sdm.data import ShapefileRegion import geopandas as gpd import", "str): self.taxon_key = taxon_key self.name = name def __str__(self) -> str: return \"Species", "result[\"year\"], }, ignore_index=True, ) return df def __refact_dict(self, result: Dict): \"\"\"Refact dict placing", "\"\"\" if df is None: df = pd.DataFrame( columns=[ \"SCIENTIFIC_NAME\", \"LONGITUDE\", \"LATITUDE\", \"COUNTRY\",", "None while end_of_records == False and status == 200: r, end_of_records, status =", "( df.drop_duplicates(ignore_index=True) if configs[\"gbif\"][\"drop_duplicates\"] else df ) # Sorting Data by STATE_PROVINCE df.sort_values(\"STATE_PROVINCE\",", "GBIF into geopandas geodataframes] \"\"\" def __init__( self, species: Species, proposed_region: Optional[ShapefileRegion] =", "= configs[\"gbif\"] params = { \"taxonKey\": str(self.taxon_key), \"limit\": gbif_configs[\"one_request_limit\"], \"hasCoordinate\": True, \"year\": f\"{gbif_configs['low_year']},{gbif_configs['up_year']}\",", "\"year\", \"occurrenceRemarks\", ] for d_col in desired_columns: if d_col not in columns: result[d_col]", "is None: df = pd.DataFrame( columns=[ \"SCIENTIFIC_NAME\", \"LONGITUDE\", \"LATITUDE\", \"COUNTRY\", \"STATE_PROVINCE\", \"IDENTIFICATION_DATE\", \"DAY\",", "self.gbif_occ_requester.request(offset) df = self.__build_species_df(r, df) offset = len(df) + 1 self.__clean_species_df(df) return df", "{offset} with a status code of {r.status_code}.\" ) end_of_records = True else: r", "= GBIFOccurencesRequester( species.taxon_key, species.name ) self.__df_memory = None def get_specie_df(self): \"\"\"Get species as", "\"\"\"[This class organize data requested to GBIF into geopandas geodataframes] \"\"\" def __init__(", "from typing import Dict from abc import ABC from easy_sdm.data import ShapefileRegion import", "f\"{gbif_configs['low_year']},{gbif_configs['up_year']}\", \"country\": gbif_configs[\"country\"], \"offset\": offset, } r = requests.get(self.base_url, params=params) 
status_code = r.status_code", "import os from typing import Dict from abc import ABC from easy_sdm.data import", "[type]: [int] \"\"\" gbif_configs = configs[\"gbif\"] params = { \"taxonKey\": str(self.taxon_key), \"limit\": gbif_configs[\"one_request_limit\"],", "np.array(self.species_geodataframe[\"LONGITUDE\"]), ) ).T return coordinates def get_longitudes(self,): coordinates = self.get_coordinates() return coordinates[:, 1]", "\"limit\": gbif_configs[\"one_request_limit\"], \"hasCoordinate\": True, \"year\": f\"{gbif_configs['low_year']},{gbif_configs['up_year']}\", \"country\": gbif_configs[\"country\"], \"offset\": offset, } r =", "): super().__init__(species) self.proposed_region = proposed_region self.__gdf_memory = None def save_species_gdf(self, output_path: Path): if", "geopandas as gpd import numpy as np import pandas as pd import requests", "self, species: Species, proposed_region: Optional[ShapefileRegion] = None ): super().__init__(species) self.proposed_region = proposed_region self.__gdf_memory", "\"year\": f\"{gbif_configs['low_year']},{gbif_configs['up_year']}\", \"country\": gbif_configs[\"country\"], \"offset\": offset, } r = requests.get(self.base_url, params=params) status_code =", "[pd.DaraFrame]: [description] \"\"\" # Double check to certify there is no empty lat/long", "species_geodataframe: gpd.GeoDataFrame) -> None: self.species_geodataframe = species_geodataframe def get_coordinates(self,): coordinates = np.array( (", "[description] Returns: [df]: [description] \"\"\" if df is None: df = pd.DataFrame( columns=[", "result[\"month\"], \"YEAR\": result[\"year\"], }, ignore_index=True, ) return df def __refact_dict(self, result: Dict): \"\"\"Refact", "= ( df.drop_duplicates(ignore_index=True) if configs[\"gbif\"][\"drop_duplicates\"] else df ) # Sorting Data by STATE_PROVINCE", "df = ( df.drop_duplicates(ignore_index=True) if configs[\"gbif\"][\"drop_duplicates\"] else df ) # Sorting Data by", "logger from typing import Dict, Optional from 
class GBIFOccurencesRequester:
    """Thin client for the GBIF occurrence-search REST endpoint."""

    def __init__(self, taxon_key: int, species_name: str):
        self.taxon_key = taxon_key
        self.species_name = species_name
        self.base_url = "http://api.gbif.org/v1/occurrence/search"

    def request(self, offset: int = 0):
        """Fetch one page of occurrence records for this taxon.

        Args:
            offset (int, optional): Row at which GBIF should start this page.
                GBIF caps each request at a fixed number of rows, so callers
                page through results by advancing the offset. Defaults to 0.

        Returns:
            tuple: ``(payload, end_of_records, status_code)`` where ``payload``
            is the decoded JSON dict on HTTP 200 and the raw ``requests``
            response object on any other status.
        """
        gbif_configs = configs["gbif"]
        query = {
            "taxonKey": str(self.taxon_key),
            "limit": gbif_configs["one_request_limit"],
            "hasCoordinate": True,
            "year": f"{gbif_configs['low_year']},{gbif_configs['up_year']}",
            "country": gbif_configs["country"],
            "offset": offset,
        }
        response = requests.get(self.base_url, params=query)
        status_code = response.status_code
        if status_code == 200:
            payload = response.json()
            end_of_records = payload["endOfRecords"]
        else:
            # Failed page: log and signal the caller to stop paging.
            logger.logging.info(
                f"API call failed at offset {offset} with a status code of {status_code}."
            )
            payload = response
            end_of_records = True
        return payload, end_of_records, status_code
+ 1 self.__clean_species_df(df) return", "status == 200: r, end_of_records, status = self.gbif_occ_requester.request(offset) df = self.__build_species_df(r, df) offset", "d_col not in columns: result[d_col] = None return result def __clean_species_df(self, df: pd.DataFrame):", "geopandas geodataframes] \"\"\" def __init__( self, species: Species, proposed_region: Optional[ShapefileRegion] = None ):", "with a status code of {r.status_code}.\" ) end_of_records = True else: r =", "if df is None: df = pd.DataFrame( columns=[ \"SCIENTIFIC_NAME\", \"LONGITUDE\", \"LATITUDE\", \"COUNTRY\", \"STATE_PROVINCE\",", "while end_of_records == False and status == 200: r, end_of_records, status = self.gbif_occ_requester.request(offset)", "\"IDENTIFICATION_DATE\": result[\"eventDate\"], \"DAY\": result[\"day\"], \"MONTH\": result[\"month\"], \"YEAR\": result[\"year\"], }, ignore_index=True, ) return df", "for d_col in desired_columns: if d_col not in columns: result[d_col] = None return", "geometry=gpd.points_from_xy(df.LONGITUDE, df.LATITUDE) ) gdf = gdf.set_crs(f\"EPSG:{configs['maps']['default_epsg']}\") gdf = ( self.__filter_species_in_region(gdf) if not (self.proposed_region", "self.proposed_region = proposed_region self.__gdf_memory = None def save_species_gdf(self, output_path: Path): if not str(output_path).endswith(\".shp\"):", "([type]): [description] Returns: [df]: [description] \"\"\" if df is None: df = pd.DataFrame(", "offset, } r = requests.get(self.base_url, params=params) status_code = r.status_code if r.status_code != 200:", "offset (int, optional): [Offsset is a parameter to where starting the request in", "class organize data requested to GBIF into pandas dataframes] \"\"\" def __init__(self, species:", "no empty lat/long data df = df[pd.notnull(df[\"LATITUDE\"])] df = df[pd.notnull(df[\"LONGITUDE\"])] # Removing duplicate", "there is no empty lat/long data df = df[pd.notnull(df[\"LATITUDE\"])] df = df[pd.notnull(df[\"LONGITUDE\"])] #", "def 
class SpeciesDFBuilder:
    """Collects GBIF occurrence pages for one species into a pandas DataFrame."""

    def __init__(self, species: "Species"):
        self.gbif_occ_requester = GBIFOccurencesRequester(
            species.taxon_key, species.name
        )
        self.__df_memory = None  # cached cleaned DataFrame

    def get_specie_df(self):
        """Return the cleaned species DataFrame, requesting it once and caching it.

        Returns:
            pd.DataFrame: occurrence rows with lat/long present, deduplicated
            (when configured) and sorted by STATE_PROVINCE.
        """
        # Bug fix: the original used `if self.__df_memory:`, which raises
        # "The truth value of a DataFrame is ambiguous" whenever a non-empty
        # DataFrame is already cached. An explicit None check is required.
        if self.__df_memory is not None:
            df = self.__df_memory
        else:
            df = self.__request_species_df()
            df = self.__clean_species_df(df)
            self.__df_memory = df
        return df

    def __request_species_df(self):
        """Page through GBIF results, accumulating rows into one DataFrame.

        Cleaning is deliberately left to get_specie_df (the original also
        called __clean_species_df here but discarded the result).
        """
        end_of_records = False
        offset = 0
        status = 200
        df = None
        while not end_of_records and status == 200:
            r, end_of_records, status = self.gbif_occ_requester.request(offset)
            if status != 200:
                # Bug fix: on failure `r` is the raw response object, not a
                # JSON dict; stop instead of feeding it to the row builder.
                break
            df = self.__build_species_df(r, df)
            offset = len(df) + 1
        return df

    def __build_species_df(self, request, df=None):
        """Append the rows of one GBIF response page to ``df``.

        Args:
            request (dict): decoded JSON page from the GBIF occurrence API
                (must contain a "results" list).
            df (pd.DataFrame, optional): accumulator; created when None.

        Returns:
            pd.DataFrame: accumulator extended with this page's rows.
        """
        columns = [
            "SCIENTIFIC_NAME",
            "LONGITUDE",
            "LATITUDE",
            "COUNTRY",
            "STATE_PROVINCE",
            "IDENTIFICATION_DATE",
            "DAY",
            "MONTH",
            "YEAR",
        ]
        if df is None:
            df = pd.DataFrame(columns=columns)
        rows = []
        for result in request["results"]:
            result = self.__refact_dict(result)
            rows.append(
                {
                    "SCIENTIFIC_NAME": result["scientificName"],
                    "LONGITUDE": result["decimalLongitude"],
                    "LATITUDE": result["decimalLatitude"],
                    "COUNTRY": result["country"],
                    "STATE_PROVINCE": result["stateProvince"],
                    "IDENTIFICATION_DATE": result["eventDate"],
                    "DAY": result["day"],
                    "MONTH": result["month"],
                    "YEAR": result["year"],
                }
            )
        if rows:
            # DataFrame.append was removed in pandas 2.0 and was O(n^2) when
            # called once per row; a single concat is equivalent and faster.
            df = pd.concat(
                [df, pd.DataFrame(rows, columns=columns)], ignore_index=True
            )
        return df

    def __refact_dict(self, result: Dict):
        """Fill the fields the row builder reads with None when GBIF omits them."""
        desired_columns = [
            "scientificName",
            "decimalLongitude",
            "decimalLatitude",
            "country",
            "stateProvince",
            "eventDate",
            "day",
            "month",
            "year",
            "occurrenceRemarks",
        ]
        for d_col in desired_columns:
            if d_col not in result:
                result[d_col] = None
        return result

    def __clean_species_df(self, df: pd.DataFrame):
        """Drop rows without coordinates, deduplicate (if configured) and sort.

        Args:
            df (pd.DataFrame): raw accumulated occurrence rows.

        Returns:
            pd.DataFrame: cleaned frame sorted by STATE_PROVINCE.
        """
        # Double check to certify there is no empty lat/long data
        df = df[pd.notnull(df["LATITUDE"])]
        df = df[pd.notnull(df["LONGITUDE"])]

        # Removing duplicate data
        if configs["gbif"]["drop_duplicates"]:
            df = df.drop_duplicates(ignore_index=True)

        # Sorting Data by STATE_PROVINCE (reassignment instead of inplace
        # avoids a chained-assignment warning on the filtered slice).
        df = df.sort_values("STATE_PROVINCE", ignore_index=True)
        return df
class SpeciesGDFBuilder(SpeciesDFBuilder):
    """Builds a geopandas GeoDataFrame of species occurrences.

    Extends SpeciesDFBuilder by attaching point geometry to each occurrence
    row and, when a region is supplied, keeping only the points inside it.
    """

    def __init__(
        self, species: Species, proposed_region: Optional[ShapefileRegion] = None
    ):
        super().__init__(species)
        self.proposed_region = proposed_region
        self.__gdf_memory = None  # cached GeoDataFrame

    def save_species_gdf(self, output_path: Path):
        """Write the species GeoDataFrame to a shapefile at ``output_path``."""
        if not str(output_path).endswith(".shp"):
            raise TypeError("output_path must ends with shp")
        output_path.parent.mkdir(parents=True, exist_ok=True)
        self.get_species_gdf().to_file(output_path)

    def get_species_gdf(self):
        """Return the (possibly region-filtered) GeoDataFrame, caching it."""
        if self.__gdf_memory is not None:
            return self.__gdf_memory
        df = self.get_specie_df()
        gdf = gpd.GeoDataFrame(
            df, geometry=gpd.points_from_xy(df.LONGITUDE, df.LATITUDE)
        )
        gdf = gdf.set_crs(f"EPSG:{configs['maps']['default_epsg']}")
        if self.proposed_region is not None:
            gdf = self.__filter_species_in_region(gdf)
        self.__gdf_memory = gdf
        return gdf

    def __filter_species_in_region(self, gdf: gpd.GeoDataFrame):
        """Keep only the points that fall inside the proposed region."""
        return self.proposed_region.get_points_inside(gdf)
np.array(self.species_geodataframe[\"LATITUDE\"]), np.array(self.species_geodataframe[\"LONGITUDE\"]), )", "r = r.json() end_of_records = r[\"endOfRecords\"] return r, end_of_records, status_code class Species: def", "\"SCIENTIFIC_NAME\", \"LONGITUDE\", \"LATITUDE\", \"COUNTRY\", \"STATE_PROVINCE\", \"IDENTIFICATION_DATE\", \"DAY\", \"MONTH\", \"YEAR\", ] ) for result", "([pd.DaraFrame]): [description] Returns: [pd.DaraFrame]: [description] \"\"\" # Double check to certify there is", "\"LATITUDE\", \"COUNTRY\", \"STATE_PROVINCE\", \"IDENTIFICATION_DATE\", \"DAY\", \"MONTH\", \"YEAR\", ] ) for result in request[\"results\"]:", "configs[\"gbif\"] params = { \"taxonKey\": str(self.taxon_key), \"limit\": gbif_configs[\"one_request_limit\"], \"hasCoordinate\": True, \"year\": f\"{gbif_configs['low_year']},{gbif_configs['up_year']}\", \"country\":", "return r, end_of_records, status_code class Species: def __init__(self, taxon_key: int, name: str): self.taxon_key", "and status == 200: r, end_of_records, status = self.gbif_occ_requester.request(offset) df = self.__build_species_df(r, df)", "200: logger.logging.info( f\"API call failed at offset {offset} with a status code of", "self.__gdf_memory = None def save_species_gdf(self, output_path: Path): if not str(output_path).endswith(\".shp\"): raise TypeError(\"output_path must", "df ([type]): [description] request ([type]): [description] Returns: [df]: [description] \"\"\" if df is", "def __str__(self) -> str: return \"Species {self.name} with taxon key {self.taxon_key}\" class SpeciesDFBuilder:", "= \"http://api.gbif.org/v1/occurrence/search\" def request(self, offset: int = 0): \"\"\"[ Request GBIF information about", "def __init__( self, species: Species, proposed_region: Optional[ShapefileRegion] = None ): super().__init__(species) self.proposed_region =", "pd.DataFrame): \"\"\"[Cleaning Gbif Data] Args: df ([pd.DaraFrame]): [description] Returns: [pd.DaraFrame]: [description] \"\"\" #", "super().__init__(species) 
self.proposed_region = proposed_region self.__gdf_memory = None def save_species_gdf(self, output_path: Path): if not", "def __init__(self, species_geodataframe: gpd.GeoDataFrame) -> None: self.species_geodataframe = species_geodataframe def get_coordinates(self,): coordinates =", "\"\"\"[ Request GBIF information about an species] Args: offset (int, optional): [Offsset is", "proposed_region: Optional[ShapefileRegion] = None ): super().__init__(species) self.proposed_region = proposed_region self.__gdf_memory = None def", "self.taxon_key = taxon_key self.species_name = species_name self.base_url = \"http://api.gbif.org/v1/occurrence/search\" def request(self, offset: int" ]
[ "self.link.xp.array(((4, 1, 6, 3),), dtype=np.float32), self.link.xp.array( ((0, 1, 2, 3), (5, 4, 10,", "rois = self.link.xp.array(( (0, 0, 10, 10), (0, 1000, 0, 1000), (0, 0,", "labels, scores = self.link.decode( rois, roi_indices, locs, confs, (0.4, 0.2), ((100, 100), (200,", "@testing.parameterize( {'n_class': 1 + 1}, {'n_class': 5 + 1}, {'n_class': 20 + 1},", "scores[n].shape[0]) self.assertEqual(bboxes[n].shape[1:], (4,)) self.assertEqual(labels[n].shape[1:], ()) self.assertEqual(scores[n].shape[1:], ()) def test_decode_cpu(self): self._check_decode() @attr.gpu def test_decode_gpu(self):", "test_decode_cpu(self): self._check_decode() @attr.gpu def test_decode_gpu(self): self.link.to_gpu() self._check_decode() class TestHeadLoss(unittest.TestCase): def _check_head_loss_pre(self, xp): rois", "((0, 1, 2, 3), (5, 4, 10, 6)), dtype=np.float32), xp.array(((10, 4, 12, 10),),", "xp.ndarray) self.assertIsInstance(gt_labels[l], xp.ndarray) self.assertEqual(rois[l].shape[0], roi_indices[l].shape[0]) self.assertEqual(rois[l].shape[0], gt_locs[l].shape[0]) self.assertEqual(rois[l].shape[0], gt_labels[l].shape[0]) self.assertEqual(rois[l].shape[1:], (4,)) self.assertEqual(roi_indices[l].shape[1:], ())", "12, 10),), dtype=np.float32), ] roi_indices = [ xp.array((0,), dtype=np.int32), xp.array((1, 0), dtype=np.int32), xp.array((1,),", "= [ xp.array((0,), dtype=np.int32), xp.array((1, 0), dtype=np.int32), xp.array((1,), dtype=np.int32), ] bboxes = [", "xp.ndarray) self.assertIsInstance(roi_indices[l], xp.ndarray) self.assertIsInstance(gt_locs[l], xp.ndarray) self.assertIsInstance(gt_labels[l], xp.ndarray) self.assertEqual(rois[l].shape[0], roi_indices[l].shape[0]) self.assertEqual(rois[l].shape[0], gt_locs[l].shape[0]) self.assertEqual(rois[l].shape[0], gt_labels[l].shape[0])", "1 + 1}, {'n_class': 5 + 1}, {'n_class': 20 + 1}, ) class", "_check_head_loss_post(self, xp): locs = chainer.Variable(_random_array(xp, (20, 81, 4))) confs = chainer.Variable(_random_array(xp, (20, 
81)))", "self._check_distribute() def _check_decode(self): rois = [ self.link.xp.array(((4, 1, 6, 3),), dtype=np.float32), self.link.xp.array( ((0,", "loc_loss, conf_loss = head_loss_post( locs, confs, roi_indices, gt_locs, gt_labels, 2) self.assertIsInstance(loc_loss, chainer.Variable) self.assertIsInstance(loc_loss.array,", "10, 6)), dtype=np.float32), self.link.xp.array(((10, 4, 12, 10),), dtype=np.float32), ] roi_indices = [ self.link.xp.array((0,),", "rois, roi_indices, gt_locs, gt_labels = head_loss_pre( rois, roi_indices, (0.1, 0.2), bboxes, labels) self.assertEqual(len(rois),", "roi_indices[l].shape[0]) self.assertEqual(rois[l].shape[0], gt_locs[l].shape[0]) self.assertEqual(rois[l].shape[0], gt_labels[l].shape[0]) self.assertEqual(rois[l].shape[1:], (4,)) self.assertEqual(roi_indices[l].shape[1:], ()) self.assertEqual(gt_locs[l].shape[1:], (4,)) self.assertEqual(gt_labels[l].shape[1:], ())", "10, 6)), dtype=np.float32), xp.array(((10, 4, 12, 10),), dtype=np.float32), ] roi_indices = [ xp.array((0,),", "self.assertIsInstance(bboxes[n], self.link.xp.ndarray) self.assertIsInstance(labels[n], self.link.xp.ndarray) self.assertIsInstance(scores[n], self.link.xp.ndarray) self.assertEqual(bboxes[n].shape[0], labels[n].shape[0]) self.assertEqual(bboxes[n].shape[0], scores[n].shape[0]) self.assertEqual(bboxes[n].shape[1:], (4,)) self.assertEqual(labels[n].shape[1:],", "4, 12, 10),), dtype=np.float32), ] roi_indices = [ self.link.xp.array((0,), dtype=np.int32), self.link.xp.array((1, 0), dtype=np.int32),", "import head_loss_pre def _random_array(xp, shape): return xp.array( np.random.uniform(-1, 1, size=shape), dtype=np.float32) @testing.parameterize( {'n_class':", "4)), _random_array(xp, (7, 4)), _random_array(xp, (8, 4)), ] gt_labels = [ xp.random.randint(0, 80,", "= chainer.Variable(_random_array( self.link.xp, (4, self.n_class))) bboxes, labels, scores = self.link.decode( rois, roi_indices, locs,", "81))) roi_indices = [ xp.random.randint(0, 2, 
size=5).astype(np.int32), xp.random.randint(0, 2, size=7).astype(np.int32), xp.random.randint(0, 2, size=8).astype(np.int32),", "xp.random.randint(0, 80, size=8).astype(np.int32), ] loc_loss, conf_loss = head_loss_post( locs, confs, roi_indices, gt_locs, gt_labels,", "dtype=np.int32) rois, roi_indices = self.link.distribute(rois, roi_indices) self.assertEqual(len(rois), 3) self.assertEqual(len(roi_indices), 3) for l in", "self.assertEqual(rois[l].shape[0], gt_labels[l].shape[0]) self.assertEqual(rois[l].shape[1:], (4,)) self.assertEqual(roi_indices[l].shape[1:], ()) self.assertEqual(gt_locs[l].shape[1:], (4,)) self.assertEqual(gt_labels[l].shape[1:], ()) def test_head_loss_pre_cpu(self): self._check_head_loss_pre(np)", "dtype=np.int32), ] bboxes = [ xp.array(((2, 4, 6, 7), (1, 12, 3, 30)),", "8))), ] rois = [ self.link.xp.array(((4, 1, 6, 3),), dtype=np.float32), self.link.xp.array( ((0, 1,", "for l in range(3): self.assertIsInstance(rois[l], xp.ndarray) self.assertIsInstance(roi_indices[l], xp.ndarray) self.assertIsInstance(gt_locs[l], xp.ndarray) self.assertIsInstance(gt_labels[l], xp.ndarray) self.assertEqual(rois[l].shape[0],", "(4, self.n_class, 4)) self.assertIsInstance(confs, chainer.Variable) self.assertIsInstance(confs.array, self.link.xp.ndarray) self.assertEqual(confs.shape, (4, self.n_class)) def test_call_cpu(self): self._check_call()", "size=7).astype(np.int32), xp.random.randint(0, 80, size=8).astype(np.int32), ] loc_loss, conf_loss = head_loss_post( locs, confs, roi_indices, gt_locs,", "2, 3), (5, 4, 10, 6)), dtype=np.float32), self.link.xp.array(((10, 4, 12, 10),), dtype=np.float32), ]", "[ self.link.xp.array((0,), dtype=np.int32), self.link.xp.array((1, 0), dtype=np.int32), self.link.xp.array((1,), dtype=np.int32), ] locs, confs = self.link(hs,", "chainer.Variable) self.assertIsInstance(confs.array, self.link.xp.ndarray) self.assertEqual(confs.shape, (4, self.n_class)) def test_call_cpu(self): self._check_call() @attr.gpu def test_call_gpu(self): 
self.link.to_gpu()", "self.assertEqual(labels[n].shape[1:], ()) self.assertEqual(scores[n].shape[1:], ()) def test_decode_cpu(self): self._check_decode() @attr.gpu def test_decode_gpu(self): self.link.to_gpu() self._check_decode() class", "12, 3, 30)), dtype=np.float32), xp.array(((10, 2, 12, 12),), dtype=np.float32), ] labels = [", "import attr from chainercv.links.model.fpn import Head from chainercv.links.model.fpn import head_loss_post from chainercv.links.model.fpn import", "def _check_decode(self): rois = [ self.link.xp.array(((4, 1, 6, 3),), dtype=np.float32), self.link.xp.array( ((0, 1,", "self.assertIsInstance(confs, chainer.Variable) self.assertIsInstance(confs.array, self.link.xp.ndarray) self.assertEqual(confs.shape, (4, self.n_class)) def test_call_cpu(self): self._check_call() @attr.gpu def test_call_gpu(self):", "roi_indices, gt_locs, gt_labels = head_loss_pre( rois, roi_indices, (0.1, 0.2), bboxes, labels) self.assertEqual(len(rois), 3)", "roi_indices = self.link.distribute(rois, roi_indices) self.assertEqual(len(rois), 3) self.assertEqual(len(roi_indices), 3) for l in range(3): self.assertIsInstance(rois[l],", "def test_call_cpu(self): self._check_call() @attr.gpu def test_call_gpu(self): self.link.to_gpu() self._check_call() def _check_distribute(self): rois = self.link.xp.array((", "6, 3),), dtype=np.float32), self.link.xp.array( ((0, 1, 2, 3), (5, 4, 10, 6)), dtype=np.float32),", "test_distribute_gpu(self): self.link.to_gpu() self._check_distribute() def _check_decode(self): rois = [ self.link.xp.array(((4, 1, 6, 3),), dtype=np.float32),", "chainer.testing import attr from chainercv.links.model.fpn import Head from chainercv.links.model.fpn import head_loss_post from chainercv.links.model.fpn", "(4,)) self.assertEqual(roi_indices[l].shape[1:], ()) self.assertEqual(gt_locs[l].shape[1:], (4,)) self.assertEqual(gt_labels[l].shape[1:], ()) def test_head_loss_pre_cpu(self): self._check_head_loss_pre(np) @attr.gpu def test_head_loss_pre_gpu(self):", 
"= [ xp.array(((4, 1, 6, 3),), dtype=np.float32), xp.array( ((0, 1, 2, 3), (5,", "self.assertEqual(len(roi_indices), 3) self.assertEqual(len(gt_locs), 3) self.assertEqual(len(gt_labels), 3) for l in range(3): self.assertIsInstance(rois[l], xp.ndarray) self.assertIsInstance(roi_indices[l],", "dtype=np.int32), xp.array((1,), dtype=np.int32), ] bboxes = [ xp.array(((2, 4, 6, 7), (1, 12,", "self.assertEqual(rois[l].shape[1:], (4,)) self.assertEqual(roi_indices[l].shape[1:], ()) self.assertEqual(gt_locs[l].shape[1:], (4,)) self.assertEqual(gt_labels[l].shape[1:], ()) def test_head_loss_pre_cpu(self): self._check_head_loss_pre(np) @attr.gpu def", "(0, 0, 10, 10), (0, 1000, 0, 1000), (0, 0, 224, 224), (100,", "self.link.xp.array( ((0, 1, 2, 3), (5, 4, 10, 6)), dtype=np.float32), self.link.xp.array(((10, 4, 12,", "in range(3): self.assertIsInstance(rois[l], xp.ndarray) self.assertIsInstance(roi_indices[l], xp.ndarray) self.assertIsInstance(gt_locs[l], xp.ndarray) self.assertIsInstance(gt_labels[l], xp.ndarray) self.assertEqual(rois[l].shape[0], roi_indices[l].shape[0]) self.assertEqual(rois[l].shape[0],", "size=shape), dtype=np.float32) @testing.parameterize( {'n_class': 1 + 1}, {'n_class': 5 + 1}, {'n_class': 20", "3), (5, 4, 10, 6)), dtype=np.float32), xp.array(((10, 4, 12, 10),), dtype=np.float32), ] roi_indices", "size=5).astype(np.int32), xp.random.randint(0, 2, size=7).astype(np.int32), xp.random.randint(0, 2, size=8).astype(np.int32), ] gt_locs = [ _random_array(xp, (5,", "1000, 0, 1000), (0, 0, 224, 224), (100, 100, 224, 224), ), dtype=np.float32)", "= self.link.decode( rois, roi_indices, locs, confs, (0.4, 0.2), ((100, 100), (200, 200)), 0.5,", "self.assertIsInstance(rois[l], self.link.xp.ndarray) self.assertIsInstance(roi_indices[l], self.link.xp.ndarray) self.assertEqual(rois[l].shape[0], roi_indices[l].shape[0]) self.assertEqual(rois[l].shape[1:], (4,)) self.assertEqual(roi_indices[l].shape[1:], ()) self.assertEqual(sum(rois[l].shape[0] for l", "= 
Head(n_class=self.n_class, scales=(1 / 2, 1 / 4, 1 / 8)) def _check_call(self):", "scores = self.link.decode( rois, roi_indices, locs, confs, (0.4, 0.2), ((100, 100), (200, 200)),", "10),), dtype=np.float32), ] roi_indices = [ xp.array((0,), dtype=np.int32), xp.array((1, 0), dtype=np.int32), xp.array((1,), dtype=np.int32),", "gt_locs, gt_labels = head_loss_pre( rois, roi_indices, (0.1, 0.2), bboxes, labels) self.assertEqual(len(rois), 3) self.assertEqual(len(roi_indices),", "1, size=shape), dtype=np.float32) @testing.parameterize( {'n_class': 1 + 1}, {'n_class': 5 + 1}, {'n_class':", "chainer.Variable) self.assertIsInstance(conf_loss.array, xp.ndarray) self.assertEqual(conf_loss.shape, ()) def test_head_loss_post_cpu(self): self._check_head_loss_post(np) @attr.gpu def test_head_loss_post_gpu(self): import cupy", "[ xp.array((10, 4), dtype=np.float32), xp.array((1,), dtype=np.float32), ] rois, roi_indices, gt_locs, gt_labels = head_loss_pre(", "2, size=8).astype(np.int32), ] gt_locs = [ _random_array(xp, (5, 4)), _random_array(xp, (7, 4)), _random_array(xp,", "= head_loss_post( locs, confs, roi_indices, gt_locs, gt_labels, 2) self.assertIsInstance(loc_loss, chainer.Variable) self.assertIsInstance(loc_loss.array, xp.ndarray) self.assertEqual(loc_loss.shape,", "def test_distribute_cpu(self): self._check_distribute() @attr.gpu def test_distribute_gpu(self): self.link.to_gpu() self._check_distribute() def _check_decode(self): rois = [", "n in range(2): self.assertIsInstance(bboxes[n], self.link.xp.ndarray) self.assertIsInstance(labels[n], self.link.xp.ndarray) self.assertIsInstance(scores[n], self.link.xp.ndarray) self.assertEqual(bboxes[n].shape[0], labels[n].shape[0]) self.assertEqual(bboxes[n].shape[0], scores[n].shape[0])", "rois, roi_indices) self.assertIsInstance(locs, chainer.Variable) self.assertIsInstance(locs.array, self.link.xp.ndarray) self.assertEqual(locs.shape, (4, self.n_class, 4)) self.assertIsInstance(confs, chainer.Variable) 
self.assertIsInstance(confs.array,", "rois, roi_indices, (0.1, 0.2), bboxes, labels) self.assertEqual(len(rois), 3) self.assertEqual(len(roi_indices), 3) self.assertEqual(len(gt_locs), 3) self.assertEqual(len(gt_labels),", "self.link.xp.array((0,), dtype=np.int32), self.link.xp.array((1, 0), dtype=np.int32), self.link.xp.array((1,), dtype=np.int32), ] locs = chainer.Variable(_random_array( self.link.xp, (4,", "self.link.to_gpu() self._check_call() def _check_distribute(self): rois = self.link.xp.array(( (0, 0, 10, 10), (0, 1000,", "self.link.xp.array((1, 0), dtype=np.int32), self.link.xp.array((1,), dtype=np.int32), ] locs, confs = self.link(hs, rois, roi_indices) self.assertIsInstance(locs,", "6)), dtype=np.float32), self.link.xp.array(((10, 4, 12, 10),), dtype=np.float32), ] roi_indices = [ self.link.xp.array((0,), dtype=np.int32),", "self.assertIsInstance(gt_labels[l], xp.ndarray) self.assertEqual(rois[l].shape[0], roi_indices[l].shape[0]) self.assertEqual(rois[l].shape[0], gt_locs[l].shape[0]) self.assertEqual(rois[l].shape[0], gt_labels[l].shape[0]) self.assertEqual(rois[l].shape[1:], (4,)) self.assertEqual(roi_indices[l].shape[1:], ()) self.assertEqual(gt_locs[l].shape[1:],", "xp.ndarray) self.assertIsInstance(gt_locs[l], xp.ndarray) self.assertIsInstance(gt_labels[l], xp.ndarray) self.assertEqual(rois[l].shape[0], roi_indices[l].shape[0]) self.assertEqual(rois[l].shape[0], gt_locs[l].shape[0]) self.assertEqual(rois[l].shape[0], gt_labels[l].shape[0]) self.assertEqual(rois[l].shape[1:], (4,))", "_random_array(xp, (7, 4)), _random_array(xp, (8, 4)), ] gt_labels = [ xp.random.randint(0, 80, size=5).astype(np.int32),", "self.assertIsInstance(locs, chainer.Variable) self.assertIsInstance(locs.array, self.link.xp.ndarray) self.assertEqual(locs.shape, (4, self.n_class, 4)) self.assertIsInstance(confs, chainer.Variable) self.assertIsInstance(confs.array, self.link.xp.ndarray) self.assertEqual(confs.shape,", "dtype=np.int32), self.link.xp.array((1, 0), 
dtype=np.int32), self.link.xp.array((1,), dtype=np.int32), ] locs, confs = self.link(hs, rois, roi_indices)", "self._check_decode() class TestHeadLoss(unittest.TestCase): def _check_head_loss_pre(self, xp): rois = [ xp.array(((4, 1, 6, 3),),", "[ xp.array(((4, 1, 6, 3),), dtype=np.float32), xp.array( ((0, 1, 2, 3), (5, 4,", "self.link.xp.array(( (0, 0, 10, 10), (0, 1000, 0, 1000), (0, 0, 224, 224),", "(7, 4)), _random_array(xp, (8, 4)), ] gt_labels = [ xp.random.randint(0, 80, size=5).astype(np.int32), xp.random.randint(0,", "4, 10, 6)), dtype=np.float32), self.link.xp.array(((10, 4, 12, 10),), dtype=np.float32), ] roi_indices = [", "] labels = [ xp.array((10, 4), dtype=np.float32), xp.array((1,), dtype=np.float32), ] rois, roi_indices, gt_locs,", "TestHead(unittest.TestCase): def setUp(self): self.link = Head(n_class=self.n_class, scales=(1 / 2, 1 / 4, 1", "_check_distribute(self): rois = self.link.xp.array(( (0, 0, 10, 10), (0, 1000, 0, 1000), (0,", "self.link.xp.array((1,), dtype=np.int32), ] locs, confs = self.link(hs, rois, roi_indices) self.assertIsInstance(locs, chainer.Variable) self.assertIsInstance(locs.array, self.link.xp.ndarray)", "(4,)) self.assertEqual(roi_indices[l].shape[1:], ()) self.assertEqual(sum(rois[l].shape[0] for l in range(3)), 4) def test_distribute_cpu(self): self._check_distribute() @attr.gpu", "(100, 100, 224, 224), ), dtype=np.float32) roi_indices = self.link.xp.array((0, 1, 0, 0), dtype=np.int32)", "1, 6, 3),), dtype=np.float32), xp.array( ((0, 1, 2, 3), (5, 4, 10, 6)),", "64, 8, 8))), ] rois = [ self.link.xp.array(((4, 1, 6, 3),), dtype=np.float32), self.link.xp.array(", "dtype=np.float32), self.link.xp.array( ((0, 1, 2, 3), (5, 4, 10, 6)), dtype=np.float32), self.link.xp.array(((10, 4,", "1}, {'n_class': 5 + 1}, {'n_class': 20 + 1}, ) class TestHead(unittest.TestCase): def", "self.link.xp.array((1, 0), dtype=np.int32), self.link.xp.array((1,), dtype=np.int32), ] locs = chainer.Variable(_random_array( self.link.xp, (4, self.n_class, 
4)))", "0.2), ((100, 100), (200, 200)), 0.5, 0.1) self.assertEqual(len(bboxes), 2) self.assertEqual(len(labels), 2) self.assertEqual(len(scores), 2)", "self.assertEqual(conf_loss.shape, ()) def test_head_loss_post_cpu(self): self._check_head_loss_post(np) @attr.gpu def test_head_loss_post_gpu(self): import cupy self._check_head_loss_post(cupy) testing.run_module(__name__, __file__)", "from chainer import testing from chainer.testing import attr from chainercv.links.model.fpn import Head from", "2) self.assertEqual(len(labels), 2) self.assertEqual(len(scores), 2) for n in range(2): self.assertIsInstance(bboxes[n], self.link.xp.ndarray) self.assertIsInstance(labels[n], self.link.xp.ndarray)", "{'n_class': 5 + 1}, {'n_class': 20 + 1}, ) class TestHead(unittest.TestCase): def setUp(self):", "100, 224, 224), ), dtype=np.float32) roi_indices = self.link.xp.array((0, 1, 0, 0), dtype=np.int32) rois,", "] roi_indices = [ xp.array((0,), dtype=np.int32), xp.array((1, 0), dtype=np.int32), xp.array((1,), dtype=np.int32), ] bboxes", "2, 3), (5, 4, 10, 6)), dtype=np.float32), xp.array(((10, 4, 12, 10),), dtype=np.float32), ]", "= [ self.link.xp.array((0,), dtype=np.int32), self.link.xp.array((1, 0), dtype=np.int32), self.link.xp.array((1,), dtype=np.int32), ] locs, confs =", "from chainercv.links.model.fpn import Head from chainercv.links.model.fpn import head_loss_post from chainercv.links.model.fpn import head_loss_pre def", "+ 1}, ) class TestHead(unittest.TestCase): def setUp(self): self.link = Head(n_class=self.n_class, scales=(1 / 2,", "def test_head_loss_pre_cpu(self): self._check_head_loss_pre(np) @attr.gpu def test_head_loss_pre_gpu(self): import cupy self._check_head_loss_pre(cupy) def _check_head_loss_post(self, xp): locs", "as np import unittest import chainer from chainer import testing from chainer.testing import", "self.link.to_gpu() self._check_decode() class TestHeadLoss(unittest.TestCase): def _check_head_loss_pre(self, xp): rois = [ xp.array(((4, 1, 6,", "2, 
size=5).astype(np.int32), xp.random.randint(0, 2, size=7).astype(np.int32), xp.random.randint(0, 2, size=8).astype(np.int32), ] gt_locs = [ _random_array(xp,", "import unittest import chainer from chainer import testing from chainer.testing import attr from", "xp.random.randint(0, 2, size=5).astype(np.int32), xp.random.randint(0, 2, size=7).astype(np.int32), xp.random.randint(0, 2, size=8).astype(np.int32), ] gt_locs = [", "np import unittest import chainer from chainer import testing from chainer.testing import attr", "roi_indices = [ self.link.xp.array((0,), dtype=np.int32), self.link.xp.array((1, 0), dtype=np.int32), self.link.xp.array((1,), dtype=np.int32), ] locs =", "(2, 64, 8, 8))), ] rois = [ self.link.xp.array(((4, 1, 6, 3),), dtype=np.float32),", "xp.random.randint(0, 2, size=7).astype(np.int32), xp.random.randint(0, 2, size=8).astype(np.int32), ] gt_locs = [ _random_array(xp, (5, 4)),", "test_head_loss_pre_cpu(self): self._check_head_loss_pre(np) @attr.gpu def test_head_loss_pre_gpu(self): import cupy self._check_head_loss_pre(cupy) def _check_head_loss_post(self, xp): locs =", "self.assertEqual(len(rois), 3) self.assertEqual(len(roi_indices), 3) for l in range(3): self.assertIsInstance(rois[l], self.link.xp.ndarray) self.assertIsInstance(roi_indices[l], self.link.xp.ndarray) self.assertEqual(rois[l].shape[0],", "chainer.Variable(_random_array( self.link.xp, (4, self.n_class, 4))) confs = chainer.Variable(_random_array( self.link.xp, (4, self.n_class))) bboxes, labels,", "16, 16))), chainer.Variable(_random_array(self.link.xp, (2, 64, 8, 8))), ] rois = [ self.link.xp.array(((4, 1,", "81, 4))) confs = chainer.Variable(_random_array(xp, (20, 81))) roi_indices = [ xp.random.randint(0, 2, size=5).astype(np.int32),", "64, 16, 16))), chainer.Variable(_random_array(self.link.xp, (2, 64, 8, 8))), ] rois = [ self.link.xp.array(((4,", "locs, confs, roi_indices, gt_locs, gt_labels, 2) self.assertIsInstance(loc_loss, chainer.Variable) 
self.assertIsInstance(loc_loss.array, xp.ndarray) self.assertEqual(loc_loss.shape, ()) self.assertIsInstance(conf_loss,", "= [ chainer.Variable(_random_array(self.link.xp, (2, 64, 32, 32))), chainer.Variable(_random_array(self.link.xp, (2, 64, 16, 16))), chainer.Variable(_random_array(self.link.xp,", "__future__ import division import numpy as np import unittest import chainer from chainer", "(2, 64, 16, 16))), chainer.Variable(_random_array(self.link.xp, (2, 64, 8, 8))), ] rois = [", "0), dtype=np.int32) rois, roi_indices = self.link.distribute(rois, roi_indices) self.assertEqual(len(rois), 3) self.assertEqual(len(roi_indices), 3) for l", "xp.array( np.random.uniform(-1, 1, size=shape), dtype=np.float32) @testing.parameterize( {'n_class': 1 + 1}, {'n_class': 5 +", "10), (0, 1000, 0, 1000), (0, 0, 224, 224), (100, 100, 224, 224),", "self.n_class, 4))) confs = chainer.Variable(_random_array( self.link.xp, (4, self.n_class))) bboxes, labels, scores = self.link.decode(", "self.assertEqual(scores[n].shape[1:], ()) def test_decode_cpu(self): self._check_decode() @attr.gpu def test_decode_gpu(self): self.link.to_gpu() self._check_decode() class TestHeadLoss(unittest.TestCase): def", "self.link.xp.array((0, 1, 0, 0), dtype=np.int32) rois, roi_indices = self.link.distribute(rois, roi_indices) self.assertEqual(len(rois), 3) self.assertEqual(len(roi_indices),", "xp.array((10, 4), dtype=np.float32), xp.array((1,), dtype=np.float32), ] rois, roi_indices, gt_locs, gt_labels = head_loss_pre( rois,", "self.link.xp.ndarray) self.assertEqual(bboxes[n].shape[0], labels[n].shape[0]) self.assertEqual(bboxes[n].shape[0], scores[n].shape[0]) self.assertEqual(bboxes[n].shape[1:], (4,)) self.assertEqual(labels[n].shape[1:], ()) self.assertEqual(scores[n].shape[1:], ()) def test_decode_cpu(self):", "return xp.array( np.random.uniform(-1, 1, size=shape), dtype=np.float32) @testing.parameterize( {'n_class': 1 + 1}, {'n_class': 5", "32, 32))), chainer.Variable(_random_array(self.link.xp, (2, 
64, 16, 16))), chainer.Variable(_random_array(self.link.xp, (2, 64, 8, 8))), ]", "self.link.to_gpu() self._check_distribute() def _check_decode(self): rois = [ self.link.xp.array(((4, 1, 6, 3),), dtype=np.float32), self.link.xp.array(", "3) self.assertEqual(len(roi_indices), 3) for l in range(3): self.assertIsInstance(rois[l], self.link.xp.ndarray) self.assertIsInstance(roi_indices[l], self.link.xp.ndarray) self.assertEqual(rois[l].shape[0], roi_indices[l].shape[0])", "= [ _random_array(xp, (5, 4)), _random_array(xp, (7, 4)), _random_array(xp, (8, 4)), ] gt_labels", "head_loss_post from chainercv.links.model.fpn import head_loss_pre def _random_array(xp, shape): return xp.array( np.random.uniform(-1, 1, size=shape),", "= self.link.xp.array((0, 1, 0, 0), dtype=np.int32) rois, roi_indices = self.link.distribute(rois, roi_indices) self.assertEqual(len(rois), 3)", "self.assertEqual(rois[l].shape[0], roi_indices[l].shape[0]) self.assertEqual(rois[l].shape[1:], (4,)) self.assertEqual(roi_indices[l].shape[1:], ()) self.assertEqual(sum(rois[l].shape[0] for l in range(3)), 4) def", "] rois, roi_indices, gt_locs, gt_labels = head_loss_pre( rois, roi_indices, (0.1, 0.2), bboxes, labels)", "dtype=np.float32), xp.array(((10, 4, 12, 10),), dtype=np.float32), ] roi_indices = [ xp.array((0,), dtype=np.int32), xp.array((1,", "1, 2, 3), (5, 4, 10, 6)), dtype=np.float32), self.link.xp.array(((10, 4, 12, 10),), dtype=np.float32),", "1000), (0, 0, 224, 224), (100, 100, 224, 224), ), dtype=np.float32) roi_indices =", "_check_call(self): hs = [ chainer.Variable(_random_array(self.link.xp, (2, 64, 32, 32))), chainer.Variable(_random_array(self.link.xp, (2, 64, 16,", "16))), chainer.Variable(_random_array(self.link.xp, (2, 64, 8, 8))), ] rois = [ self.link.xp.array(((4, 1, 6,", "l in range(3): self.assertIsInstance(rois[l], self.link.xp.ndarray) self.assertIsInstance(roi_indices[l], self.link.xp.ndarray) self.assertEqual(rois[l].shape[0], roi_indices[l].shape[0]) 
self.assertEqual(rois[l].shape[1:], (4,)) self.assertEqual(roi_indices[l].shape[1:], ())", "= head_loss_pre( rois, roi_indices, (0.1, 0.2), bboxes, labels) self.assertEqual(len(rois), 3) self.assertEqual(len(roi_indices), 3) self.assertEqual(len(gt_locs),", "self.link.xp.array(((10, 4, 12, 10),), dtype=np.float32), ] roi_indices = [ self.link.xp.array((0,), dtype=np.int32), self.link.xp.array((1, 0),", "self.assertIsInstance(conf_loss.array, xp.ndarray) self.assertEqual(conf_loss.shape, ()) def test_head_loss_post_cpu(self): self._check_head_loss_post(np) @attr.gpu def test_head_loss_post_gpu(self): import cupy self._check_head_loss_post(cupy)", "7), (1, 12, 3, 30)), dtype=np.float32), xp.array(((10, 2, 12, 12),), dtype=np.float32), ] labels", "conf_loss = head_loss_post( locs, confs, roi_indices, gt_locs, gt_labels, 2) self.assertIsInstance(loc_loss, chainer.Variable) self.assertIsInstance(loc_loss.array, xp.ndarray)", "labels[n].shape[0]) self.assertEqual(bboxes[n].shape[0], scores[n].shape[0]) self.assertEqual(bboxes[n].shape[1:], (4,)) self.assertEqual(labels[n].shape[1:], ()) self.assertEqual(scores[n].shape[1:], ()) def test_decode_cpu(self): self._check_decode() @attr.gpu", "chainer.Variable(_random_array(self.link.xp, (2, 64, 32, 32))), chainer.Variable(_random_array(self.link.xp, (2, 64, 16, 16))), chainer.Variable(_random_array(self.link.xp, (2, 64,", "size=7).astype(np.int32), xp.random.randint(0, 2, size=8).astype(np.int32), ] gt_locs = [ _random_array(xp, (5, 4)), _random_array(xp, (7,", "xp.array((0,), dtype=np.int32), xp.array((1, 0), dtype=np.int32), xp.array((1,), dtype=np.int32), ] bboxes = [ xp.array(((2, 4,", "xp.array(((4, 1, 6, 3),), dtype=np.float32), xp.array( ((0, 1, 2, 3), (5, 4, 10,", "rois, roi_indices = self.link.distribute(rois, roi_indices) self.assertEqual(len(rois), 3) self.assertEqual(len(roi_indices), 3) for l in range(3):", "_random_array(xp, (5, 4)), _random_array(xp, (7, 4)), _random_array(xp, (8, 4)), ] gt_labels = [", 
"self._check_call() @attr.gpu def test_call_gpu(self): self.link.to_gpu() self._check_call() def _check_distribute(self): rois = self.link.xp.array(( (0, 0,", "self.assertIsInstance(gt_locs[l], xp.ndarray) self.assertIsInstance(gt_labels[l], xp.ndarray) self.assertEqual(rois[l].shape[0], roi_indices[l].shape[0]) self.assertEqual(rois[l].shape[0], gt_locs[l].shape[0]) self.assertEqual(rois[l].shape[0], gt_labels[l].shape[0]) self.assertEqual(rois[l].shape[1:], (4,)) self.assertEqual(roi_indices[l].shape[1:],", "1}, {'n_class': 20 + 1}, ) class TestHead(unittest.TestCase): def setUp(self): self.link = Head(n_class=self.n_class,", "chainer.Variable(_random_array(xp, (20, 81, 4))) confs = chainer.Variable(_random_array(xp, (20, 81))) roi_indices = [ xp.random.randint(0,", "self.assertEqual(locs.shape, (4, self.n_class, 4)) self.assertIsInstance(confs, chainer.Variable) self.assertIsInstance(confs.array, self.link.xp.ndarray) self.assertEqual(confs.shape, (4, self.n_class)) def test_call_cpu(self):", "Head from chainercv.links.model.fpn import head_loss_post from chainercv.links.model.fpn import head_loss_pre def _random_array(xp, shape): return", "0, 10, 10), (0, 1000, 0, 1000), (0, 0, 224, 224), (100, 100,", "+ 1}, {'n_class': 5 + 1}, {'n_class': 20 + 1}, ) class TestHead(unittest.TestCase):", "] locs = chainer.Variable(_random_array( self.link.xp, (4, self.n_class, 4))) confs = chainer.Variable(_random_array( self.link.xp, (4,", "xp.array(((2, 4, 6, 7), (1, 12, 3, 30)), dtype=np.float32), xp.array(((10, 2, 12, 12),),", "xp): locs = chainer.Variable(_random_array(xp, (20, 81, 4))) confs = chainer.Variable(_random_array(xp, (20, 81))) roi_indices", "4, 6, 7), (1, 12, 3, 30)), dtype=np.float32), xp.array(((10, 2, 12, 12),), dtype=np.float32),", "xp.random.randint(0, 2, size=8).astype(np.int32), ] gt_locs = [ _random_array(xp, (5, 4)), _random_array(xp, (7, 4)),", "100), (200, 200)), 0.5, 0.1) self.assertEqual(len(bboxes), 2) self.assertEqual(len(labels), 2) 
self.assertEqual(len(scores), 2) for n", "4)), ] gt_labels = [ xp.random.randint(0, 80, size=5).astype(np.int32), xp.random.randint(0, 80, size=7).astype(np.int32), xp.random.randint(0, 80,", "12, 10),), dtype=np.float32), ] roi_indices = [ self.link.xp.array((0,), dtype=np.int32), self.link.xp.array((1, 0), dtype=np.int32), self.link.xp.array((1,),", "chainercv.links.model.fpn import Head from chainercv.links.model.fpn import head_loss_post from chainercv.links.model.fpn import head_loss_pre def _random_array(xp,", "dtype=np.float32), ] roi_indices = [ xp.array((0,), dtype=np.int32), xp.array((1, 0), dtype=np.int32), xp.array((1,), dtype=np.int32), ]", "unittest import chainer from chainer import testing from chainer.testing import attr from chainercv.links.model.fpn", "1, 6, 3),), dtype=np.float32), self.link.xp.array( ((0, 1, 2, 3), (5, 4, 10, 6)),", "dtype=np.int32), self.link.xp.array((1,), dtype=np.int32), ] locs, confs = self.link(hs, rois, roi_indices) self.assertIsInstance(locs, chainer.Variable) self.assertIsInstance(locs.array,", "self._check_call() def _check_distribute(self): rois = self.link.xp.array(( (0, 0, 10, 10), (0, 1000, 0,", "confs = chainer.Variable(_random_array(xp, (20, 81))) roi_indices = [ xp.random.randint(0, 2, size=5).astype(np.int32), xp.random.randint(0, 2,", "chainercv.links.model.fpn import head_loss_post from chainercv.links.model.fpn import head_loss_pre def _random_array(xp, shape): return xp.array( np.random.uniform(-1,", "dtype=np.float32), self.link.xp.array(((10, 4, 12, 10),), dtype=np.float32), ] roi_indices = [ self.link.xp.array((0,), dtype=np.int32), self.link.xp.array((1,", "from __future__ import division import numpy as np import unittest import chainer from", "test_distribute_cpu(self): self._check_distribute() @attr.gpu def test_distribute_gpu(self): self.link.to_gpu() self._check_distribute() def _check_decode(self): rois = [ self.link.xp.array(((4,", "[ xp.random.randint(0, 80, size=5).astype(np.int32), 
xp.random.randint(0, 80, size=7).astype(np.int32), xp.random.randint(0, 80, size=8).astype(np.int32), ] loc_loss, conf_loss", "@attr.gpu def test_decode_gpu(self): self.link.to_gpu() self._check_decode() class TestHeadLoss(unittest.TestCase): def _check_head_loss_pre(self, xp): rois = [", "test_head_loss_pre_gpu(self): import cupy self._check_head_loss_pre(cupy) def _check_head_loss_post(self, xp): locs = chainer.Variable(_random_array(xp, (20, 81, 4)))", "self.link.xp.ndarray) self.assertEqual(rois[l].shape[0], roi_indices[l].shape[0]) self.assertEqual(rois[l].shape[1:], (4,)) self.assertEqual(roi_indices[l].shape[1:], ()) self.assertEqual(sum(rois[l].shape[0] for l in range(3)), 4)", "32))), chainer.Variable(_random_array(self.link.xp, (2, 64, 16, 16))), chainer.Variable(_random_array(self.link.xp, (2, 64, 8, 8))), ] rois", "(20, 81))) roi_indices = [ xp.random.randint(0, 2, size=5).astype(np.int32), xp.random.randint(0, 2, size=7).astype(np.int32), xp.random.randint(0, 2,", "roi_indices) self.assertIsInstance(locs, chainer.Variable) self.assertIsInstance(locs.array, self.link.xp.ndarray) self.assertEqual(locs.shape, (4, self.n_class, 4)) self.assertIsInstance(confs, chainer.Variable) self.assertIsInstance(confs.array, self.link.xp.ndarray)", "def _check_call(self): hs = [ chainer.Variable(_random_array(self.link.xp, (2, 64, 32, 32))), chainer.Variable(_random_array(self.link.xp, (2, 64,", "64, 32, 32))), chainer.Variable(_random_array(self.link.xp, (2, 64, 16, 16))), chainer.Variable(_random_array(self.link.xp, (2, 64, 8, 8))),", "] locs, confs = self.link(hs, rois, roi_indices) self.assertIsInstance(locs, chainer.Variable) self.assertIsInstance(locs.array, self.link.xp.ndarray) self.assertEqual(locs.shape, (4,", "self.link.xp.array((1,), dtype=np.int32), ] locs = chainer.Variable(_random_array( self.link.xp, (4, self.n_class, 4))) confs = chainer.Variable(_random_array(", "roi_indices = [ xp.array((0,), dtype=np.int32), xp.array((1, 0), dtype=np.int32), 
xp.array((1,), dtype=np.int32), ] bboxes =", "self.assertEqual(len(gt_locs), 3) self.assertEqual(len(gt_labels), 3) for l in range(3): self.assertIsInstance(rois[l], xp.ndarray) self.assertIsInstance(roi_indices[l], xp.ndarray) self.assertIsInstance(gt_locs[l],", "self._check_decode() @attr.gpu def test_decode_gpu(self): self.link.to_gpu() self._check_decode() class TestHeadLoss(unittest.TestCase): def _check_head_loss_pre(self, xp): rois =", "locs = chainer.Variable(_random_array( self.link.xp, (4, self.n_class, 4))) confs = chainer.Variable(_random_array( self.link.xp, (4, self.n_class)))", "()) self.assertEqual(sum(rois[l].shape[0] for l in range(3)), 4) def test_distribute_cpu(self): self._check_distribute() @attr.gpu def test_distribute_gpu(self):", "def _check_distribute(self): rois = self.link.xp.array(( (0, 0, 10, 10), (0, 1000, 0, 1000),", "locs, confs = self.link(hs, rois, roi_indices) self.assertIsInstance(locs, chainer.Variable) self.assertIsInstance(locs.array, self.link.xp.ndarray) self.assertEqual(locs.shape, (4, self.n_class,", "self.assertIsInstance(loc_loss.array, xp.ndarray) self.assertEqual(loc_loss.shape, ()) self.assertIsInstance(conf_loss, chainer.Variable) self.assertIsInstance(conf_loss.array, xp.ndarray) self.assertEqual(conf_loss.shape, ()) def test_head_loss_post_cpu(self): self._check_head_loss_post(np)", "self.assertEqual(gt_labels[l].shape[1:], ()) def test_head_loss_pre_cpu(self): self._check_head_loss_pre(np) @attr.gpu def test_head_loss_pre_gpu(self): import cupy self._check_head_loss_pre(cupy) def _check_head_loss_post(self,", "shape): return xp.array( np.random.uniform(-1, 1, size=shape), dtype=np.float32) @testing.parameterize( {'n_class': 1 + 1}, {'n_class':", "class TestHeadLoss(unittest.TestCase): def _check_head_loss_pre(self, xp): rois = [ xp.array(((4, 1, 6, 3),), dtype=np.float32),", "xp.random.randint(0, 80, size=7).astype(np.int32), xp.random.randint(0, 80, size=8).astype(np.int32), ] loc_loss, conf_loss = 
head_loss_post( locs, confs,", "10, 10), (0, 1000, 0, 1000), (0, 0, 224, 224), (100, 100, 224,", "xp.array( ((0, 1, 2, 3), (5, 4, 10, 6)), dtype=np.float32), xp.array(((10, 4, 12,", "np.random.uniform(-1, 1, size=shape), dtype=np.float32) @testing.parameterize( {'n_class': 1 + 1}, {'n_class': 5 + 1},", "confs, roi_indices, gt_locs, gt_labels, 2) self.assertIsInstance(loc_loss, chainer.Variable) self.assertIsInstance(loc_loss.array, xp.ndarray) self.assertEqual(loc_loss.shape, ()) self.assertIsInstance(conf_loss, chainer.Variable)", "gt_labels[l].shape[0]) self.assertEqual(rois[l].shape[1:], (4,)) self.assertEqual(roi_indices[l].shape[1:], ()) self.assertEqual(gt_locs[l].shape[1:], (4,)) self.assertEqual(gt_labels[l].shape[1:], ()) def test_head_loss_pre_cpu(self): self._check_head_loss_pre(np) @attr.gpu", "= self.link.distribute(rois, roi_indices) self.assertEqual(len(rois), 3) self.assertEqual(len(roi_indices), 3) for l in range(3): self.assertIsInstance(rois[l], self.link.xp.ndarray)", "()) self.assertEqual(scores[n].shape[1:], ()) def test_decode_cpu(self): self._check_decode() @attr.gpu def test_decode_gpu(self): self.link.to_gpu() self._check_decode() class TestHeadLoss(unittest.TestCase):", "self.assertEqual(len(labels), 2) self.assertEqual(len(scores), 2) for n in range(2): self.assertIsInstance(bboxes[n], self.link.xp.ndarray) self.assertIsInstance(labels[n], self.link.xp.ndarray) self.assertIsInstance(scores[n],", "size=8).astype(np.int32), ] loc_loss, conf_loss = head_loss_post( locs, confs, roi_indices, gt_locs, gt_labels, 2) self.assertIsInstance(loc_loss,", "(4,)) self.assertEqual(gt_labels[l].shape[1:], ()) def test_head_loss_pre_cpu(self): self._check_head_loss_pre(np) @attr.gpu def test_head_loss_pre_gpu(self): import cupy self._check_head_loss_pre(cupy) def", "self.assertEqual(loc_loss.shape, ()) self.assertIsInstance(conf_loss, chainer.Variable) self.assertIsInstance(conf_loss.array, xp.ndarray) self.assertEqual(conf_loss.shape, ()) def 
test_head_loss_post_cpu(self): self._check_head_loss_post(np) @attr.gpu def", "((0, 1, 2, 3), (5, 4, 10, 6)), dtype=np.float32), self.link.xp.array(((10, 4, 12, 10),),", "self.assertIsInstance(labels[n], self.link.xp.ndarray) self.assertIsInstance(scores[n], self.link.xp.ndarray) self.assertEqual(bboxes[n].shape[0], labels[n].shape[0]) self.assertEqual(bboxes[n].shape[0], scores[n].shape[0]) self.assertEqual(bboxes[n].shape[1:], (4,)) self.assertEqual(labels[n].shape[1:], ()) self.assertEqual(scores[n].shape[1:],", "[ xp.array((0,), dtype=np.int32), xp.array((1, 0), dtype=np.int32), xp.array((1,), dtype=np.int32), ] bboxes = [ xp.array(((2,", "from chainer.testing import attr from chainercv.links.model.fpn import Head from chainercv.links.model.fpn import head_loss_post from", "roi_indices, gt_locs, gt_labels, 2) self.assertIsInstance(loc_loss, chainer.Variable) self.assertIsInstance(loc_loss.array, xp.ndarray) self.assertEqual(loc_loss.shape, ()) self.assertIsInstance(conf_loss, chainer.Variable) self.assertIsInstance(conf_loss.array,", "self.assertEqual(confs.shape, (4, self.n_class)) def test_call_cpu(self): self._check_call() @attr.gpu def test_call_gpu(self): self.link.to_gpu() self._check_call() def _check_distribute(self):", "self.assertEqual(rois[l].shape[0], gt_locs[l].shape[0]) self.assertEqual(rois[l].shape[0], gt_labels[l].shape[0]) self.assertEqual(rois[l].shape[1:], (4,)) self.assertEqual(roi_indices[l].shape[1:], ()) self.assertEqual(gt_locs[l].shape[1:], (4,)) self.assertEqual(gt_labels[l].shape[1:], ()) def", "6)), dtype=np.float32), xp.array(((10, 4, 12, 10),), dtype=np.float32), ] roi_indices = [ xp.array((0,), dtype=np.int32),", "for l in range(3): self.assertIsInstance(rois[l], self.link.xp.ndarray) self.assertIsInstance(roi_indices[l], self.link.xp.ndarray) self.assertEqual(rois[l].shape[0], roi_indices[l].shape[0]) self.assertEqual(rois[l].shape[1:], (4,)) self.assertEqual(roi_indices[l].shape[1:],", "4, 1 / 8)) def _check_call(self): hs = [ 
chainer.Variable(_random_array(self.link.xp, (2, 64, 32,", "3),), dtype=np.float32), xp.array( ((0, 1, 2, 3), (5, 4, 10, 6)), dtype=np.float32), xp.array(((10,", "chainer from chainer import testing from chainer.testing import attr from chainercv.links.model.fpn import Head", "import head_loss_post from chainercv.links.model.fpn import head_loss_pre def _random_array(xp, shape): return xp.array( np.random.uniform(-1, 1,", "bboxes, labels, scores = self.link.decode( rois, roi_indices, locs, confs, (0.4, 0.2), ((100, 100),", "def setUp(self): self.link = Head(n_class=self.n_class, scales=(1 / 2, 1 / 4, 1 /", "roi_indices[l].shape[0]) self.assertEqual(rois[l].shape[1:], (4,)) self.assertEqual(roi_indices[l].shape[1:], ()) self.assertEqual(sum(rois[l].shape[0] for l in range(3)), 4) def test_distribute_cpu(self):", "setUp(self): self.link = Head(n_class=self.n_class, scales=(1 / 2, 1 / 4, 1 / 8))", "[ chainer.Variable(_random_array(self.link.xp, (2, 64, 32, 32))), chainer.Variable(_random_array(self.link.xp, (2, 64, 16, 16))), chainer.Variable(_random_array(self.link.xp, (2,", "_random_array(xp, (8, 4)), ] gt_labels = [ xp.random.randint(0, 80, size=5).astype(np.int32), xp.random.randint(0, 80, size=7).astype(np.int32),", "labels) self.assertEqual(len(rois), 3) self.assertEqual(len(roi_indices), 3) self.assertEqual(len(gt_locs), 3) self.assertEqual(len(gt_labels), 3) for l in range(3):", "] loc_loss, conf_loss = head_loss_post( locs, confs, roi_indices, gt_locs, gt_labels, 2) self.assertIsInstance(loc_loss, chainer.Variable)", "self.assertIsInstance(conf_loss, chainer.Variable) self.assertIsInstance(conf_loss.array, xp.ndarray) self.assertEqual(conf_loss.shape, ()) def test_head_loss_post_cpu(self): self._check_head_loss_post(np) @attr.gpu def test_head_loss_post_gpu(self): import", "chainer.Variable(_random_array(xp, (20, 81))) roi_indices = [ xp.random.randint(0, 2, size=5).astype(np.int32), xp.random.randint(0, 2, size=7).astype(np.int32), xp.random.randint(0,", 
"range(2): self.assertIsInstance(bboxes[n], self.link.xp.ndarray) self.assertIsInstance(labels[n], self.link.xp.ndarray) self.assertIsInstance(scores[n], self.link.xp.ndarray) self.assertEqual(bboxes[n].shape[0], labels[n].shape[0]) self.assertEqual(bboxes[n].shape[0], scores[n].shape[0]) self.assertEqual(bboxes[n].shape[1:], (4,))", "import division import numpy as np import unittest import chainer from chainer import", "for n in range(2): self.assertIsInstance(bboxes[n], self.link.xp.ndarray) self.assertIsInstance(labels[n], self.link.xp.ndarray) self.assertIsInstance(scores[n], self.link.xp.ndarray) self.assertEqual(bboxes[n].shape[0], labels[n].shape[0]) self.assertEqual(bboxes[n].shape[0],", "gt_labels = head_loss_pre( rois, roi_indices, (0.1, 0.2), bboxes, labels) self.assertEqual(len(rois), 3) self.assertEqual(len(roi_indices), 3)", "cupy self._check_head_loss_pre(cupy) def _check_head_loss_post(self, xp): locs = chainer.Variable(_random_array(xp, (20, 81, 4))) confs =", "= [ xp.array((10, 4), dtype=np.float32), xp.array((1,), dtype=np.float32), ] rois, roi_indices, gt_locs, gt_labels =", "self.assertEqual(len(scores), 2) for n in range(2): self.assertIsInstance(bboxes[n], self.link.xp.ndarray) self.assertIsInstance(labels[n], self.link.xp.ndarray) self.assertIsInstance(scores[n], self.link.xp.ndarray) self.assertEqual(bboxes[n].shape[0],", "8, 8))), ] rois = [ self.link.xp.array(((4, 1, 6, 3),), dtype=np.float32), self.link.xp.array( ((0,", "1, 0, 0), dtype=np.int32) rois, roi_indices = self.link.distribute(rois, roi_indices) self.assertEqual(len(rois), 3) self.assertEqual(len(roi_indices), 3)", "(200, 200)), 0.5, 0.1) self.assertEqual(len(bboxes), 2) self.assertEqual(len(labels), 2) self.assertEqual(len(scores), 2) for n in", "self.assertIsInstance(locs.array, self.link.xp.ndarray) self.assertEqual(locs.shape, (4, self.n_class, 4)) self.assertIsInstance(confs, chainer.Variable) self.assertIsInstance(confs.array, self.link.xp.ndarray) 
self.assertEqual(confs.shape, (4, self.n_class))", "3),), dtype=np.float32), self.link.xp.array( ((0, 1, 2, 3), (5, 4, 10, 6)), dtype=np.float32), self.link.xp.array(((10,", "_check_head_loss_pre(self, xp): rois = [ xp.array(((4, 1, 6, 3),), dtype=np.float32), xp.array( ((0, 1,", "self._check_distribute() @attr.gpu def test_distribute_gpu(self): self.link.to_gpu() self._check_distribute() def _check_decode(self): rois = [ self.link.xp.array(((4, 1,", "xp.array((1,), dtype=np.int32), ] bboxes = [ xp.array(((2, 4, 6, 7), (1, 12, 3,", "[ _random_array(xp, (5, 4)), _random_array(xp, (7, 4)), _random_array(xp, (8, 4)), ] gt_labels =", "import Head from chainercv.links.model.fpn import head_loss_post from chainercv.links.model.fpn import head_loss_pre def _random_array(xp, shape):", "l in range(3)), 4) def test_distribute_cpu(self): self._check_distribute() @attr.gpu def test_distribute_gpu(self): self.link.to_gpu() self._check_distribute() def", "dtype=np.int32), self.link.xp.array((1,), dtype=np.int32), ] locs = chainer.Variable(_random_array( self.link.xp, (4, self.n_class, 4))) confs =", "self.assertEqual(rois[l].shape[0], roi_indices[l].shape[0]) self.assertEqual(rois[l].shape[0], gt_locs[l].shape[0]) self.assertEqual(rois[l].shape[0], gt_labels[l].shape[0]) self.assertEqual(rois[l].shape[1:], (4,)) self.assertEqual(roi_indices[l].shape[1:], ()) self.assertEqual(gt_locs[l].shape[1:], (4,)) self.assertEqual(gt_labels[l].shape[1:],", "self.link.xp.ndarray) self.assertEqual(confs.shape, (4, self.n_class)) def test_call_cpu(self): self._check_call() @attr.gpu def test_call_gpu(self): self.link.to_gpu() self._check_call() def", "1, 2, 3), (5, 4, 10, 6)), dtype=np.float32), xp.array(((10, 4, 12, 10),), dtype=np.float32),", "= [ self.link.xp.array(((4, 1, 6, 3),), dtype=np.float32), self.link.xp.array( ((0, 1, 2, 3), (5,", "2, size=7).astype(np.int32), xp.random.randint(0, 2, size=8).astype(np.int32), ] gt_locs = [ _random_array(xp, (5, 4)), _random_array(xp,", 
"chainer.Variable) self.assertIsInstance(loc_loss.array, xp.ndarray) self.assertEqual(loc_loss.shape, ()) self.assertIsInstance(conf_loss, chainer.Variable) self.assertIsInstance(conf_loss.array, xp.ndarray) self.assertEqual(conf_loss.shape, ()) def test_head_loss_post_cpu(self):", "(5, 4, 10, 6)), dtype=np.float32), self.link.xp.array(((10, 4, 12, 10),), dtype=np.float32), ] roi_indices =", "0, 0), dtype=np.int32) rois, roi_indices = self.link.distribute(rois, roi_indices) self.assertEqual(len(rois), 3) self.assertEqual(len(roi_indices), 3) for", "rois = [ self.link.xp.array(((4, 1, 6, 3),), dtype=np.float32), self.link.xp.array( ((0, 1, 2, 3),", "attr from chainercv.links.model.fpn import Head from chainercv.links.model.fpn import head_loss_post from chainercv.links.model.fpn import head_loss_pre", "bboxes, labels) self.assertEqual(len(rois), 3) self.assertEqual(len(roi_indices), 3) self.assertEqual(len(gt_locs), 3) self.assertEqual(len(gt_labels), 3) for l in", "size=5).astype(np.int32), xp.random.randint(0, 80, size=7).astype(np.int32), xp.random.randint(0, 80, size=8).astype(np.int32), ] loc_loss, conf_loss = head_loss_post( locs,", "()) def test_head_loss_pre_cpu(self): self._check_head_loss_pre(np) @attr.gpu def test_head_loss_pre_gpu(self): import cupy self._check_head_loss_pre(cupy) def _check_head_loss_post(self, xp):", "def _check_head_loss_pre(self, xp): rois = [ xp.array(((4, 1, 6, 3),), dtype=np.float32), xp.array( ((0,", "0), dtype=np.int32), self.link.xp.array((1,), dtype=np.int32), ] locs = chainer.Variable(_random_array( self.link.xp, (4, self.n_class, 4))) confs", "gt_locs = [ _random_array(xp, (5, 4)), _random_array(xp, (7, 4)), _random_array(xp, (8, 4)), ]", "] roi_indices = [ self.link.xp.array((0,), dtype=np.int32), self.link.xp.array((1, 0), dtype=np.int32), self.link.xp.array((1,), dtype=np.int32), ] locs", "= chainer.Variable(_random_array( self.link.xp, (4, self.n_class, 4))) confs = chainer.Variable(_random_array( self.link.xp, (4, 
self.n_class))) bboxes,", "self.link.decode( rois, roi_indices, locs, confs, (0.4, 0.2), ((100, 100), (200, 200)), 0.5, 0.1)", "dtype=np.float32), xp.array((1,), dtype=np.float32), ] rois, roi_indices, gt_locs, gt_labels = head_loss_pre( rois, roi_indices, (0.1,", "80, size=5).astype(np.int32), xp.random.randint(0, 80, size=7).astype(np.int32), xp.random.randint(0, 80, size=8).astype(np.int32), ] loc_loss, conf_loss = head_loss_post(", "8)) def _check_call(self): hs = [ chainer.Variable(_random_array(self.link.xp, (2, 64, 32, 32))), chainer.Variable(_random_array(self.link.xp, (2,", "self.assertEqual(len(bboxes), 2) self.assertEqual(len(labels), 2) self.assertEqual(len(scores), 2) for n in range(2): self.assertIsInstance(bboxes[n], self.link.xp.ndarray) self.assertIsInstance(labels[n],", "4), dtype=np.float32), xp.array((1,), dtype=np.float32), ] rois, roi_indices, gt_locs, gt_labels = head_loss_pre( rois, roi_indices,", "self.n_class))) bboxes, labels, scores = self.link.decode( rois, roi_indices, locs, confs, (0.4, 0.2), ((100,", "self._check_head_loss_pre(np) @attr.gpu def test_head_loss_pre_gpu(self): import cupy self._check_head_loss_pre(cupy) def _check_head_loss_post(self, xp): locs = chainer.Variable(_random_array(xp,", "80, size=7).astype(np.int32), xp.random.randint(0, 80, size=8).astype(np.int32), ] loc_loss, conf_loss = head_loss_post( locs, confs, roi_indices,", "5 + 1}, {'n_class': 20 + 1}, ) class TestHead(unittest.TestCase): def setUp(self): self.link", "xp.array((1,), dtype=np.float32), ] rois, roi_indices, gt_locs, gt_labels = head_loss_pre( rois, roi_indices, (0.1, 0.2),", "chainercv.links.model.fpn import head_loss_pre def _random_array(xp, shape): return xp.array( np.random.uniform(-1, 1, size=shape), dtype=np.float32) @testing.parameterize(", "80, size=8).astype(np.int32), ] loc_loss, conf_loss = head_loss_post( locs, confs, roi_indices, gt_locs, gt_labels, 2)", "0, 224, 224), (100, 100, 224, 224), ), dtype=np.float32) roi_indices = 
self.link.xp.array((0, 1,", "4))) confs = chainer.Variable(_random_array(xp, (20, 81))) roi_indices = [ xp.random.randint(0, 2, size=5).astype(np.int32), xp.random.randint(0,", "def test_decode_gpu(self): self.link.to_gpu() self._check_decode() class TestHeadLoss(unittest.TestCase): def _check_head_loss_pre(self, xp): rois = [ xp.array(((4,", "confs = chainer.Variable(_random_array( self.link.xp, (4, self.n_class))) bboxes, labels, scores = self.link.decode( rois, roi_indices,", "xp.array(((10, 4, 12, 10),), dtype=np.float32), ] roi_indices = [ xp.array((0,), dtype=np.int32), xp.array((1, 0),", "4))) confs = chainer.Variable(_random_array( self.link.xp, (4, self.n_class))) bboxes, labels, scores = self.link.decode( rois,", "+ 1}, {'n_class': 20 + 1}, ) class TestHead(unittest.TestCase): def setUp(self): self.link =", "import cupy self._check_head_loss_pre(cupy) def _check_head_loss_post(self, xp): locs = chainer.Variable(_random_array(xp, (20, 81, 4))) confs", "12),), dtype=np.float32), ] labels = [ xp.array((10, 4), dtype=np.float32), xp.array((1,), dtype=np.float32), ] rois,", "1}, ) class TestHead(unittest.TestCase): def setUp(self): self.link = Head(n_class=self.n_class, scales=(1 / 2, 1", "3) for l in range(3): self.assertIsInstance(rois[l], xp.ndarray) self.assertIsInstance(roi_indices[l], xp.ndarray) self.assertIsInstance(gt_locs[l], xp.ndarray) self.assertIsInstance(gt_labels[l], xp.ndarray)", "(4,)) self.assertEqual(labels[n].shape[1:], ()) self.assertEqual(scores[n].shape[1:], ()) def test_decode_cpu(self): self._check_decode() @attr.gpu def test_decode_gpu(self): self.link.to_gpu() self._check_decode()", "self.assertIsInstance(rois[l], xp.ndarray) self.assertIsInstance(roi_indices[l], xp.ndarray) self.assertIsInstance(gt_locs[l], xp.ndarray) self.assertIsInstance(gt_labels[l], xp.ndarray) self.assertEqual(rois[l].shape[0], roi_indices[l].shape[0]) self.assertEqual(rois[l].shape[0], gt_locs[l].shape[0]) self.assertEqual(rois[l].shape[0],", "gt_locs, 
gt_labels, 2) self.assertIsInstance(loc_loss, chainer.Variable) self.assertIsInstance(loc_loss.array, xp.ndarray) self.assertEqual(loc_loss.shape, ()) self.assertIsInstance(conf_loss, chainer.Variable) self.assertIsInstance(conf_loss.array, xp.ndarray)", "self.link.xp.ndarray) self.assertIsInstance(labels[n], self.link.xp.ndarray) self.assertIsInstance(scores[n], self.link.xp.ndarray) self.assertEqual(bboxes[n].shape[0], labels[n].shape[0]) self.assertEqual(bboxes[n].shape[0], scores[n].shape[0]) self.assertEqual(bboxes[n].shape[1:], (4,)) self.assertEqual(labels[n].shape[1:], ())", "self.n_class)) def test_call_cpu(self): self._check_call() @attr.gpu def test_call_gpu(self): self.link.to_gpu() self._check_call() def _check_distribute(self): rois =", "roi_indices = self.link.xp.array((0, 1, 0, 0), dtype=np.int32) rois, roi_indices = self.link.distribute(rois, roi_indices) self.assertEqual(len(rois),", "4) def test_distribute_cpu(self): self._check_distribute() @attr.gpu def test_distribute_gpu(self): self.link.to_gpu() self._check_distribute() def _check_decode(self): rois =", "= [ xp.random.randint(0, 80, size=5).astype(np.int32), xp.random.randint(0, 80, size=7).astype(np.int32), xp.random.randint(0, 80, size=8).astype(np.int32), ] loc_loss,", "scales=(1 / 2, 1 / 4, 1 / 8)) def _check_call(self): hs =", "1 / 8)) def _check_call(self): hs = [ chainer.Variable(_random_array(self.link.xp, (2, 64, 32, 32))),", "(4, self.n_class)) def test_call_cpu(self): self._check_call() @attr.gpu def test_call_gpu(self): self.link.to_gpu() self._check_call() def _check_distribute(self): rois", "confs, (0.4, 0.2), ((100, 100), (200, 200)), 0.5, 0.1) self.assertEqual(len(bboxes), 2) self.assertEqual(len(labels), 2)", "/ 2, 1 / 4, 1 / 8)) def _check_call(self): hs = [", "@attr.gpu def test_call_gpu(self): self.link.to_gpu() self._check_call() def _check_distribute(self): rois = self.link.xp.array(( (0, 0, 10,", "3) self.assertEqual(len(roi_indices), 3) self.assertEqual(len(gt_locs), 
3) self.assertEqual(len(gt_labels), 3) for l in range(3): self.assertIsInstance(rois[l], xp.ndarray)", "200)), 0.5, 0.1) self.assertEqual(len(bboxes), 2) self.assertEqual(len(labels), 2) self.assertEqual(len(scores), 2) for n in range(2):", "head_loss_post( locs, confs, roi_indices, gt_locs, gt_labels, 2) self.assertIsInstance(loc_loss, chainer.Variable) self.assertIsInstance(loc_loss.array, xp.ndarray) self.assertEqual(loc_loss.shape, ())", "6, 7), (1, 12, 3, 30)), dtype=np.float32), xp.array(((10, 2, 12, 12),), dtype=np.float32), ]", "def test_distribute_gpu(self): self.link.to_gpu() self._check_distribute() def _check_decode(self): rois = [ self.link.xp.array(((4, 1, 6, 3),),", "_check_decode(self): rois = [ self.link.xp.array(((4, 1, 6, 3),), dtype=np.float32), self.link.xp.array( ((0, 1, 2,", "] bboxes = [ xp.array(((2, 4, 6, 7), (1, 12, 3, 30)), dtype=np.float32),", "self.assertEqual(bboxes[n].shape[1:], (4,)) self.assertEqual(labels[n].shape[1:], ()) self.assertEqual(scores[n].shape[1:], ()) def test_decode_cpu(self): self._check_decode() @attr.gpu def test_decode_gpu(self): self.link.to_gpu()", "gt_locs[l].shape[0]) self.assertEqual(rois[l].shape[0], gt_labels[l].shape[0]) self.assertEqual(rois[l].shape[1:], (4,)) self.assertEqual(roi_indices[l].shape[1:], ()) self.assertEqual(gt_locs[l].shape[1:], (4,)) self.assertEqual(gt_labels[l].shape[1:], ()) def test_head_loss_pre_cpu(self):", "= chainer.Variable(_random_array(xp, (20, 81))) roi_indices = [ xp.random.randint(0, 2, size=5).astype(np.int32), xp.random.randint(0, 2, size=7).astype(np.int32),", "head_loss_pre( rois, roi_indices, (0.1, 0.2), bboxes, labels) self.assertEqual(len(rois), 3) self.assertEqual(len(roi_indices), 3) self.assertEqual(len(gt_locs), 3)", "self.assertIsInstance(loc_loss, chainer.Variable) self.assertIsInstance(loc_loss.array, xp.ndarray) self.assertEqual(loc_loss.shape, ()) self.assertIsInstance(conf_loss, chainer.Variable) self.assertIsInstance(conf_loss.array, xp.ndarray) 
self.assertEqual(conf_loss.shape, ()) def", "0.5, 0.1) self.assertEqual(len(bboxes), 2) self.assertEqual(len(labels), 2) self.assertEqual(len(scores), 2) for n in range(2): self.assertIsInstance(bboxes[n],", "self.link = Head(n_class=self.n_class, scales=(1 / 2, 1 / 4, 1 / 8)) def", "numpy as np import unittest import chainer from chainer import testing from chainer.testing", "224, 224), (100, 100, 224, 224), ), dtype=np.float32) roi_indices = self.link.xp.array((0, 1, 0,", "def test_call_gpu(self): self.link.to_gpu() self._check_call() def _check_distribute(self): rois = self.link.xp.array(( (0, 0, 10, 10),", "xp.array(((10, 2, 12, 12),), dtype=np.float32), ] labels = [ xp.array((10, 4), dtype=np.float32), xp.array((1,),", "10),), dtype=np.float32), ] roi_indices = [ self.link.xp.array((0,), dtype=np.int32), self.link.xp.array((1, 0), dtype=np.int32), self.link.xp.array((1,), dtype=np.int32),", "= self.link(hs, rois, roi_indices) self.assertIsInstance(locs, chainer.Variable) self.assertIsInstance(locs.array, self.link.xp.ndarray) self.assertEqual(locs.shape, (4, self.n_class, 4)) self.assertIsInstance(confs,", "self.assertEqual(rois[l].shape[1:], (4,)) self.assertEqual(roi_indices[l].shape[1:], ()) self.assertEqual(sum(rois[l].shape[0] for l in range(3)), 4) def test_distribute_cpu(self): self._check_distribute()", "xp.ndarray) self.assertEqual(conf_loss.shape, ()) def test_head_loss_post_cpu(self): self._check_head_loss_post(np) @attr.gpu def test_head_loss_post_gpu(self): import cupy self._check_head_loss_post(cupy) testing.run_module(__name__,", "@attr.gpu def test_head_loss_pre_gpu(self): import cupy self._check_head_loss_pre(cupy) def _check_head_loss_post(self, xp): locs = chainer.Variable(_random_array(xp, (20,", "/ 8)) def _check_call(self): hs = [ chainer.Variable(_random_array(self.link.xp, (2, 64, 32, 32))), chainer.Variable(_random_array(self.link.xp,", "(0, 0, 224, 224), (100, 100, 224, 224), ), dtype=np.float32) roi_indices = self.link.xp.array((0,", 
"chainer.Variable(_random_array(self.link.xp, (2, 64, 16, 16))), chainer.Variable(_random_array(self.link.xp, (2, 64, 8, 8))), ] rois =", "224), (100, 100, 224, 224), ), dtype=np.float32) roi_indices = self.link.xp.array((0, 1, 0, 0),", "[ xp.random.randint(0, 2, size=5).astype(np.int32), xp.random.randint(0, 2, size=7).astype(np.int32), xp.random.randint(0, 2, size=8).astype(np.int32), ] gt_locs =", "2, 1 / 4, 1 / 8)) def _check_call(self): hs = [ chainer.Variable(_random_array(self.link.xp,", "locs, confs, (0.4, 0.2), ((100, 100), (200, 200)), 0.5, 0.1) self.assertEqual(len(bboxes), 2) self.assertEqual(len(labels),", "rois = [ xp.array(((4, 1, 6, 3),), dtype=np.float32), xp.array( ((0, 1, 2, 3),", "self._check_head_loss_pre(cupy) def _check_head_loss_post(self, xp): locs = chainer.Variable(_random_array(xp, (20, 81, 4))) confs = chainer.Variable(_random_array(xp,", "2) for n in range(2): self.assertIsInstance(bboxes[n], self.link.xp.ndarray) self.assertIsInstance(labels[n], self.link.xp.ndarray) self.assertIsInstance(scores[n], self.link.xp.ndarray) self.assertEqual(bboxes[n].shape[0], labels[n].shape[0])", "def _check_head_loss_post(self, xp): locs = chainer.Variable(_random_array(xp, (20, 81, 4))) confs = chainer.Variable(_random_array(xp, (20,", "(0, 1000, 0, 1000), (0, 0, 224, 224), (100, 100, 224, 224), ),", "()) def test_decode_cpu(self): self._check_decode() @attr.gpu def test_decode_gpu(self): self.link.to_gpu() self._check_decode() class TestHeadLoss(unittest.TestCase): def _check_head_loss_pre(self,", "self.link.xp, (4, self.n_class, 4))) confs = chainer.Variable(_random_array( self.link.xp, (4, self.n_class))) bboxes, labels, scores", "(1, 12, 3, 30)), dtype=np.float32), xp.array(((10, 2, 12, 12),), dtype=np.float32), ] labels =", "{'n_class': 20 + 1}, ) class TestHead(unittest.TestCase): def setUp(self): self.link = Head(n_class=self.n_class, scales=(1", "xp): rois = [ xp.array(((4, 1, 6, 3),), dtype=np.float32), xp.array( ((0, 1, 2,", 
"dtype=np.float32), ] roi_indices = [ self.link.xp.array((0,), dtype=np.int32), self.link.xp.array((1, 0), dtype=np.int32), self.link.xp.array((1,), dtype=np.int32), ]", "xp.ndarray) self.assertEqual(loc_loss.shape, ()) self.assertIsInstance(conf_loss, chainer.Variable) self.assertIsInstance(conf_loss.array, xp.ndarray) self.assertEqual(conf_loss.shape, ()) def test_head_loss_post_cpu(self): self._check_head_loss_post(np) @attr.gpu", "] gt_labels = [ xp.random.randint(0, 80, size=5).astype(np.int32), xp.random.randint(0, 80, size=7).astype(np.int32), xp.random.randint(0, 80, size=8).astype(np.int32),", "from chainercv.links.model.fpn import head_loss_pre def _random_array(xp, shape): return xp.array( np.random.uniform(-1, 1, size=shape), dtype=np.float32)", "30)), dtype=np.float32), xp.array(((10, 2, 12, 12),), dtype=np.float32), ] labels = [ xp.array((10, 4),", "def test_decode_cpu(self): self._check_decode() @attr.gpu def test_decode_gpu(self): self.link.to_gpu() self._check_decode() class TestHeadLoss(unittest.TestCase): def _check_head_loss_pre(self, xp):", "TestHeadLoss(unittest.TestCase): def _check_head_loss_pre(self, xp): rois = [ xp.array(((4, 1, 6, 3),), dtype=np.float32), xp.array(", "xp.array((1, 0), dtype=np.int32), xp.array((1,), dtype=np.int32), ] bboxes = [ xp.array(((2, 4, 6, 7),", "3), (5, 4, 10, 6)), dtype=np.float32), self.link.xp.array(((10, 4, 12, 10),), dtype=np.float32), ] roi_indices", "gt_labels, 2) self.assertIsInstance(loc_loss, chainer.Variable) self.assertIsInstance(loc_loss.array, xp.ndarray) self.assertEqual(loc_loss.shape, ()) self.assertIsInstance(conf_loss, chainer.Variable) self.assertIsInstance(conf_loss.array, xp.ndarray) self.assertEqual(conf_loss.shape,", "= self.link.xp.array(( (0, 0, 10, 10), (0, 1000, 0, 1000), (0, 0, 224,", "dtype=np.float32) @testing.parameterize( {'n_class': 1 + 1}, {'n_class': 5 + 1}, {'n_class': 20 +", "(0.4, 0.2), ((100, 100), (200, 200)), 0.5, 0.1) self.assertEqual(len(bboxes), 2) 
self.assertEqual(len(labels), 2) self.assertEqual(len(scores),", ") class TestHead(unittest.TestCase): def setUp(self): self.link = Head(n_class=self.n_class, scales=(1 / 2, 1 /", "12, 12),), dtype=np.float32), ] labels = [ xp.array((10, 4), dtype=np.float32), xp.array((1,), dtype=np.float32), ]", "import chainer from chainer import testing from chainer.testing import attr from chainercv.links.model.fpn import", "in range(3)), 4) def test_distribute_cpu(self): self._check_distribute() @attr.gpu def test_distribute_gpu(self): self.link.to_gpu() self._check_distribute() def _check_decode(self):", "(20, 81, 4))) confs = chainer.Variable(_random_array(xp, (20, 81))) roi_indices = [ xp.random.randint(0, 2,", "testing from chainer.testing import attr from chainercv.links.model.fpn import Head from chainercv.links.model.fpn import head_loss_post", "4)), _random_array(xp, (8, 4)), ] gt_labels = [ xp.random.randint(0, 80, size=5).astype(np.int32), xp.random.randint(0, 80,", "2, 12, 12),), dtype=np.float32), ] labels = [ xp.array((10, 4), dtype=np.float32), xp.array((1,), dtype=np.float32),", "] gt_locs = [ _random_array(xp, (5, 4)), _random_array(xp, (7, 4)), _random_array(xp, (8, 4)),", "self.link.xp.ndarray) self.assertIsInstance(scores[n], self.link.xp.ndarray) self.assertEqual(bboxes[n].shape[0], labels[n].shape[0]) self.assertEqual(bboxes[n].shape[0], scores[n].shape[0]) self.assertEqual(bboxes[n].shape[1:], (4,)) self.assertEqual(labels[n].shape[1:], ()) self.assertEqual(scores[n].shape[1:], ())", "chainer.Variable(_random_array(self.link.xp, (2, 64, 8, 8))), ] rois = [ self.link.xp.array(((4, 1, 6, 3),),", "(5, 4)), _random_array(xp, (7, 4)), _random_array(xp, (8, 4)), ] gt_labels = [ xp.random.randint(0,", "4, 12, 10),), dtype=np.float32), ] roi_indices = [ xp.array((0,), dtype=np.int32), xp.array((1, 0), dtype=np.int32),", "] rois = [ self.link.xp.array(((4, 1, 6, 3),), dtype=np.float32), self.link.xp.array( ((0, 1, 2,", "xp.random.randint(0, 80, 
size=5).astype(np.int32), xp.random.randint(0, 80, size=7).astype(np.int32), xp.random.randint(0, 80, size=8).astype(np.int32), ] loc_loss, conf_loss =", "head_loss_pre def _random_array(xp, shape): return xp.array( np.random.uniform(-1, 1, size=shape), dtype=np.float32) @testing.parameterize( {'n_class': 1", "(8, 4)), ] gt_labels = [ xp.random.randint(0, 80, size=5).astype(np.int32), xp.random.randint(0, 80, size=7).astype(np.int32), xp.random.randint(0,", "self.link.distribute(rois, roi_indices) self.assertEqual(len(rois), 3) self.assertEqual(len(roi_indices), 3) for l in range(3): self.assertIsInstance(rois[l], self.link.xp.ndarray) self.assertIsInstance(roi_indices[l],", "self.assertIsInstance(roi_indices[l], xp.ndarray) self.assertIsInstance(gt_locs[l], xp.ndarray) self.assertIsInstance(gt_labels[l], xp.ndarray) self.assertEqual(rois[l].shape[0], roi_indices[l].shape[0]) self.assertEqual(rois[l].shape[0], gt_locs[l].shape[0]) self.assertEqual(rois[l].shape[0], gt_labels[l].shape[0]) self.assertEqual(rois[l].shape[1:],", "3, 30)), dtype=np.float32), xp.array(((10, 2, 12, 12),), dtype=np.float32), ] labels = [ xp.array((10,", "(5, 4, 10, 6)), dtype=np.float32), xp.array(((10, 4, 12, 10),), dtype=np.float32), ] roi_indices =", "3) self.assertEqual(len(gt_locs), 3) self.assertEqual(len(gt_labels), 3) for l in range(3): self.assertIsInstance(rois[l], xp.ndarray) self.assertIsInstance(roi_indices[l], xp.ndarray)", "dtype=np.int32), ] locs = chainer.Variable(_random_array( self.link.xp, (4, self.n_class, 4))) confs = chainer.Variable(_random_array( self.link.xp,", "roi_indices, locs, confs, (0.4, 0.2), ((100, 100), (200, 200)), 0.5, 0.1) self.assertEqual(len(bboxes), 2)", "0), dtype=np.int32), xp.array((1,), dtype=np.int32), ] bboxes = [ xp.array(((2, 4, 6, 7), (1,", "labels = [ xp.array((10, 4), dtype=np.float32), xp.array((1,), dtype=np.float32), ] rois, roi_indices, gt_locs, gt_labels", "0.2), bboxes, labels) self.assertEqual(len(rois), 3) 
self.assertEqual(len(roi_indices), 3) self.assertEqual(len(gt_locs), 3) self.assertEqual(len(gt_labels), 3) for l", "224, 224), ), dtype=np.float32) roi_indices = self.link.xp.array((0, 1, 0, 0), dtype=np.int32) rois, roi_indices", "self.assertEqual(len(rois), 3) self.assertEqual(len(roi_indices), 3) self.assertEqual(len(gt_locs), 3) self.assertEqual(len(gt_labels), 3) for l in range(3): self.assertIsInstance(rois[l],", "test_call_cpu(self): self._check_call() @attr.gpu def test_call_gpu(self): self.link.to_gpu() self._check_call() def _check_distribute(self): rois = self.link.xp.array(( (0,", "] roi_indices = [ self.link.xp.array((0,), dtype=np.int32), self.link.xp.array((1, 0), dtype=np.int32), self.link.xp.array((1,), dtype=np.int32), ] locs,", "self.assertEqual(roi_indices[l].shape[1:], ()) self.assertEqual(sum(rois[l].shape[0] for l in range(3)), 4) def test_distribute_cpu(self): self._check_distribute() @attr.gpu def", "in range(2): self.assertIsInstance(bboxes[n], self.link.xp.ndarray) self.assertIsInstance(labels[n], self.link.xp.ndarray) self.assertIsInstance(scores[n], self.link.xp.ndarray) self.assertEqual(bboxes[n].shape[0], labels[n].shape[0]) self.assertEqual(bboxes[n].shape[0], scores[n].shape[0]) self.assertEqual(bboxes[n].shape[1:],", "0.1) self.assertEqual(len(bboxes), 2) self.assertEqual(len(labels), 2) self.assertEqual(len(scores), 2) for n in range(2): self.assertIsInstance(bboxes[n], self.link.xp.ndarray)", "self.assertEqual(len(roi_indices), 3) for l in range(3): self.assertIsInstance(rois[l], self.link.xp.ndarray) self.assertIsInstance(roi_indices[l], self.link.xp.ndarray) self.assertEqual(rois[l].shape[0], roi_indices[l].shape[0]) self.assertEqual(rois[l].shape[1:],", "range(3): self.assertIsInstance(rois[l], self.link.xp.ndarray) self.assertIsInstance(roi_indices[l], self.link.xp.ndarray) self.assertEqual(rois[l].shape[0], roi_indices[l].shape[0]) self.assertEqual(rois[l].shape[1:], (4,)) self.assertEqual(roi_indices[l].shape[1:], ()) 
self.assertEqual(sum(rois[l].shape[0] for", "(2, 64, 32, 32))), chainer.Variable(_random_array(self.link.xp, (2, 64, 16, 16))), chainer.Variable(_random_array(self.link.xp, (2, 64, 8,", "for l in range(3)), 4) def test_distribute_cpu(self): self._check_distribute() @attr.gpu def test_distribute_gpu(self): self.link.to_gpu() self._check_distribute()", "), dtype=np.float32) roi_indices = self.link.xp.array((0, 1, 0, 0), dtype=np.int32) rois, roi_indices = self.link.distribute(rois,", "self.assertEqual(bboxes[n].shape[0], scores[n].shape[0]) self.assertEqual(bboxes[n].shape[1:], (4,)) self.assertEqual(labels[n].shape[1:], ()) self.assertEqual(scores[n].shape[1:], ()) def test_decode_cpu(self): self._check_decode() @attr.gpu def", "= [ xp.array(((2, 4, 6, 7), (1, 12, 3, 30)), dtype=np.float32), xp.array(((10, 2,", "dtype=np.int32), ] locs, confs = self.link(hs, rois, roi_indices) self.assertIsInstance(locs, chainer.Variable) self.assertIsInstance(locs.array, self.link.xp.ndarray) self.assertEqual(locs.shape,", "@attr.gpu def test_distribute_gpu(self): self.link.to_gpu() self._check_distribute() def _check_decode(self): rois = [ self.link.xp.array(((4, 1, 6,", "dtype=np.float32), xp.array( ((0, 1, 2, 3), (5, 4, 10, 6)), dtype=np.float32), xp.array(((10, 4,", "[ self.link.xp.array((0,), dtype=np.int32), self.link.xp.array((1, 0), dtype=np.int32), self.link.xp.array((1,), dtype=np.int32), ] locs = chainer.Variable(_random_array( self.link.xp,", "hs = [ chainer.Variable(_random_array(self.link.xp, (2, 64, 32, 32))), chainer.Variable(_random_array(self.link.xp, (2, 64, 16, 16))),", "def _random_array(xp, shape): return xp.array( np.random.uniform(-1, 1, size=shape), dtype=np.float32) @testing.parameterize( {'n_class': 1 +", "self.link.xp.ndarray) self.assertEqual(locs.shape, (4, self.n_class, 4)) self.assertIsInstance(confs, chainer.Variable) self.assertIsInstance(confs.array, self.link.xp.ndarray) self.assertEqual(confs.shape, (4, self.n_class)) def", 
"size=8).astype(np.int32), ] gt_locs = [ _random_array(xp, (5, 4)), _random_array(xp, (7, 4)), _random_array(xp, (8,", "dtype=np.int32), self.link.xp.array((1, 0), dtype=np.int32), self.link.xp.array((1,), dtype=np.int32), ] locs = chainer.Variable(_random_array( self.link.xp, (4, self.n_class,", "()) self.assertEqual(gt_locs[l].shape[1:], (4,)) self.assertEqual(gt_labels[l].shape[1:], ()) def test_head_loss_pre_cpu(self): self._check_head_loss_pre(np) @attr.gpu def test_head_loss_pre_gpu(self): import cupy", "dtype=np.int32), xp.array((1, 0), dtype=np.int32), xp.array((1,), dtype=np.int32), ] bboxes = [ xp.array(((2, 4, 6,", "3) for l in range(3): self.assertIsInstance(rois[l], self.link.xp.ndarray) self.assertIsInstance(roi_indices[l], self.link.xp.ndarray) self.assertEqual(rois[l].shape[0], roi_indices[l].shape[0]) self.assertEqual(rois[l].shape[1:], (4,))", "gt_labels = [ xp.random.randint(0, 80, size=5).astype(np.int32), xp.random.randint(0, 80, size=7).astype(np.int32), xp.random.randint(0, 80, size=8).astype(np.int32), ]", "self.link.xp.ndarray) self.assertIsInstance(roi_indices[l], self.link.xp.ndarray) self.assertEqual(rois[l].shape[0], roi_indices[l].shape[0]) self.assertEqual(rois[l].shape[1:], (4,)) self.assertEqual(roi_indices[l].shape[1:], ()) self.assertEqual(sum(rois[l].shape[0] for l in", "= chainer.Variable(_random_array(xp, (20, 81, 4))) confs = chainer.Variable(_random_array(xp, (20, 81))) roi_indices = [", "xp.ndarray) self.assertEqual(rois[l].shape[0], roi_indices[l].shape[0]) self.assertEqual(rois[l].shape[0], gt_locs[l].shape[0]) self.assertEqual(rois[l].shape[0], gt_labels[l].shape[0]) self.assertEqual(rois[l].shape[1:], (4,)) self.assertEqual(roi_indices[l].shape[1:], ()) self.assertEqual(gt_locs[l].shape[1:], (4,))", "self.assertEqual(len(gt_labels), 3) for l in range(3): self.assertIsInstance(rois[l], xp.ndarray) self.assertIsInstance(roi_indices[l], xp.ndarray) self.assertIsInstance(gt_locs[l], xp.ndarray) 
self.assertIsInstance(gt_labels[l],", "roi_indices = [ xp.random.randint(0, 2, size=5).astype(np.int32), xp.random.randint(0, 2, size=7).astype(np.int32), xp.random.randint(0, 2, size=8).astype(np.int32), ]", "Head(n_class=self.n_class, scales=(1 / 2, 1 / 4, 1 / 8)) def _check_call(self): hs", "dtype=np.float32), xp.array(((10, 2, 12, 12),), dtype=np.float32), ] labels = [ xp.array((10, 4), dtype=np.float32),", "in range(3): self.assertIsInstance(rois[l], self.link.xp.ndarray) self.assertIsInstance(roi_indices[l], self.link.xp.ndarray) self.assertEqual(rois[l].shape[0], roi_indices[l].shape[0]) self.assertEqual(rois[l].shape[1:], (4,)) self.assertEqual(roi_indices[l].shape[1:], ()) self.assertEqual(sum(rois[l].shape[0]", "self.assertEqual(roi_indices[l].shape[1:], ()) self.assertEqual(gt_locs[l].shape[1:], (4,)) self.assertEqual(gt_labels[l].shape[1:], ()) def test_head_loss_pre_cpu(self): self._check_head_loss_pre(np) @attr.gpu def test_head_loss_pre_gpu(self): import", "224), ), dtype=np.float32) roi_indices = self.link.xp.array((0, 1, 0, 0), dtype=np.int32) rois, roi_indices =", "class TestHead(unittest.TestCase): def setUp(self): self.link = Head(n_class=self.n_class, scales=(1 / 2, 1 / 4,", "roi_indices = [ self.link.xp.array((0,), dtype=np.int32), self.link.xp.array((1, 0), dtype=np.int32), self.link.xp.array((1,), dtype=np.int32), ] locs, confs", "/ 4, 1 / 8)) def _check_call(self): hs = [ chainer.Variable(_random_array(self.link.xp, (2, 64,", "self.assertIsInstance(roi_indices[l], self.link.xp.ndarray) self.assertEqual(rois[l].shape[0], roi_indices[l].shape[0]) self.assertEqual(rois[l].shape[1:], (4,)) self.assertEqual(roi_indices[l].shape[1:], ()) self.assertEqual(sum(rois[l].shape[0] for l in range(3)),", "chainer import testing from chainer.testing import attr from chainercv.links.model.fpn import Head from chainercv.links.model.fpn", "locs = chainer.Variable(_random_array(xp, (20, 81, 4))) confs = chainer.Variable(_random_array(xp, (20, 81))) 
roi_indices =", "20 + 1}, ) class TestHead(unittest.TestCase): def setUp(self): self.link = Head(n_class=self.n_class, scales=(1 /", "chainer.Variable) self.assertIsInstance(locs.array, self.link.xp.ndarray) self.assertEqual(locs.shape, (4, self.n_class, 4)) self.assertIsInstance(confs, chainer.Variable) self.assertIsInstance(confs.array, self.link.xp.ndarray) self.assertEqual(confs.shape, (4,", "def test_head_loss_pre_gpu(self): import cupy self._check_head_loss_pre(cupy) def _check_head_loss_post(self, xp): locs = chainer.Variable(_random_array(xp, (20, 81,", "from chainercv.links.model.fpn import head_loss_post from chainercv.links.model.fpn import head_loss_pre def _random_array(xp, shape): return xp.array(", "((100, 100), (200, 200)), 0.5, 0.1) self.assertEqual(len(bboxes), 2) self.assertEqual(len(labels), 2) self.assertEqual(len(scores), 2) for", "self.assertEqual(gt_locs[l].shape[1:], (4,)) self.assertEqual(gt_labels[l].shape[1:], ()) def test_head_loss_pre_cpu(self): self._check_head_loss_pre(np) @attr.gpu def test_head_loss_pre_gpu(self): import cupy self._check_head_loss_pre(cupy)", "self.link.xp, (4, self.n_class))) bboxes, labels, scores = self.link.decode( rois, roi_indices, locs, confs, (0.4,", "4)) self.assertIsInstance(confs, chainer.Variable) self.assertIsInstance(confs.array, self.link.xp.ndarray) self.assertEqual(confs.shape, (4, self.n_class)) def test_call_cpu(self): self._check_call() @attr.gpu def", "chainer.Variable(_random_array( self.link.xp, (4, self.n_class))) bboxes, labels, scores = self.link.decode( rois, roi_indices, locs, confs,", "self.assertEqual(bboxes[n].shape[0], labels[n].shape[0]) self.assertEqual(bboxes[n].shape[0], scores[n].shape[0]) self.assertEqual(bboxes[n].shape[1:], (4,)) self.assertEqual(labels[n].shape[1:], ()) self.assertEqual(scores[n].shape[1:], ()) def test_decode_cpu(self): self._check_decode()", "dtype=np.float32) roi_indices = self.link.xp.array((0, 1, 0, 0), dtype=np.int32) rois, roi_indices = 
self.link.distribute(rois, roi_indices)", "test_decode_gpu(self): self.link.to_gpu() self._check_decode() class TestHeadLoss(unittest.TestCase): def _check_head_loss_pre(self, xp): rois = [ xp.array(((4, 1,", "self.assertEqual(sum(rois[l].shape[0] for l in range(3)), 4) def test_distribute_cpu(self): self._check_distribute() @attr.gpu def test_distribute_gpu(self): self.link.to_gpu()", "dtype=np.float32), ] rois, roi_indices, gt_locs, gt_labels = head_loss_pre( rois, roi_indices, (0.1, 0.2), bboxes,", "= [ xp.random.randint(0, 2, size=5).astype(np.int32), xp.random.randint(0, 2, size=7).astype(np.int32), xp.random.randint(0, 2, size=8).astype(np.int32), ] gt_locs", "test_call_gpu(self): self.link.to_gpu() self._check_call() def _check_distribute(self): rois = self.link.xp.array(( (0, 0, 10, 10), (0,", "4, 10, 6)), dtype=np.float32), xp.array(((10, 4, 12, 10),), dtype=np.float32), ] roi_indices = [", "= [ self.link.xp.array((0,), dtype=np.int32), self.link.xp.array((1, 0), dtype=np.int32), self.link.xp.array((1,), dtype=np.int32), ] locs = chainer.Variable(_random_array(", "(0.1, 0.2), bboxes, labels) self.assertEqual(len(rois), 3) self.assertEqual(len(roi_indices), 3) self.assertEqual(len(gt_locs), 3) self.assertEqual(len(gt_labels), 3) for", "3) self.assertEqual(len(gt_labels), 3) for l in range(3): self.assertIsInstance(rois[l], xp.ndarray) self.assertIsInstance(roi_indices[l], xp.ndarray) self.assertIsInstance(gt_locs[l], xp.ndarray)", "2) self.assertEqual(len(scores), 2) for n in range(2): self.assertIsInstance(bboxes[n], self.link.xp.ndarray) self.assertIsInstance(labels[n], self.link.xp.ndarray) self.assertIsInstance(scores[n], self.link.xp.ndarray)", "self.n_class, 4)) self.assertIsInstance(confs, chainer.Variable) self.assertIsInstance(confs.array, self.link.xp.ndarray) self.assertEqual(confs.shape, (4, self.n_class)) def test_call_cpu(self): self._check_call() @attr.gpu", "range(3): self.assertIsInstance(rois[l], xp.ndarray) 
self.assertIsInstance(roi_indices[l], xp.ndarray) self.assertIsInstance(gt_locs[l], xp.ndarray) self.assertIsInstance(gt_labels[l], xp.ndarray) self.assertEqual(rois[l].shape[0], roi_indices[l].shape[0]) self.assertEqual(rois[l].shape[0], gt_locs[l].shape[0])", "()) self.assertIsInstance(conf_loss, chainer.Variable) self.assertIsInstance(conf_loss.array, xp.ndarray) self.assertEqual(conf_loss.shape, ()) def test_head_loss_post_cpu(self): self._check_head_loss_post(np) @attr.gpu def test_head_loss_post_gpu(self):", "roi_indices, (0.1, 0.2), bboxes, labels) self.assertEqual(len(rois), 3) self.assertEqual(len(roi_indices), 3) self.assertEqual(len(gt_locs), 3) self.assertEqual(len(gt_labels), 3)", "import numpy as np import unittest import chainer from chainer import testing from", "1 / 4, 1 / 8)) def _check_call(self): hs = [ chainer.Variable(_random_array(self.link.xp, (2,", "0, 1000), (0, 0, 224, 224), (100, 100, 224, 224), ), dtype=np.float32) roi_indices", "6, 3),), dtype=np.float32), xp.array( ((0, 1, 2, 3), (5, 4, 10, 6)), dtype=np.float32),", "self.assertIsInstance(confs.array, self.link.xp.ndarray) self.assertEqual(confs.shape, (4, self.n_class)) def test_call_cpu(self): self._check_call() @attr.gpu def test_call_gpu(self): self.link.to_gpu() self._check_call()", "rois, roi_indices, locs, confs, (0.4, 0.2), ((100, 100), (200, 200)), 0.5, 0.1) self.assertEqual(len(bboxes),", "confs = self.link(hs, rois, roi_indices) self.assertIsInstance(locs, chainer.Variable) self.assertIsInstance(locs.array, self.link.xp.ndarray) self.assertEqual(locs.shape, (4, self.n_class, 4))", "self.assertIsInstance(scores[n], self.link.xp.ndarray) self.assertEqual(bboxes[n].shape[0], labels[n].shape[0]) self.assertEqual(bboxes[n].shape[0], scores[n].shape[0]) self.assertEqual(bboxes[n].shape[1:], (4,)) self.assertEqual(labels[n].shape[1:], ()) self.assertEqual(scores[n].shape[1:], ()) def", "2) self.assertIsInstance(loc_loss, chainer.Variable) self.assertIsInstance(loc_loss.array, 
xp.ndarray) self.assertEqual(loc_loss.shape, ()) self.assertIsInstance(conf_loss, chainer.Variable) self.assertIsInstance(conf_loss.array, xp.ndarray) self.assertEqual(conf_loss.shape, ())", "dtype=np.float32), ] labels = [ xp.array((10, 4), dtype=np.float32), xp.array((1,), dtype=np.float32), ] rois, roi_indices,", "[ xp.array(((2, 4, 6, 7), (1, 12, 3, 30)), dtype=np.float32), xp.array(((10, 2, 12,", "roi_indices) self.assertEqual(len(rois), 3) self.assertEqual(len(roi_indices), 3) for l in range(3): self.assertIsInstance(rois[l], self.link.xp.ndarray) self.assertIsInstance(roi_indices[l], self.link.xp.ndarray)", "self.link.xp.array((0,), dtype=np.int32), self.link.xp.array((1, 0), dtype=np.int32), self.link.xp.array((1,), dtype=np.int32), ] locs, confs = self.link(hs, rois,", "bboxes = [ xp.array(((2, 4, 6, 7), (1, 12, 3, 30)), dtype=np.float32), xp.array(((10,", "[ self.link.xp.array(((4, 1, 6, 3),), dtype=np.float32), self.link.xp.array( ((0, 1, 2, 3), (5, 4,", "{'n_class': 1 + 1}, {'n_class': 5 + 1}, {'n_class': 20 + 1}, )", "0), dtype=np.int32), self.link.xp.array((1,), dtype=np.int32), ] locs, confs = self.link(hs, rois, roi_indices) self.assertIsInstance(locs, chainer.Variable)", "import testing from chainer.testing import attr from chainercv.links.model.fpn import Head from chainercv.links.model.fpn import", "division import numpy as np import unittest import chainer from chainer import testing", "(4, self.n_class, 4))) confs = chainer.Variable(_random_array( self.link.xp, (4, self.n_class))) bboxes, labels, scores =", "l in range(3): self.assertIsInstance(rois[l], xp.ndarray) self.assertIsInstance(roi_indices[l], xp.ndarray) self.assertIsInstance(gt_locs[l], xp.ndarray) self.assertIsInstance(gt_labels[l], xp.ndarray) self.assertEqual(rois[l].shape[0], roi_indices[l].shape[0])", "(4, self.n_class))) bboxes, labels, scores = self.link.decode( rois, roi_indices, locs, confs, (0.4, 0.2),", "self.link(hs, rois, roi_indices) self.assertIsInstance(locs, 
chainer.Variable) self.assertIsInstance(locs.array, self.link.xp.ndarray) self.assertEqual(locs.shape, (4, self.n_class, 4)) self.assertIsInstance(confs, chainer.Variable)", "_random_array(xp, shape): return xp.array( np.random.uniform(-1, 1, size=shape), dtype=np.float32) @testing.parameterize( {'n_class': 1 + 1},", "range(3)), 4) def test_distribute_cpu(self): self._check_distribute() @attr.gpu def test_distribute_gpu(self): self.link.to_gpu() self._check_distribute() def _check_decode(self): rois" ]
[ "KIND, either express or implied. # See the License for the specific language", "Unless required by applicable law or agreed to in writing, software # distributed", "type_name with ArtifactType. # TODO(b/125348988): Add support for real Channel in addition to", "\"\"\" if self.type_name != expected_type_name: raise TypeError('Expected {} but found {}.'.format(expected_type_name, str(self.type_name))) def", "__future__ import print_function import collections from typing import Iterable from typing import Optional", "a static Artifact collection. return self._artifacts def type_check(self, expected_type_name: Text) -> None: \"\"\"Checks", "\"\"\"Tfx Channel. TFX Channel is an abstract concept that connects data producers and", "raise TypeError('Expected {} but found {}.'.format(expected_type_name, str(self.type_name))) def as_channel(source: Union[Channel, Iterable[types.TfxArtifact]]) -> Channel:", "for the specific language governing permissions and # limitations under the License. \"\"\"TFX", "for real Channel in addition to static ones. def __init__(self, type_name: Text, artifacts:", "found {}.'.format(expected_type_name, str(self.type_name))) def as_channel(source: Union[Channel, Iterable[types.TfxArtifact]]) -> Channel: \"\"\"Converts artifact collection of", "str(self.type_name))) def as_channel(source: Union[Channel, Iterable[types.TfxArtifact]]) -> Channel: \"\"\"Converts artifact collection of the same", "this file except in compliance with the License. # You may obtain a", "types class Channel(object): \"\"\"Tfx Channel. TFX Channel is an abstract concept that connects", "LLC. All Rights Reserved. # # Licensed under the Apache License, Version 2.0", "query against a Channel # instead of a static Artifact collection. 
return self._artifacts", "elif isinstance(source, collections.Iterable): try: first_element = next(iter(source)) if isinstance(first_element, types.TfxArtifact): return Channel( type_name=first_element.type_name,", "import Optional from typing import Text from typing import Union from tfx.utils import", "Channel's artifact type {}\" .format(self.type_name)) def get(self) -> Iterable[types.TfxArtifact]: \"\"\"Returns all artifacts that", "from tfx.utils import types class Channel(object): \"\"\"Tfx Channel. TFX Channel is an abstract", "ANY KIND, either express or implied. # See the License for the specific", "[] self._validate_type() def __str__(self): return 'Channel<{}: {}>'.format(self.type_name, self._artifacts) def __repr__(self): return self.__str__() def", "from this Channel. Returns: An artifact collection. \"\"\" # TODO(b/125037186): We should support", "self._artifacts def type_check(self, expected_type_name: Text) -> None: \"\"\"Checks whether a Channel has the", "Args: expected_type_name: Expected type_name to check against. Raises: TypeError if the type_name of", "\"\"\" self.type_name = type_name self._artifacts = artifacts or [] self._validate_type() def __str__(self): return", "from typing import Iterable from typing import Optional from typing import Text from", "in self._artifacts: if artifact.type_name != self.type_name: raise ValueError( \"Artifacts provided do not match", "self._validate_type() def __str__(self): return 'Channel<{}: {}>'.format(self.type_name, self._artifacts) def __repr__(self): return self.__str__() def _validate_type(self)", "containing the source artifact collection. Raises: ValueError when source is not a non-empty", "Expected type_name to check against. 
Raises: TypeError if the type_name of given Channel", "collection of artifacts as the values that can be read from the Channel.", "source to be a channel: {}'.format(source)) except StopIteration: raise ValueError('Cannot convert empty artifact", "Channel): return source elif isinstance(source, collections.Iterable): try: first_element = next(iter(source)) if isinstance(first_element, types.TfxArtifact):", "addition to static ones. def __init__(self, type_name: Text, artifacts: Optional[Iterable[types.TfxArtifact]] = None): \"\"\"Initialization", "WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See", "contains restriction of the artifact type that should be fed into or read", "TODO(b/124763842): Consider replace type_name with ArtifactType. # TODO(b/125348988): Add support for real Channel", "typing import Iterable from typing import Optional from typing import Text from typing", "a Channel or an iterable of TfxArtifact. Returns: A static Channel containing the", "under the License. \"\"\"TFX Channel definition.\"\"\" from __future__ import absolute_import from __future__ import", "IS\" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or", "from the Channel. This is used to construct a static Channel. \"\"\" self.type_name", "takes. \"\"\" # TODO(b/124763842): Consider replace type_name with ArtifactType. # TODO(b/125348988): Add support", "OF ANY KIND, either express or implied. # See the License for the", "from the Channel. artifacts: (Optional) A collection of artifacts as the values that", "Artifact collection. return self._artifacts def type_check(self, expected_type_name: Text) -> None: \"\"\"Checks whether a", "__repr__(self): return self.__str__() def _validate_type(self) -> None: for artifact in self._artifacts: if artifact.type_name", "Returns: A static Channel containing the source artifact collection. 
Raises: ValueError when source", "\"\"\"TFX Channel definition.\"\"\" from __future__ import absolute_import from __future__ import division from __future__", "static ones. def __init__(self, type_name: Text, artifacts: Optional[Iterable[types.TfxArtifact]] = None): \"\"\"Initialization of Channel.", "ValueError( \"Artifacts provided do not match Channel's artifact type {}\" .format(self.type_name)) def get(self)", "restriction of the artifact type that should be fed into or read from", "Channel( type_name=first_element.type_name, artifacts=source) else: raise ValueError('Invalid source to be a channel: {}'.format(source)) except", "a non-empty iterable of TfxArtifact. \"\"\" if isinstance(source, Channel): return source elif isinstance(source,", "return source elif isinstance(source, collections.Iterable): try: first_element = next(iter(source)) if isinstance(first_element, types.TfxArtifact): return", "All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the", "-> None: \"\"\"Checks whether a Channel has the expected type name. Args: expected_type_name:", "as_channel(source: Union[Channel, Iterable[types.TfxArtifact]]) -> Channel: \"\"\"Converts artifact collection of the same artifact type", "if isinstance(first_element, types.TfxArtifact): return Channel( type_name=first_element.type_name, artifacts=source) else: raise ValueError('Invalid source to be", "type into a Channel. Args: source: Either a Channel or an iterable of", "software # distributed under the License is distributed on an \"AS IS\" BASIS,", "absolute_import from __future__ import division from __future__ import print_function import collections from typing", "try: first_element = next(iter(source)) if isinstance(first_element, types.TfxArtifact): return Channel( type_name=first_element.type_name, artifacts=source) else: raise", "connects data producers and data consumers. 
It contains restriction of the artifact type", "\"\"\" # TODO(b/124763842): Consider replace type_name with ArtifactType. # TODO(b/125348988): Add support for", "def type_check(self, expected_type_name: Text) -> None: \"\"\"Checks whether a Channel has the expected", "# # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to", "Optional[Iterable[types.TfxArtifact]] = None): \"\"\"Initialization of Channel. Args: type_name: Name of the type that", "typing import Union from tfx.utils import types class Channel(object): \"\"\"Tfx Channel. TFX Channel", "artifact.type_name != self.type_name: raise ValueError( \"Artifacts provided do not match Channel's artifact type", "from typing import Optional from typing import Text from typing import Union from", "for artifact in self._artifacts: if artifact.type_name != self.type_name: raise ValueError( \"Artifacts provided do", "from typing import Text from typing import Union from tfx.utils import types class", "type the Channel takes. \"\"\" # TODO(b/124763842): Consider replace type_name with ArtifactType. #", "def __init__(self, type_name: Text, artifacts: Optional[Iterable[types.TfxArtifact]] = None): \"\"\"Initialization of Channel. Args: type_name:", "into or read from it. Attributes: type_name: A string representing the artifact type", "iterable of TfxArtifact. \"\"\" if isinstance(source, Channel): return source elif isinstance(source, collections.Iterable): try:", "under the License is distributed on an \"AS IS\" BASIS, # WITHOUT WARRANTIES", "the artifact type the Channel takes. \"\"\" # TODO(b/124763842): Consider replace type_name with", "Channel. \"\"\" self.type_name = type_name self._artifacts = artifacts or [] self._validate_type() def __str__(self):", "# TODO(b/125348988): Add support for real Channel in addition to static ones. 
def", "the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law", "self._artifacts) def __repr__(self): return self.__str__() def _validate_type(self) -> None: for artifact in self._artifacts:", "is an abstract concept that connects data producers and data consumers. It contains", "class Channel(object): \"\"\"Tfx Channel. TFX Channel is an abstract concept that connects data", "StopIteration: raise ValueError('Cannot convert empty artifact collection into Channel') else: raise ValueError('Invalid source", "A collection of artifacts as the values that can be read from the", "\"AS IS\" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express", "\"\"\" if isinstance(source, Channel): return source elif isinstance(source, collections.Iterable): try: first_element = next(iter(source))", "required by applicable law or agreed to in writing, software # distributed under", "= type_name self._artifacts = artifacts or [] self._validate_type() def __str__(self): return 'Channel<{}: {}>'.format(self.type_name,", "<gh_stars>0 # Copyright 2019 Google LLC. All Rights Reserved. # # Licensed under", "empty artifact collection into Channel') else: raise ValueError('Invalid source to be a channel:", "applicable law or agreed to in writing, software # distributed under the License", "We should support dynamic query against a Channel # instead of a static", "get from this Channel. Returns: An artifact collection. \"\"\" # TODO(b/125037186): We should", "TypeError('Expected {} but found {}.'.format(expected_type_name, str(self.type_name))) def as_channel(source: Union[Channel, Iterable[types.TfxArtifact]]) -> Channel: \"\"\"Converts", "that connects data producers and data consumers. It contains restriction of the artifact", "a Channel # instead of a static Artifact collection. 
return self._artifacts def type_check(self,", "or agreed to in writing, software # distributed under the License is distributed", "from typing import Union from tfx.utils import types class Channel(object): \"\"\"Tfx Channel. TFX", "self._artifacts = artifacts or [] self._validate_type() def __str__(self): return 'Channel<{}: {}>'.format(self.type_name, self._artifacts) def", "artifacts or [] self._validate_type() def __str__(self): return 'Channel<{}: {}>'.format(self.type_name, self._artifacts) def __repr__(self): return", "CONDITIONS OF ANY KIND, either express or implied. # See the License for", "artifact type {}\" .format(self.type_name)) def get(self) -> Iterable[types.TfxArtifact]: \"\"\"Returns all artifacts that can", "TODO(b/125037186): We should support dynamic query against a Channel # instead of a", "ones. def __init__(self, type_name: Text, artifacts: Optional[Iterable[types.TfxArtifact]] = None): \"\"\"Initialization of Channel. Args:", "Text from typing import Union from tfx.utils import types class Channel(object): \"\"\"Tfx Channel.", "whether a Channel has the expected type name. Args: expected_type_name: Expected type_name to", "static Artifact collection. return self._artifacts def type_check(self, expected_type_name: Text) -> None: \"\"\"Checks whether", "return Channel( type_name=first_element.type_name, artifacts=source) else: raise ValueError('Invalid source to be a channel: {}'.format(source))", "in addition to static ones. def __init__(self, type_name: Text, artifacts: Optional[Iterable[types.TfxArtifact]] = None):", "support dynamic query against a Channel # instead of a static Artifact collection.", "import collections from typing import Iterable from typing import Optional from typing import", "collection of the same artifact type into a Channel. 
Args: source: Either a", "under the Apache License, Version 2.0 (the \"License\"); # you may not use", "writing, software # distributed under the License is distributed on an \"AS IS\"", "You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 #", "the License. \"\"\"TFX Channel definition.\"\"\" from __future__ import absolute_import from __future__ import division", "License. # You may obtain a copy of the License at # #", "Channel. artifacts: (Optional) A collection of artifacts as the values that can be", "the artifact type that should be fed into or read from it. Attributes:", "A string representing the artifact type the Channel takes. \"\"\" # TODO(b/124763842): Consider", "compliance with the License. # You may obtain a copy of the License", "{}.'.format(expected_type_name, str(self.type_name))) def as_channel(source: Union[Channel, Iterable[types.TfxArtifact]]) -> Channel: \"\"\"Converts artifact collection of the", "to static ones. def __init__(self, type_name: Text, artifacts: Optional[Iterable[types.TfxArtifact]] = None): \"\"\"Initialization of", "same artifact type into a Channel. Args: source: Either a Channel or an", "provided do not match Channel's artifact type {}\" .format(self.type_name)) def get(self) -> Iterable[types.TfxArtifact]:", "of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable", "{} but found {}.'.format(expected_type_name, str(self.type_name))) def as_channel(source: Union[Channel, Iterable[types.TfxArtifact]]) -> Channel: \"\"\"Converts artifact", "type_name to check against. Raises: TypeError if the type_name of given Channel is", "type_name of given Channel is different from expected. \"\"\" if self.type_name != expected_type_name:", "language governing permissions and # limitations under the License. 
\"\"\"TFX Channel definition.\"\"\" from", "convert empty artifact collection into Channel') else: raise ValueError('Invalid source to be a", "that can be get from this Channel. Returns: An artifact collection. \"\"\" #", "different from expected. \"\"\" if self.type_name != expected_type_name: raise TypeError('Expected {} but found", "should support dynamic query against a Channel # instead of a static Artifact", "self.type_name: raise ValueError( \"Artifacts provided do not match Channel's artifact type {}\" .format(self.type_name))", "name. Args: expected_type_name: Expected type_name to check against. Raises: TypeError if the type_name", "Union from tfx.utils import types class Channel(object): \"\"\"Tfx Channel. TFX Channel is an", "not use this file except in compliance with the License. # You may", "Name of the type that should be fed into or read from the", "read from the Channel. artifacts: (Optional) A collection of artifacts as the values", "and data consumers. It contains restriction of the artifact type that should be", "a Channel. Args: source: Either a Channel or an iterable of TfxArtifact. Returns:", "type_check(self, expected_type_name: Text) -> None: \"\"\"Checks whether a Channel has the expected type", "License, Version 2.0 (the \"License\"); # you may not use this file except", "type_name: Name of the type that should be fed into or read from", "a Channel has the expected type name. Args: expected_type_name: Expected type_name to check", "distributed on an \"AS IS\" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY", "Channel. Returns: An artifact collection. \"\"\" # TODO(b/125037186): We should support dynamic query", "be read from the Channel. This is used to construct a static Channel.", "artifact in self._artifacts: if artifact.type_name != self.type_name: raise ValueError( \"Artifacts provided do not", "import division from __future__ import print_function import collections from typing import Iterable from", "producers and data consumers. 
It contains restriction of the artifact type that should", "data producers and data consumers. It contains restriction of the artifact type that", "# you may not use this file except in compliance with the License.", "'Channel<{}: {}>'.format(self.type_name, self._artifacts) def __repr__(self): return self.__str__() def _validate_type(self) -> None: for artifact", "agreed to in writing, software # distributed under the License is distributed on", "the source artifact collection. Raises: ValueError when source is not a non-empty iterable", "\"\"\"Converts artifact collection of the same artifact type into a Channel. Args: source:", "read from the Channel. This is used to construct a static Channel. \"\"\"", "(the \"License\"); # you may not use this file except in compliance with", "the same artifact type into a Channel. Args: source: Either a Channel or", "the Channel. artifacts: (Optional) A collection of artifacts as the values that can", "Args: source: Either a Channel or an iterable of TfxArtifact. Returns: A static", "is not a non-empty iterable of TfxArtifact. \"\"\" if isinstance(source, Channel): return source", "to construct a static Channel. \"\"\" self.type_name = type_name self._artifacts = artifacts or", "import print_function import collections from typing import Iterable from typing import Optional from", "artifacts: Optional[Iterable[types.TfxArtifact]] = None): \"\"\"Initialization of Channel. Args: type_name: Name of the type", "# Unless required by applicable law or agreed to in writing, software #", "the type_name of given Channel is different from expected. \"\"\" if self.type_name !=", "can be read from the Channel. 
This is used to construct a static", "by applicable law or agreed to in writing, software # distributed under the", "types.TfxArtifact): return Channel( type_name=first_element.type_name, artifacts=source) else: raise ValueError('Invalid source to be a channel:", "_validate_type(self) -> None: for artifact in self._artifacts: if artifact.type_name != self.type_name: raise ValueError(", "be get from this Channel. Returns: An artifact collection. \"\"\" # TODO(b/125037186): We", "copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by", "the specific language governing permissions and # limitations under the License. \"\"\"TFX Channel", "to check against. Raises: TypeError if the type_name of given Channel is different", "This is used to construct a static Channel. \"\"\" self.type_name = type_name self._artifacts", "collections from typing import Iterable from typing import Optional from typing import Text", "Channel definition.\"\"\" from __future__ import absolute_import from __future__ import division from __future__ import", "Channel or an iterable of TfxArtifact. Returns: A static Channel containing the source", "Iterable[types.TfxArtifact]: \"\"\"Returns all artifacts that can be get from this Channel. Returns: An", "type_name self._artifacts = artifacts or [] self._validate_type() def __str__(self): return 'Channel<{}: {}>'.format(self.type_name, self._artifacts)", "file except in compliance with the License. # You may obtain a copy", "isinstance(source, Channel): return source elif isinstance(source, collections.Iterable): try: first_element = next(iter(source)) if isinstance(first_element,", "from __future__ import division from __future__ import print_function import collections from typing import", "TfxArtifact. Returns: A static Channel containing the source artifact collection. 
Raises: ValueError when", "self.__str__() def _validate_type(self) -> None: for artifact in self._artifacts: if artifact.type_name != self.type_name:", "self._artifacts: if artifact.type_name != self.type_name: raise ValueError( \"Artifacts provided do not match Channel's", "License for the specific language governing permissions and # limitations under the License.", "-> Iterable[types.TfxArtifact]: \"\"\"Returns all artifacts that can be get from this Channel. Returns:", "-> None: for artifact in self._artifacts: if artifact.type_name != self.type_name: raise ValueError( \"Artifacts", "Text, artifacts: Optional[Iterable[types.TfxArtifact]] = None): \"\"\"Initialization of Channel. Args: type_name: Name of the", "to in writing, software # distributed under the License is distributed on an", "or read from it. Attributes: type_name: A string representing the artifact type the", "artifacts that can be get from this Channel. Returns: An artifact collection. \"\"\"", "Google LLC. All Rights Reserved. # # Licensed under the Apache License, Version", "Iterable[types.TfxArtifact]]) -> Channel: \"\"\"Converts artifact collection of the same artifact type into a", "implied. # See the License for the specific language governing permissions and #", "\"License\"); # you may not use this file except in compliance with the", "Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the \"License\");", "obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless", "Raises: ValueError when source is not a non-empty iterable of TfxArtifact. \"\"\" if", "non-empty iterable of TfxArtifact. \"\"\" if isinstance(source, Channel): return source elif isinstance(source, collections.Iterable):", "artifacts=source) else: raise ValueError('Invalid source to be a channel: {}'.format(source)) except StopIteration: raise", "values that can be read from the Channel. 
This is used to construct", "abstract concept that connects data producers and data consumers. It contains restriction of", "it. Attributes: type_name: A string representing the artifact type the Channel takes. \"\"\"", "a static Channel. \"\"\" self.type_name = type_name self._artifacts = artifacts or [] self._validate_type()", "an iterable of TfxArtifact. Returns: A static Channel containing the source artifact collection.", "expected_type_name: Expected type_name to check against. Raises: TypeError if the type_name of given", "or implied. # See the License for the specific language governing permissions and", "\"\"\"Initialization of Channel. Args: type_name: Name of the type that should be fed", "print_function import collections from typing import Iterable from typing import Optional from typing", "Raises: TypeError if the type_name of given Channel is different from expected. \"\"\"", "Apache License, Version 2.0 (the \"License\"); # you may not use this file", "OR CONDITIONS OF ANY KIND, either express or implied. # See the License", "may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # #", "can be get from this Channel. Returns: An artifact collection. \"\"\" # TODO(b/125037186):", "an abstract concept that connects data producers and data consumers. It contains restriction", "Text) -> None: \"\"\"Checks whether a Channel has the expected type name. Args:", "Args: type_name: Name of the type that should be fed into or read", "http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing,", "def as_channel(source: Union[Channel, Iterable[types.TfxArtifact]]) -> Channel: \"\"\"Converts artifact collection of the same artifact", "in writing, software # distributed under the License is distributed on an \"AS", "Channel is an abstract concept that connects data producers and data consumers. It", "of Channel. 
Args: type_name: Name of the type that should be fed into", "# See the License for the specific language governing permissions and # limitations", "the License is distributed on an \"AS IS\" BASIS, # WITHOUT WARRANTIES OR", "first_element = next(iter(source)) if isinstance(first_element, types.TfxArtifact): return Channel( type_name=first_element.type_name, artifacts=source) else: raise ValueError('Invalid", "of given Channel is different from expected. \"\"\" if self.type_name != expected_type_name: raise", "Iterable from typing import Optional from typing import Text from typing import Union", "match Channel's artifact type {}\" .format(self.type_name)) def get(self) -> Iterable[types.TfxArtifact]: \"\"\"Returns all artifacts", "not match Channel's artifact type {}\" .format(self.type_name)) def get(self) -> Iterable[types.TfxArtifact]: \"\"\"Returns all", "ValueError('Cannot convert empty artifact collection into Channel') else: raise ValueError('Invalid source to be", "real Channel in addition to static ones. def __init__(self, type_name: Text, artifacts: Optional[Iterable[types.TfxArtifact]]", "Channel. This is used to construct a static Channel. \"\"\" self.type_name = type_name", "that can be read from the Channel. This is used to construct a", "that should be fed into or read from the Channel. artifacts: (Optional) A", "ArtifactType. # TODO(b/125348988): Add support for real Channel in addition to static ones.", "not a non-empty iterable of TfxArtifact. \"\"\" if isinstance(source, Channel): return source elif", "__future__ import absolute_import from __future__ import division from __future__ import print_function import collections", "source artifact collection. Raises: ValueError when source is not a non-empty iterable of", "is different from expected. \"\"\" if self.type_name != expected_type_name: raise TypeError('Expected {} but", "fed into or read from the Channel. 
artifacts: (Optional) A collection of artifacts", "the Apache License, Version 2.0 (the \"License\"); # you may not use this", "License. \"\"\"TFX Channel definition.\"\"\" from __future__ import absolute_import from __future__ import division from", "you may not use this file except in compliance with the License. #", "artifact type the Channel takes. \"\"\" # TODO(b/124763842): Consider replace type_name with ArtifactType.", "Channel has the expected type name. Args: expected_type_name: Expected type_name to check against.", "__init__(self, type_name: Text, artifacts: Optional[Iterable[types.TfxArtifact]] = None): \"\"\"Initialization of Channel. Args: type_name: Name", "limitations under the License. \"\"\"TFX Channel definition.\"\"\" from __future__ import absolute_import from __future__", "if self.type_name != expected_type_name: raise TypeError('Expected {} but found {}.'.format(expected_type_name, str(self.type_name))) def as_channel(source:", "else: raise ValueError('Invalid source to be a channel: {}'.format(source)) except StopIteration: raise ValueError('Cannot", "\"Artifacts provided do not match Channel's artifact type {}\" .format(self.type_name)) def get(self) ->", "# limitations under the License. \"\"\"TFX Channel definition.\"\"\" from __future__ import absolute_import from", "import Text from typing import Union from tfx.utils import types class Channel(object): \"\"\"Tfx", "permissions and # limitations under the License. \"\"\"TFX Channel definition.\"\"\" from __future__ import", "against. Raises: TypeError if the type_name of given Channel is different from expected.", "Channel: \"\"\"Converts artifact collection of the same artifact type into a Channel. Args:", "Channel. Args: type_name: Name of the type that should be fed into or", "use this file except in compliance with the License. # You may obtain", "source: Either a Channel or an iterable of TfxArtifact. Returns: A static Channel", "fed into or read from it. 
Attributes: type_name: A string representing the artifact", "# Licensed under the Apache License, Version 2.0 (the \"License\"); # you may", "None: for artifact in self._artifacts: if artifact.type_name != self.type_name: raise ValueError( \"Artifacts provided", "tfx.utils import types class Channel(object): \"\"\"Tfx Channel. TFX Channel is an abstract concept", "artifact collection. \"\"\" # TODO(b/125037186): We should support dynamic query against a Channel", "ValueError('Invalid source to be a channel: {}'.format(source)) except StopIteration: raise ValueError('Cannot convert empty", "TFX Channel is an abstract concept that connects data producers and data consumers.", "into or read from the Channel. artifacts: (Optional) A collection of artifacts as", "__str__(self): return 'Channel<{}: {}>'.format(self.type_name, self._artifacts) def __repr__(self): return self.__str__() def _validate_type(self) -> None:", "get(self) -> Iterable[types.TfxArtifact]: \"\"\"Returns all artifacts that can be get from this Channel.", "2.0 (the \"License\"); # you may not use this file except in compliance", "import Iterable from typing import Optional from typing import Text from typing import", "the type that should be fed into or read from the Channel. artifacts:", "typing import Optional from typing import Text from typing import Union from tfx.utils", "Channel is different from expected. \"\"\" if self.type_name != expected_type_name: raise TypeError('Expected {}", "WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the", "construct a static Channel. \"\"\" self.type_name = type_name self._artifacts = artifacts or []", "if artifact.type_name != self.type_name: raise ValueError( \"Artifacts provided do not match Channel's artifact", "None: \"\"\"Checks whether a Channel has the expected type name. 
Args: expected_type_name: Expected", "import absolute_import from __future__ import division from __future__ import print_function import collections from", "should be fed into or read from it. Attributes: type_name: A string representing", "A static Channel containing the source artifact collection. Raises: ValueError when source is", "channel: {}'.format(source)) except StopIteration: raise ValueError('Cannot convert empty artifact collection into Channel') else:", "the Channel takes. \"\"\" # TODO(b/124763842): Consider replace type_name with ArtifactType. # TODO(b/125348988):", "# # Unless required by applicable law or agreed to in writing, software", "express or implied. # See the License for the specific language governing permissions", "An artifact collection. \"\"\" # TODO(b/125037186): We should support dynamic query against a", "with ArtifactType. # TODO(b/125348988): Add support for real Channel in addition to static", "data consumers. It contains restriction of the artifact type that should be fed", "# instead of a static Artifact collection. return self._artifacts def type_check(self, expected_type_name: Text)", "Copyright 2019 Google LLC. All Rights Reserved. # # Licensed under the Apache", "from expected. \"\"\" if self.type_name != expected_type_name: raise TypeError('Expected {} but found {}.'.format(expected_type_name,", "either express or implied. # See the License for the specific language governing", "artifact collection into Channel') else: raise ValueError('Invalid source to be a channel: {}'.format(source))", "Channel. Args: source: Either a Channel or an iterable of TfxArtifact. Returns: A", "= artifacts or [] self._validate_type() def __str__(self): return 'Channel<{}: {}>'.format(self.type_name, self._artifacts) def __repr__(self):", "type_name: A string representing the artifact type the Channel takes. \"\"\" # TODO(b/124763842):", "be fed into or read from it. 
Attributes: type_name: A string representing the", "{}>'.format(self.type_name, self._artifacts) def __repr__(self): return self.__str__() def _validate_type(self) -> None: for artifact in", "Licensed under the Apache License, Version 2.0 (the \"License\"); # you may not", "an \"AS IS\" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either", "type_name: Text, artifacts: Optional[Iterable[types.TfxArtifact]] = None): \"\"\"Initialization of Channel. Args: type_name: Name of", "Union[Channel, Iterable[types.TfxArtifact]]) -> Channel: \"\"\"Converts artifact collection of the same artifact type into", "# TODO(b/125037186): We should support dynamic query against a Channel # instead of", "__future__ import division from __future__ import print_function import collections from typing import Iterable", "replace type_name with ArtifactType. # TODO(b/125348988): Add support for real Channel in addition", "but found {}.'.format(expected_type_name, str(self.type_name))) def as_channel(source: Union[Channel, Iterable[types.TfxArtifact]]) -> Channel: \"\"\"Converts artifact collection", "the License. # You may obtain a copy of the License at #", "has the expected type name. Args: expected_type_name: Expected type_name to check against. Raises:", "collection. Raises: ValueError when source is not a non-empty iterable of TfxArtifact. \"\"\"", "# distributed under the License is distributed on an \"AS IS\" BASIS, #", "is distributed on an \"AS IS\" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF", "return 'Channel<{}: {}>'.format(self.type_name, self._artifacts) def __repr__(self): return self.__str__() def _validate_type(self) -> None: for", "type that should be fed into or read from it. Attributes: type_name: A", "expected_type_name: raise TypeError('Expected {} but found {}.'.format(expected_type_name, str(self.type_name))) def as_channel(source: Union[Channel, Iterable[types.TfxArtifact]]) ->", "governing permissions and # limitations under the License. 
\"\"\"TFX Channel definition.\"\"\" from __future__", "def _validate_type(self) -> None: for artifact in self._artifacts: if artifact.type_name != self.type_name: raise", "into a Channel. Args: source: Either a Channel or an iterable of TfxArtifact.", "should be fed into or read from the Channel. artifacts: (Optional) A collection", "of artifacts as the values that can be read from the Channel. This", "= next(iter(source)) if isinstance(first_element, types.TfxArtifact): return Channel( type_name=first_element.type_name, artifacts=source) else: raise ValueError('Invalid source", "from __future__ import absolute_import from __future__ import division from __future__ import print_function import", "from it. Attributes: type_name: A string representing the artifact type the Channel takes.", "return self.__str__() def _validate_type(self) -> None: for artifact in self._artifacts: if artifact.type_name !=", "expected type name. Args: expected_type_name: Expected type_name to check against. Raises: TypeError if", "self.type_name != expected_type_name: raise TypeError('Expected {} but found {}.'.format(expected_type_name, str(self.type_name))) def as_channel(source: Union[Channel,", "# TODO(b/124763842): Consider replace type_name with ArtifactType. # TODO(b/125348988): Add support for real", "of TfxArtifact. Returns: A static Channel containing the source artifact collection. Raises: ValueError", "ValueError when source is not a non-empty iterable of TfxArtifact. \"\"\" if isinstance(source,", "Optional from typing import Text from typing import Union from tfx.utils import types", "def get(self) -> Iterable[types.TfxArtifact]: \"\"\"Returns all artifacts that can be get from this", "of a static Artifact collection. return self._artifacts def type_check(self, expected_type_name: Text) -> None:", "artifact type into a Channel. Args: source: Either a Channel or an iterable", "specific language governing permissions and # limitations under the License. 
\"\"\"TFX Channel definition.\"\"\"", "dynamic query against a Channel # instead of a static Artifact collection. return", "iterable of TfxArtifact. Returns: A static Channel containing the source artifact collection. Raises:", "if isinstance(source, Channel): return source elif isinstance(source, collections.Iterable): try: first_element = next(iter(source)) if", "-> Channel: \"\"\"Converts artifact collection of the same artifact type into a Channel.", "with the License. # You may obtain a copy of the License at", "\"\"\" # TODO(b/125037186): We should support dynamic query against a Channel # instead", "when source is not a non-empty iterable of TfxArtifact. \"\"\" if isinstance(source, Channel):", "the values that can be read from the Channel. This is used to", "# # Licensed under the Apache License, Version 2.0 (the \"License\"); # you", "and # limitations under the License. \"\"\"TFX Channel definition.\"\"\" from __future__ import absolute_import", "of the same artifact type into a Channel. Args: source: Either a Channel", "is used to construct a static Channel. \"\"\" self.type_name = type_name self._artifacts =", "concept that connects data producers and data consumers. It contains restriction of the", "Channel(object): \"\"\"Tfx Channel. 
TFX Channel is an abstract concept that connects data producers", "raise ValueError('Invalid source to be a channel: {}'.format(source)) except StopIteration: raise ValueError('Cannot convert", "law or agreed to in writing, software # distributed under the License is", "the License for the specific language governing permissions and # limitations under the", "{}'.format(source)) except StopIteration: raise ValueError('Cannot convert empty artifact collection into Channel') else: raise", "source elif isinstance(source, collections.Iterable): try: first_element = next(iter(source)) if isinstance(first_element, types.TfxArtifact): return Channel(", "on an \"AS IS\" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,", "expected_type_name: Text) -> None: \"\"\"Checks whether a Channel has the expected type name.", "return self._artifacts def type_check(self, expected_type_name: Text) -> None: \"\"\"Checks whether a Channel has", "be a channel: {}'.format(source)) except StopIteration: raise ValueError('Cannot convert empty artifact collection into", "Channel takes. \"\"\" # TODO(b/124763842): Consider replace type_name with ArtifactType. # TODO(b/125348988): Add", "next(iter(source)) if isinstance(first_element, types.TfxArtifact): return Channel( type_name=first_element.type_name, artifacts=source) else: raise ValueError('Invalid source to", "collections.Iterable): try: first_element = next(iter(source)) if isinstance(first_element, types.TfxArtifact): return Channel( type_name=first_element.type_name, artifacts=source) else:", "against a Channel # instead of a static Artifact collection. return self._artifacts def", "a channel: {}'.format(source)) except StopIteration: raise ValueError('Cannot convert empty artifact collection into Channel')", "TypeError if the type_name of given Channel is different from expected. \"\"\" if", "that should be fed into or read from it. Attributes: type_name: A string", "TfxArtifact. 
\"\"\" if isinstance(source, Channel): return source elif isinstance(source, collections.Iterable): try: first_element =", "of TfxArtifact. \"\"\" if isinstance(source, Channel): return source elif isinstance(source, collections.Iterable): try: first_element", "Reserved. # # Licensed under the Apache License, Version 2.0 (the \"License\"); #", "Returns: An artifact collection. \"\"\" # TODO(b/125037186): We should support dynamic query against", "in compliance with the License. # You may obtain a copy of the", "License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or", "# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. #", "!= expected_type_name: raise TypeError('Expected {} but found {}.'.format(expected_type_name, str(self.type_name))) def as_channel(source: Union[Channel, Iterable[types.TfxArtifact]])", "at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed", "!= self.type_name: raise ValueError( \"Artifacts provided do not match Channel's artifact type {}\"", "Channel in addition to static ones. def __init__(self, type_name: Text, artifacts: Optional[Iterable[types.TfxArtifact]] =", "Add support for real Channel in addition to static ones. def __init__(self, type_name:", "# Copyright 2019 Google LLC. All Rights Reserved. # # Licensed under the", "collection. return self._artifacts def type_check(self, expected_type_name: Text) -> None: \"\"\"Checks whether a Channel", "do not match Channel's artifact type {}\" .format(self.type_name)) def get(self) -> Iterable[types.TfxArtifact]: \"\"\"Returns", "given Channel is different from expected. 
\"\"\" if self.type_name != expected_type_name: raise TypeError('Expected", "raise ValueError('Cannot convert empty artifact collection into Channel') else: raise ValueError('Invalid source to", "See the License for the specific language governing permissions and # limitations under", "type {}\" .format(self.type_name)) def get(self) -> Iterable[types.TfxArtifact]: \"\"\"Returns all artifacts that can be", "BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.", "{}\" .format(self.type_name)) def get(self) -> Iterable[types.TfxArtifact]: \"\"\"Returns all artifacts that can be get", "as the values that can be read from the Channel. This is used", "a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required", "read from it. Attributes: type_name: A string representing the artifact type the Channel", "Attributes: type_name: A string representing the artifact type the Channel takes. \"\"\" #", "# http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in", "support for real Channel in addition to static ones. def __init__(self, type_name: Text,", "\"\"\"Checks whether a Channel has the expected type name. Args: expected_type_name: Expected type_name", "expected. \"\"\" if self.type_name != expected_type_name: raise TypeError('Expected {} but found {}.'.format(expected_type_name, str(self.type_name)))", "of the type that should be fed into or read from the Channel.", "from __future__ import print_function import collections from typing import Iterable from typing import", "the expected type name. Args: expected_type_name: Expected type_name to check against. Raises: TypeError", "static Channel. \"\"\" self.type_name = type_name self._artifacts = artifacts or [] self._validate_type() def", "artifacts: (Optional) A collection of artifacts as the values that can be read", "artifact collection. 
Raises: ValueError when source is not a non-empty iterable of TfxArtifact.", "def __repr__(self): return self.__str__() def _validate_type(self) -> None: for artifact in self._artifacts: if", "None): \"\"\"Initialization of Channel. Args: type_name: Name of the type that should be", "except StopIteration: raise ValueError('Cannot convert empty artifact collection into Channel') else: raise ValueError('Invalid", "Channel containing the source artifact collection. Raises: ValueError when source is not a", "used to construct a static Channel. \"\"\" self.type_name = type_name self._artifacts = artifacts", "static Channel containing the source artifact collection. Raises: ValueError when source is not", "artifact type that should be fed into or read from it. Attributes: type_name:", "artifacts as the values that can be read from the Channel. This is", "Version 2.0 (the \"License\"); # you may not use this file except in", "except in compliance with the License. # You may obtain a copy of", "\"\"\"Returns all artifacts that can be get from this Channel. Returns: An artifact", "Channel # instead of a static Artifact collection. return self._artifacts def type_check(self, expected_type_name:", "# You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0", "may not use this file except in compliance with the License. # You", "License is distributed on an \"AS IS\" BASIS, # WITHOUT WARRANTIES OR CONDITIONS", "of the artifact type that should be fed into or read from it.", "Consider replace type_name with ArtifactType. # TODO(b/125348988): Add support for real Channel in", "raise ValueError( \"Artifacts provided do not match Channel's artifact type {}\" .format(self.type_name)) def", "import types class Channel(object): \"\"\"Tfx Channel. 
TFX Channel is an abstract concept that", "typing import Text from typing import Union from tfx.utils import types class Channel(object):", "or [] self._validate_type() def __str__(self): return 'Channel<{}: {}>'.format(self.type_name, self._artifacts) def __repr__(self): return self.__str__()", "import Union from tfx.utils import types class Channel(object): \"\"\"Tfx Channel. TFX Channel is", "= None): \"\"\"Initialization of Channel. Args: type_name: Name of the type that should", "check against. Raises: TypeError if the type_name of given Channel is different from", "It contains restriction of the artifact type that should be fed into or", "2019 Google LLC. All Rights Reserved. # # Licensed under the Apache License,", "if the type_name of given Channel is different from expected. \"\"\" if self.type_name", "consumers. It contains restriction of the artifact type that should be fed into", "source is not a non-empty iterable of TfxArtifact. \"\"\" if isinstance(source, Channel): return", "isinstance(first_element, types.TfxArtifact): return Channel( type_name=first_element.type_name, artifacts=source) else: raise ValueError('Invalid source to be a", "Channel. TFX Channel is an abstract concept that connects data producers and data", "type name. Args: expected_type_name: Expected type_name to check against. Raises: TypeError if the", "string representing the artifact type the Channel takes. \"\"\" # TODO(b/124763842): Consider replace", "or an iterable of TfxArtifact. Returns: A static Channel containing the source artifact", "TODO(b/125348988): Add support for real Channel in addition to static ones. def __init__(self,", "representing the artifact type the Channel takes. \"\"\" # TODO(b/124763842): Consider replace type_name", "Either a Channel or an iterable of TfxArtifact. 
Returns: A static Channel containing", "definition.\"\"\" from __future__ import absolute_import from __future__ import division from __future__ import print_function", "instead of a static Artifact collection. return self._artifacts def type_check(self, expected_type_name: Text) ->", "division from __future__ import print_function import collections from typing import Iterable from typing", "all artifacts that can be get from this Channel. Returns: An artifact collection.", "type_name=first_element.type_name, artifacts=source) else: raise ValueError('Invalid source to be a channel: {}'.format(source)) except StopIteration:", "def __str__(self): return 'Channel<{}: {}>'.format(self.type_name, self._artifacts) def __repr__(self): return self.__str__() def _validate_type(self) ->", "or read from the Channel. artifacts: (Optional) A collection of artifacts as the", "(Optional) A collection of artifacts as the values that can be read from", "self.type_name = type_name self._artifacts = artifacts or [] self._validate_type() def __str__(self): return 'Channel<{}:", "type that should be fed into or read from the Channel. artifacts: (Optional)", "isinstance(source, collections.Iterable): try: first_element = next(iter(source)) if isinstance(first_element, types.TfxArtifact): return Channel( type_name=first_element.type_name, artifacts=source)", "be fed into or read from the Channel. artifacts: (Optional) A collection of", ".format(self.type_name)) def get(self) -> Iterable[types.TfxArtifact]: \"\"\"Returns all artifacts that can be get from", "this Channel. Returns: An artifact collection. \"\"\" # TODO(b/125037186): We should support dynamic", "distributed under the License is distributed on an \"AS IS\" BASIS, # WITHOUT", "the Channel. This is used to construct a static Channel. \"\"\" self.type_name =", "collection. 
\"\"\" # TODO(b/125037186): We should support dynamic query against a Channel #", "to be a channel: {}'.format(source)) except StopIteration: raise ValueError('Cannot convert empty artifact collection", "artifact collection of the same artifact type into a Channel. Args: source: Either" ]
[ "is not None: login(request, user) return redirect('/') elif not User.objects.get(email=email).check_password(password): messages.error(request, 'Incorrect password')", "last_name = request.POST.get('last_name') email = request.POST.get('email') if not request.POST.get('password') or not request.POST.get('password2'): messages.error(request,", "is False \\ or any(c.islower() for c in password2) is False \\ or", "if user is None: messages.error(request, 'Something went wrong') return render(request, 'change-password.html') login(request, new_user)", "messages.error(request, 'Password has already been changed') return redirect('login-page') return render(request, 'new-password-form.html') else: messages.error(request,", "request.POST.get( 'new_password2'): messages.error(request, 'Please fill all fields correctly') return render(request, 'change-password.html') old_password =", "stored_token = TokenTemporaryStorage.objects.get(user=user).token if token == stored_token: if not token_generator.check_token(user, token): messages.error(request, 'Password", "characters' '(There should be letters, lowercase letters, numbers and special characters)') return render(request,", "= User.objects.get(id=id) stored_token = TokenTemporaryStorage.objects.get(user=user).token if token == stored_token: if not token_generator.check_token(user, token):", "'Incorrect e-mail') return render(request, 'password-reset.html') class PasswordResetVerificationView(View): def get(self, request, uidb64, token): try:", "= f'Contact form(Sent by user {name} {surname}' email_body = message administrators = User.objects.filter(is_superuser=True)", "exist in the database') return render(request, 'login.html') class RegisterView(View): def get(self, request): return", "new_password2: messages.error(request, 'Passwords mismatch') return render(request, 'change-password.html') user.set_password(<PASSWORD>) user.save() new_user = authenticate(request, username=request.user.email,", "{surname}' email_body 
= message administrators = User.objects.filter(is_superuser=True) if not name or not surname", "kwargs={'uidb64': uidb64, 'token': token}) email_subject = 'Password reset' activation_url = f'http://{domain}{link}' email_body =", "user.save() messages.success(request, 'Data has been changed') return redirect(f'/edit/{request.user.id}/') class PasswordChangeView(View): def get(self, request,", "redirect(f'/edit/{request.user.id}/') return render(request, 'change-password.html') def post(self, request, user_id): if not request.POST.get('old_password') or not", "in password2) is False \\ or any(c.isdigit() for c in password2) is False:", "post(self, request, user_id): if not request.POST.get('old_password') or not request.POST.get('new_password1') or not request.POST.get( 'new_password2'):", "any(not c.isalnum() for c in password2) is False \\ or any(c.isupper() for c", "donations = Donation.objects.filter(user_id=user_id).order_by('date_added') \\ .order_by('date_taken').order_by('time_taken').order_by('is_taken') donation_categories = DonationCategories.objects.all() return render(request, 'user_panel.html', {'donations': donations,", "messages.success(request, 'Successfully sent') return redirect(f'/panel/{request.user.id}/') class UserEditView(View): def get(self, request, user_id): if request.user.id", "{user}, your activation link: {activation_url}' send_mail( email_subject, email_body, '<EMAIL>', [email], fail_silently=False, ) messages.success(request,", ") messages.success(request, 'Check your e-mail account for further information') return render(request, 'register.html') class", "render(request, 'change-password.html') login(request, new_user) messages.success(request, 'Data successfully changed') return redirect(f'/edit/{request.user.id}/') class PasswordResetView(View): def", "User from django.contrib.auth import authenticate, login, logout from django.contrib import messages from django.core.mail", "render(request, 'login.html') except ObjectDoesNotExist: 
messages.error(request, 'Given e-mail does not exist in the database')", "return redirect(f'/edit/{request.user.id}/') return render(request, 'change-password.html') def post(self, request, user_id): if not request.POST.get('old_password') or", "user.last_name = last_name user.email = email user.save() messages.success(request, 'Data has been changed') return", "def get(self, request, user_id): if request.user.id != user_id: return redirect(f'/edit/{request.user.id}/') return render(request, 'user-edit.html')", "correctly') return redirect('/') for administrator in administrators: email = administrator.email send_mail( email_subject, email_body,", "domain = get_current_site(request).domain link = reverse('password-reset-verification', kwargs={'uidb64': uidb64, 'token': token}) email_subject = 'Password", "in password2) is False: messages.error(request, 'The password does not have all special characters'", "account' activation_url = f'http://{domain}{link}' email_body = f'Hello {user}, your activation link: {activation_url}' send_mail(", "or not request.POST.get( 'new_password2'): messages.error(request, 'Please fill all fields correctly') return render(request, 'change-password.html')", "name = request.POST.get('name') surname = request.POST.get('surname') email = request.POST.get('email') password = request.POST.get('password') password2", "changed') return redirect('login-page') def post(self, request, uidb64, token): id = force_str(urlsafe_base64_decode(uidb64)) user =", "redirect(f'/edit/{request.user.id}/') class PasswordChangeView(View): def get(self, request, user_id): if request.user.id != user_id: return redirect(f'/edit/{request.user.id}/')", "<PASSWORD>: messages.error(request, 'Passwords mismatch') return render(request, 'new-password-form.html') user.set_password(<PASSWORD>) user.save() TokenTemporaryStorage.objects.get(user=user).delete() messages.success(request, 'Password changed", "force_str, DjangoUnicodeDecodeError from django.utils.http import 
urlsafe_base64_encode, urlsafe_base64_decode from django.contrib.sites.shortcuts import get_current_site from .utils", "except ObjectDoesNotExist: messages.error(request, 'Given e-mail does not exist in the database') return render(request,", "TokenTemporaryStorage.objects.get(user=user).token if token == stored_token: TokenTemporaryStorage.objects.get(user=user).delete() if not token_generator.check_token(user, token): messages.error(request, 'Account is", "messages.error(request, 'Passwords mismatch') return render(request, 'new-password-form.html') user.set_password(<PASSWORD>) user.save() TokenTemporaryStorage.objects.get(user=user).delete() messages.success(request, 'Password changed successfully')", "username=request.user.email, password=old_password) if user is None: messages.error(request, 'Old password incorrect') return render(request, 'change-password.html')", "from django.contrib.sites.shortcuts import get_current_site from .utils import token_generator from django.core.exceptions import ObjectDoesNotExist class", "or password is already changed') return redirect('login-page') except ObjectDoesNotExist: messages.error(request, 'Incorrect link or", "DjangoUnicodeDecodeError from django.utils.http import urlsafe_base64_encode, urlsafe_base64_decode from django.contrib.sites.shortcuts import get_current_site from .utils import", "user.is_active = True user.save() messages.success(request, 'Account successfully activated') return redirect('login-page') else: messages.error(request, 'Incorrect", "'password-reset.html') def post(self, request): email = request.POST.get('email') try: user = User.objects.get(email=email) uidb64 =", "return render(request, 'change-password.html') user.set_password(<PASSWORD>) user.save() new_user = authenticate(request, username=request.user.email, password=<PASSWORD>) if user is", "password2) is False \\ or any(c.isupper() for c in password2) is False \\", "token=token) domain = get_current_site(request).domain link = 
reverse('password-reset-verification', kwargs={'uidb64': uidb64, 'token': token}) email_subject =", "\\ or any(c.isdigit() for c in password2) is False: messages.error(request, 'The password does", "!= <PASSWORD>: messages.error(request, 'Passwords mismatch') return render(request, 'register.html') user = User.objects.create_user(username=email, first_name=name, last_name=surname,", "request.POST.get('password2') if len(password) < 8 or len(password2) < 8: messages.error(request, 'Password too short", "return render(request, 'change-password.html') def post(self, request, user_id): if not request.POST.get('old_password') or not request.POST.get('new_password1')", "'change-password.html') user.set_password(<PASSWORD>) user.save() new_user = authenticate(request, username=request.user.email, password=<PASSWORD>) if user is None: messages.error(request,", "= authenticate(request, username=request.user.email, password=old_password) if user is None: messages.error(request, 'Old password incorrect') return", "request.POST.get('last_name') email = request.POST.get('email') if not request.POST.get('password') or not request.POST.get('password2'): messages.error(request, 'Please fill", "= request.POST.get('first_name') last_name = request.POST.get('last_name') email = request.POST.get('email') if not request.POST.get('password') or not", "except ObjectDoesNotExist: messages.error(request, 'Incorrect e-mail') return render(request, 'password-reset.html') class PasswordResetVerificationView(View): def get(self, request,", "\\ or any(c.islower() for c in password2) is False \\ or any(c.isdigit() for", "= DonationCategories.objects.all() return render(request, 'user_panel.html', {'donations': donations, 'donation_categories': donation_categories}) def post(self, request, user_id):", "email = request.POST.get('email') password = request.POST.get('password') user = authenticate(request, username=email, password=password) if user", "try: user = User.objects.get(email=email) uidb64 
= urlsafe_base64_encode(force_bytes(user.pk)) token = token_generator.make_token(user) TokenTemporaryStorage.objects.create(user_id=user.id, token=token) domain", "messages from django.core.mail import send_mail from django.urls import reverse from django.utils.encoding import force_bytes,", "'(There should be letters, lowercase letters, numbers and special characters)') return render(request, 'register.html')", "redirect('login-page') def post(self, request, uidb64, token): id = force_str(urlsafe_base64_decode(uidb64)) user = User.objects.get(id=id) password1", "{'donations': donations, 'donation_categories': donation_categories}) def post(self, request, user_id): name = request.POST.get('name') surname =", "'register.html') user = User.objects.create_user(username=email, first_name=name, last_name=surname, email=email) user.set_password(<PASSWORD>) user.is_active = False user.save() uidb64", "password = request.POST.get('password') user = authenticate(request, username=email, password=password) if user is not None:", "get_current_site(request).domain link = reverse('activate-page', kwargs={'uidb64': uidb64, 'token': token}) email_subject = 'Activate your account'", "user.is_active: return redirect('login-page') user.is_active = True user.save() messages.success(request, 'Account successfully activated') return redirect('login-page')", "get(self, request): return render(request, 'login.html') def post(self, request): try: email = request.POST.get('email') password", "!= <PASSWORD>: messages.error(request, 'Passwords mismatch') return render(request, 'new-password-form.html') user.set_password(<PASSWORD>) user.save() TokenTemporaryStorage.objects.get(user=user).delete() messages.success(request, 'Password", "render(request, 'register.html') user = User.objects.create_user(username=email, first_name=name, last_name=surname, email=email) user.set_password(<PASSWORD>) user.is_active = False user.save()", "django.utils.http import urlsafe_base64_encode, 
urlsafe_base64_decode from django.contrib.sites.shortcuts import get_current_site from .utils import token_generator from", "'Please fill all fields correctly') return render(request, 'user-edit.html') password = request.POST.get('password') password2 =", "TokenTemporaryStorage.objects.create(user_id=user.id, token=token) domain = get_current_site(request).domain link = reverse('activate-page', kwargs={'uidb64': uidb64, 'token': token}) email_subject", "user = User.objects.get(email=email) uidb64 = urlsafe_base64_encode(force_bytes(user.pk)) token = token_generator.make_token(user) TokenTemporaryStorage.objects.create(user_id=user.id, token=token) domain =", "= request.POST.get('surname') email = request.POST.get('email') password = request.POST.get('password') password2 = request.POST.get('password2') if len(password)", "LoginView(View): def get(self, request): return render(request, 'login.html') def post(self, request): try: email =", "'register.html') elif password != <PASSWORD>: messages.error(request, 'Passwords mismatch') return render(request, 'register.html') user =", "letters, numbers and special characters)') return render(request, 'register.html') elif User.objects.filter(username=email): messages.error(request, 'A user", "len(password2) < 8: messages.error(request, 'Password too short (Min. 
8 characters)') return render(request, 'register.html')", "'new_password2'): messages.error(request, 'Please fill all fields correctly') return render(request, 'change-password.html') old_password = request.POST.get('old_password')", "redirect('/') class UserPanelView(View): def get(self, request, user_id): donations = Donation.objects.filter(user_id=user_id).order_by('date_added') \\ .order_by('date_taken').order_by('time_taken').order_by('is_taken') donation_categories", "incorrect') return render(request, 'change-password.html') new_password1 = request.POST.get('new_password1') new_password2 = request.POST.get('new_password2') if new_password1 !=", "if user.is_active: return redirect('login-page') user.is_active = True user.save() messages.success(request, 'Account successfully activated') return", "else: messages.error(request, 'Incorrect link or password is already changed') return redirect('login-page') except ObjectDoesNotExist:", "already exists') return render(request, 'register.html') elif password != <PASSWORD>: messages.error(request, 'Passwords mismatch') return", "def post(self, request, uidb64, token): id = force_str(urlsafe_base64_decode(uidb64)) user = User.objects.get(id=id) password1 =", "redirect('login-page') except ObjectDoesNotExist: messages.error(request, 'Incorrect link or password is already changed') return redirect('login-page')", "render(request, 'login.html') def post(self, request): try: email = request.POST.get('email') password = request.POST.get('password') user", "new_password2 = request.POST.get('new_password2') if new_password1 != new_password2: messages.error(request, 'Passwords mismatch') return render(request, 'change-password.html')", "django.views import View from main.models import Donation, DonationCategories, \\ TokenTemporaryStorage from django.contrib.auth.models import", "urlsafe_base64_encode(force_bytes(user.pk)) token = token_generator.make_token(user) TokenTemporaryStorage.objects.create(user_id=user.id, 
token=token) domain = get_current_site(request).domain link = reverse('activate-page', kwargs={'uidb64':", "fail_silently=False, ) messages.success(request, 'Successfully sent') return redirect(f'/panel/{request.user.id}/') class UserEditView(View): def get(self, request, user_id):", "import messages from django.core.mail import send_mail from django.urls import reverse from django.utils.encoding import", "first_name user.last_name = last_name user.email = email user.save() messages.success(request, 'Data has been changed')", "render(request, 'password-reset.html') class PasswordResetVerificationView(View): def get(self, request, uidb64, token): try: id = force_str(urlsafe_base64_decode(uidb64))", "request.POST.get('email') password = request.POST.get('password') password2 = request.POST.get('password2') if len(password) < 8 or len(password2)", "= request.POST.get('password2') if password != <PASSWORD>: messages.error(request, 'Passwords mismatch') return render(request, 'user-edit.html') user", "login(request, user) return redirect('/') elif not User.objects.get(email=email).check_password(password): messages.error(request, 'Incorrect password') return render(request, 'login.html')", "or any(c.isupper() for c in password2) is False \\ or any(c.islower() for c", "class PasswordResetView(View): def get(self, request): return render(request, 'password-reset.html') def post(self, request): email =", "activated') return redirect('login-page') class LogoutView(View): def get(self, request): logout(request) return redirect('/') class UserPanelView(View):", "user_id): if request.user.id != user_id: return redirect(f'/edit/{request.user.id}/') return render(request, 'user-edit.html') def post(self, request,", "return render(request, 'user_panel.html', {'donations': donations, 'donation_categories': donation_categories}) def post(self, request, user_id): name =", "'donation_categories': donation_categories}) def post(self, request, user_id): name = request.POST.get('name') 
surname = request.POST.get('surname') message", "user is None: messages.error(request, 'Something went wrong') return render(request, 'change-password.html') login(request, new_user) messages.success(request,", "= message administrators = User.objects.filter(is_superuser=True) if not name or not surname or not", "redirect from django.views import View from main.models import Donation, DonationCategories, \\ TokenTemporaryStorage from", "successfully activated') return redirect('login-page') else: messages.error(request, 'Incorrect link or account is already activated')", "return render(request, 'register.html') elif any(not c.isalnum() for c in password2) is False \\", "\\ .order_by('date_taken').order_by('time_taken').order_by('is_taken') donation_categories = DonationCategories.objects.all() return render(request, 'user_panel.html', {'donations': donations, 'donation_categories': donation_categories}) def", "View from main.models import Donation, DonationCategories, \\ TokenTemporaryStorage from django.contrib.auth.models import User from", "messages.error(request, 'Incorrect password') return render(request, 'login.html') except ObjectDoesNotExist: messages.error(request, 'Given e-mail does not", "\\ or any(c.isupper() for c in password2) is False \\ or any(c.islower() for", "already changed') return redirect('login-page') def post(self, request, uidb64, token): id = force_str(urlsafe_base64_decode(uidb64)) user", "changed') return redirect(f'/edit/{request.user.id}/') class PasswordChangeView(View): def get(self, request, user_id): if request.user.id != user_id:", "urlsafe_base64_encode(force_bytes(user.pk)) token = token_generator.make_token(user) TokenTemporaryStorage.objects.create(user_id=user.id, token=token) domain = get_current_site(request).domain link = reverse('password-reset-verification', kwargs={'uidb64':", "ObjectDoesNotExist: messages.error(request, 'Incorrect link or account is already activated') return redirect('login-page') class 
LogoutView(View):", "= 'Activate your account' activation_url = f'http://{domain}{link}' email_body = f'Hello {user}, your activation", "< 8: messages.error(request, 'Password too short (Min. 8 characters)') return render(request, 'register.html') elif", "messages.error(request, 'Please fill all fields correctly') return render(request, 'user-edit.html') password = request.POST.get('password') password2", "ObjectDoesNotExist: messages.error(request, 'Given e-mail does not exist in the database') return render(request, 'login.html')", "redirect('login-page') else: messages.error(request, 'Incorrect link or account is already activated') return redirect('login-page') except", "[email], fail_silently=False, ) messages.success(request, 'Successfully sent') return redirect(f'/panel/{request.user.id}/') class UserEditView(View): def get(self, request,", "for c in password2) is False \\ or any(c.isupper() for c in password2)", "'Passwords mismatch') return render(request, 'user-edit.html') user = authenticate(request, username=request.user.email, password=<PASSWORD>) if user is", "send_mail from django.urls import reverse from django.utils.encoding import force_bytes, force_str, DjangoUnicodeDecodeError from django.utils.http", "render(request, 'register.html') class VerificationView(View): def get(self, request, uidb64, token): try: id = force_str(urlsafe_base64_decode(uidb64))", "form(Sent by user {name} {surname}' email_body = message administrators = User.objects.filter(is_superuser=True) if not", "DonationCategories, \\ TokenTemporaryStorage from django.contrib.auth.models import User from django.contrib.auth import authenticate, login, logout", "request.POST.get('message') email_subject = f'Contact form(Sent by user {name} {surname}' email_body = message administrators", "last_name=surname, email=email) user.set_password(<PASSWORD>) user.is_active = False user.save() uidb64 = urlsafe_base64_encode(force_bytes(user.pk)) token = token_generator.make_token(user)", 
"django.contrib.sites.shortcuts import get_current_site from .utils import token_generator from django.core.exceptions import ObjectDoesNotExist class LoginView(View):", "not exist in the database') return render(request, 'login.html') class RegisterView(View): def get(self, request):", "True user.save() messages.success(request, 'Account successfully activated') return redirect('login-page') else: messages.error(request, 'Incorrect link or", "'Activate your account' activation_url = f'http://{domain}{link}' email_body = f'Hello {user}, your activation link:", "= TokenTemporaryStorage.objects.get(user=user).token if token == stored_token: TokenTemporaryStorage.objects.get(user=user).delete() if not token_generator.check_token(user, token): messages.error(request, 'Account", "password = request.POST.get('password') password2 = request.POST.get('password2') if len(password) < 8 or len(password2) <", "urlsafe_base64_decode from django.contrib.sites.shortcuts import get_current_site from .utils import token_generator from django.core.exceptions import ObjectDoesNotExist", "already activated') return redirect('login-page') except ObjectDoesNotExist: messages.error(request, 'Incorrect link or account is already", "user) return redirect('/') elif not User.objects.get(email=email).check_password(password): messages.error(request, 'Incorrect password') return render(request, 'login.html') except", "from django.views import View from main.models import Donation, DonationCategories, \\ TokenTemporaryStorage from django.contrib.auth.models", "reverse from django.utils.encoding import force_bytes, force_str, DjangoUnicodeDecodeError from django.utils.http import urlsafe_base64_encode, urlsafe_base64_decode from", "user is None: messages.error(request, 'Incorrect password') return render(request, 'user-edit.html') user.first_name = first_name user.last_name", "id = force_str(urlsafe_base64_decode(uidb64)) user = User.objects.get(id=id) stored_token = 
TokenTemporaryStorage.objects.get(user=user).token if token == stored_token:", "elif not User.objects.get(email=email).check_password(password): messages.error(request, 'Incorrect password') return render(request, 'login.html') except ObjectDoesNotExist: messages.error(request, 'Given", "password2) is False: messages.error(request, 'The password does not have all special characters' '(There", "redirect(f'/panel/{request.user.id}/') class UserEditView(View): def get(self, request, user_id): if request.user.id != user_id: return redirect(f'/edit/{request.user.id}/')", "request.POST.get('password2'): messages.error(request, 'Please fill all fields correctly') return render(request, 'user-edit.html') password = request.POST.get('password')", "get_current_site(request).domain link = reverse('password-reset-verification', kwargs={'uidb64': uidb64, 'token': token}) email_subject = 'Password reset' activation_url", "8 or len(password2) < 8: messages.error(request, 'Password too short (Min. 8 characters)') return", "request.POST.get('old_password') or not request.POST.get('new_password1') or not request.POST.get( 'new_password2'): messages.error(request, 'Please fill all fields", "'change-password.html') def post(self, request, user_id): if not request.POST.get('old_password') or not request.POST.get('new_password1') or not", "force_str(urlsafe_base64_decode(uidb64)) user = User.objects.get(id=id) stored_token = TokenTemporaryStorage.objects.get(user=user).token if token == stored_token: if not", "from main.models import Donation, DonationCategories, \\ TokenTemporaryStorage from django.contrib.auth.models import User from django.contrib.auth", "= request.POST.get('last_name') email = request.POST.get('email') if not request.POST.get('password') or not request.POST.get('password2'): messages.error(request, 'Please", "should be letters, lowercase letters, numbers and special characters)') return render(request, 'register.html') elif", "by user {name} {surname}' email_body = 
message administrators = User.objects.filter(is_superuser=True) if not name", "email_body = f'Hello {user}, your activation link: {activation_url}' send_mail( email_subject, email_body, '<EMAIL>', [email],", "messages.success(request, 'Data has been changed') return redirect(f'/edit/{request.user.id}/') class PasswordChangeView(View): def get(self, request, user_id):", "password') return render(request, 'login.html') except ObjectDoesNotExist: messages.error(request, 'Given e-mail does not exist in", "first_name = request.POST.get('first_name') last_name = request.POST.get('last_name') email = request.POST.get('email') if not request.POST.get('password') or", ".order_by('date_taken').order_by('time_taken').order_by('is_taken') donation_categories = DonationCategories.objects.all() return render(request, 'user_panel.html', {'donations': donations, 'donation_categories': donation_categories}) def post(self,", "ObjectDoesNotExist: messages.error(request, 'Incorrect e-mail') return render(request, 'password-reset.html') class PasswordResetVerificationView(View): def get(self, request, uidb64,", "fill all fields correctly') return render(request, 'user-edit.html') password = request.POST.get('password') password2 = request.POST.get('password2')", "return redirect('login-page') if user.is_active: return redirect('login-page') user.is_active = True user.save() messages.success(request, 'Account successfully", "Donation.objects.filter(user_id=user_id).order_by('date_added') \\ .order_by('date_taken').order_by('time_taken').order_by('is_taken') donation_categories = DonationCategories.objects.all() return render(request, 'user_panel.html', {'donations': donations, 'donation_categories': donation_categories})", "request.POST.get('name') surname = request.POST.get('surname') message = request.POST.get('message') email_subject = f'Contact form(Sent by user", "from django.utils.encoding import force_bytes, force_str, DjangoUnicodeDecodeError from django.utils.http import 
urlsafe_base64_encode, urlsafe_base64_decode from django.contrib.sites.shortcuts", "User.objects.filter(is_superuser=True) if not name or not surname or not message: messages.error(request, 'Please fill", "request.POST.get('email') try: user = User.objects.get(email=email) uidb64 = urlsafe_base64_encode(force_bytes(user.pk)) token = token_generator.make_token(user) TokenTemporaryStorage.objects.create(user_id=user.id, token=token)", "if not token_generator.check_token(user, token): messages.error(request, 'Account is already activated') return redirect('login-page') if user.is_active:", "user_id): donations = Donation.objects.filter(user_id=user_id).order_by('date_added') \\ .order_by('date_taken').order_by('time_taken').order_by('is_taken') donation_categories = DonationCategories.objects.all() return render(request, 'user_panel.html', {'donations':", "user = User.objects.get(id=user_id) first_name = request.POST.get('first_name') last_name = request.POST.get('last_name') email = request.POST.get('email') if", "user = User.objects.get(id=id) stored_token = TokenTemporaryStorage.objects.get(user=user).token if token == stored_token: TokenTemporaryStorage.objects.get(user=user).delete() if not", "TokenTemporaryStorage.objects.get(user=user).token if token == stored_token: if not token_generator.check_token(user, token): messages.error(request, 'Password has already", "False user.save() uidb64 = urlsafe_base64_encode(force_bytes(user.pk)) token = token_generator.make_token(user) TokenTemporaryStorage.objects.create(user_id=user.id, token=token) domain = get_current_site(request).domain", "'Something went wrong') return render(request, 'change-password.html') login(request, new_user) messages.success(request, 'Data successfully changed') return", "import get_current_site from .utils import token_generator from django.core.exceptions import ObjectDoesNotExist class LoginView(View): def", "class UserEditView(View): def get(self, request, user_id): if request.user.id != 
user_id: return redirect(f'/edit/{request.user.id}/') return", "request): return render(request, 'password-reset.html') def post(self, request): email = request.POST.get('email') try: user =", "redirect('/') for administrator in administrators: email = administrator.email send_mail( email_subject, email_body, '<EMAIL>', [email],", "for further information') return render(request, 'register.html') class VerificationView(View): def get(self, request, uidb64, token):", "message = request.POST.get('message') email_subject = f'Contact form(Sent by user {name} {surname}' email_body =", "domain = get_current_site(request).domain link = reverse('activate-page', kwargs={'uidb64': uidb64, 'token': token}) email_subject = 'Activate", "new_password1 != new_password2: messages.error(request, 'Passwords mismatch') return render(request, 'change-password.html') user.set_password(<PASSWORD>) user.save() new_user =", "reset link: {activation_url}' send_mail( email_subject, email_body, '<EMAIL>', [email], fail_silently=False, ) messages.success(request, 'Check your", "request, user_id): if request.user.id != user_id: return redirect(f'/edit/{request.user.id}/') return render(request, 'user-edit.html') def post(self,", "class UserPanelView(View): def get(self, request, user_id): donations = Donation.objects.filter(user_id=user_id).order_by('date_added') \\ .order_by('date_taken').order_by('time_taken').order_by('is_taken') donation_categories =", "or not message: messages.error(request, 'Please fill all fields correctly') return redirect('/') for administrator", "password does not have all special characters' '(There should be letters, lowercase letters,", "send_mail( email_subject, email_body, '<EMAIL>', [email], fail_silently=False, ) messages.success(request, 'Check your e-mail inbox') return", "else: messages.error(request, 'Incorrect link or account is already activated') return redirect('login-page') except ObjectDoesNotExist:", "not token_generator.check_token(user, token): 
messages.error(request, 'Password has already been changed') return redirect('login-page') return render(request,", "uidb64, 'token': token}) email_subject = 'Password reset' activation_url = f'http://{domain}{link}' email_body = f'Hello", "user_id): if request.user.id != user_id: return redirect(f'/edit/{request.user.id}/') return render(request, 'change-password.html') def post(self, request,", "have all special characters' '(There should be letters, lowercase letters, numbers and special", "'change-password.html') old_password = request.POST.get('old_password') user = authenticate(request, username=request.user.email, password=old_password) if user is None:", "TokenTemporaryStorage from django.contrib.auth.models import User from django.contrib.auth import authenticate, login, logout from django.contrib", "from django.urls import reverse from django.utils.encoding import force_bytes, force_str, DjangoUnicodeDecodeError from django.utils.http import", "request.POST.get('<PASSWORD>') password2 = request.POST.get('password2') if password1 != <PASSWORD>: messages.error(request, 'Passwords mismatch') return render(request,", "django.utils.encoding import force_bytes, force_str, DjangoUnicodeDecodeError from django.utils.http import urlsafe_base64_encode, urlsafe_base64_decode from django.contrib.sites.shortcuts import", "surname = request.POST.get('surname') message = request.POST.get('message') email_subject = f'Contact form(Sent by user {name}", "or len(password2) < 8: messages.error(request, 'Password too short (Min. 
8 characters)') return render(request,", "user = User.objects.create_user(username=email, first_name=name, last_name=surname, email=email) user.set_password(<PASSWORD>) user.is_active = False user.save() uidb64 =", "'<EMAIL>', [email], fail_silently=False, ) messages.success(request, 'Check your e-mail account for further information') return", "f'Hello {user}, twój password reset link: {activation_url}' send_mail( email_subject, email_body, '<EMAIL>', [email], fail_silently=False,", "= authenticate(request, username=request.user.email, password=<PASSWORD>) if user is None: messages.error(request, 'Incorrect password') return render(request,", "request, user_id): if request.user.id != user_id: return redirect(f'/edit/{request.user.id}/') return render(request, 'change-password.html') def post(self,", "User.objects.get(id=user_id) first_name = request.POST.get('first_name') last_name = request.POST.get('last_name') email = request.POST.get('email') if not request.POST.get('password')", "password1 = request.POST.get('<PASSWORD>') password2 = request.POST.get('password2') if password1 != <PASSWORD>: messages.error(request, 'Passwords mismatch')", "email_body = f'Hello {user}, twój password reset link: {activation_url}' send_mail( email_subject, email_body, '<EMAIL>',", "'token': token}) email_subject = 'Activate your account' activation_url = f'http://{domain}{link}' email_body = f'Hello", "= get_current_site(request).domain link = reverse('activate-page', kwargs={'uidb64': uidb64, 'token': token}) email_subject = 'Activate your", "render(request, 'change-password.html') old_password = request.POST.get('old_password') user = authenticate(request, username=request.user.email, password=old_password) if user is", "return render(request, 'change-password.html') new_password1 = request.POST.get('new_password1') new_password2 = request.POST.get('new_password2') if new_password1 != new_password2:", "messages.error(request, 'Incorrect link or password is already changed') 
return redirect('login-page') except ObjectDoesNotExist: messages.error(request,", "token == stored_token: if not token_generator.check_token(user, token): messages.error(request, 'Password has already been changed')", "too short (Min. 8 characters)') return render(request, 'register.html') elif any(not c.isalnum() for c", "in password2) is False \\ or any(c.islower() for c in password2) is False", "return render(request, 'register.html') class VerificationView(View): def get(self, request, uidb64, token): try: id =", "all special characters' '(There should be letters, lowercase letters, numbers and special characters)')", "= request.POST.get('surname') message = request.POST.get('message') email_subject = f'Contact form(Sent by user {name} {surname}'", "'register.html') def post(self, request): name = request.POST.get('name') surname = request.POST.get('surname') email = request.POST.get('email')", "< 8 or len(password2) < 8: messages.error(request, 'Password too short (Min. 8 characters)')", "try: id = force_str(urlsafe_base64_decode(uidb64)) user = User.objects.get(id=id) stored_token = TokenTemporaryStorage.objects.get(user=user).token if token ==", "request.user.id != user_id: return redirect(f'/edit/{request.user.id}/') return render(request, 'user-edit.html') def post(self, request, user_id): user", "== stored_token: if not token_generator.check_token(user, token): messages.error(request, 'Password has already been changed') return", "render(request, 'change-password.html') user.set_password(<PASSWORD>) user.save() new_user = authenticate(request, username=request.user.email, password=<PASSWORD>) if user is None:", "def get(self, request, uidb64, token): try: id = force_str(urlsafe_base64_decode(uidb64)) user = User.objects.get(id=id) stored_token", "last_name user.email = email user.save() messages.success(request, 'Data has been changed') return redirect(f'/edit/{request.user.id}/') class", "f'Contact form(Sent by user {name} {surname}' email_body = 
message administrators = User.objects.filter(is_superuser=True) if", "'Passwords mismatch') return render(request, 'register.html') user = User.objects.create_user(username=email, first_name=name, last_name=surname, email=email) user.set_password(<PASSWORD>) user.is_active", "surname = request.POST.get('surname') email = request.POST.get('email') password = request.POST.get('password') password2 = request.POST.get('password2') if", "request): return render(request, 'login.html') def post(self, request): try: email = request.POST.get('email') password =", "does not have all special characters' '(There should be letters, lowercase letters, numbers", "render(request, 'user_panel.html', {'donations': donations, 'donation_categories': donation_categories}) def post(self, request, user_id): name = request.POST.get('name')", "account for further information') return render(request, 'register.html') class VerificationView(View): def get(self, request, uidb64,", "fail_silently=False, ) messages.success(request, 'Check your e-mail inbox') return render(request, 'password-reset.html') except ObjectDoesNotExist: messages.error(request,", "activation_url = f'http://{domain}{link}' email_body = f'Hello {user}, twój password reset link: {activation_url}' send_mail(", "or any(c.islower() for c in password2) is False \\ or any(c.isdigit() for c", "= token_generator.make_token(user) TokenTemporaryStorage.objects.create(user_id=user.id, token=token) domain = get_current_site(request).domain link = reverse('password-reset-verification', kwargs={'uidb64': uidb64, 'token':", "= request.POST.get('email') password = request.POST.get('password') password2 = request.POST.get('password2') if len(password) < 8 or", "render(request, 'register.html') elif any(not c.isalnum() for c in password2) is False \\ or", "e-mail account for further information') return render(request, 'register.html') class VerificationView(View): def get(self, request,", "uidb64, 'token': token}) email_subject = 
'Activate your account' activation_url = f'http://{domain}{link}' email_body =", "link = reverse('password-reset-verification', kwargs={'uidb64': uidb64, 'token': token}) email_subject = 'Password reset' activation_url =", "'login.html') except ObjectDoesNotExist: messages.error(request, 'Given e-mail does not exist in the database') return", "authenticate, login, logout from django.contrib import messages from django.core.mail import send_mail from django.urls", "uidb64 = urlsafe_base64_encode(force_bytes(user.pk)) token = token_generator.make_token(user) TokenTemporaryStorage.objects.create(user_id=user.id, token=token) domain = get_current_site(request).domain link =", "'Account is already activated') return redirect('login-page') if user.is_active: return redirect('login-page') user.is_active = True", "authenticate(request, username=request.user.email, password=<PASSWORD>) if user is None: messages.error(request, 'Incorrect password') return render(request, 'user-edit.html')", "'register.html') elif User.objects.filter(username=email): messages.error(request, 'A user with the given e-mail already exists') return", "= force_str(urlsafe_base64_decode(uidb64)) user = User.objects.get(id=id) stored_token = TokenTemporaryStorage.objects.get(user=user).token if token == stored_token: TokenTemporaryStorage.objects.get(user=user).delete()", "'Incorrect link or account is already activated') return redirect('login-page') except ObjectDoesNotExist: messages.error(request, 'Incorrect", "render(request, 'user-edit.html') user = authenticate(request, username=request.user.email, password=<PASSWORD>) if user is None: messages.error(request, 'Incorrect", "= False user.save() uidb64 = urlsafe_base64_encode(force_bytes(user.pk)) token = token_generator.make_token(user) TokenTemporaryStorage.objects.create(user_id=user.id, token=token) domain =", "for administrator in administrators: email = administrator.email send_mail( email_subject, email_body, '<EMAIL>', [email], 
fail_silently=False,", "'Data has been changed') return redirect(f'/edit/{request.user.id}/') class PasswordChangeView(View): def get(self, request, user_id): if", "password incorrect') return render(request, 'change-password.html') new_password1 = request.POST.get('new_password1') new_password2 = request.POST.get('new_password2') if new_password1", "class LogoutView(View): def get(self, request): logout(request) return redirect('/') class UserPanelView(View): def get(self, request,", "fields correctly') return render(request, 'user-edit.html') password = request.POST.get('password') password2 = request.POST.get('password2') if password", "correctly') return render(request, 'change-password.html') old_password = request.POST.get('old_password') user = authenticate(request, username=request.user.email, password=old_password) if", "request.POST.get('password2') if password1 != <PASSWORD>: messages.error(request, 'Passwords mismatch') return render(request, 'new-password-form.html') user.set_password(<PASSWORD>) user.save()", "return render(request, 'password-reset.html') class PasswordResetVerificationView(View): def get(self, request, uidb64, token): try: id =", "with the given e-mail already exists') return render(request, 'register.html') elif password != <PASSWORD>:", "your e-mail account for further information') return render(request, 'register.html') class VerificationView(View): def get(self,", "donation_categories = DonationCategories.objects.all() return render(request, 'user_panel.html', {'donations': donations, 'donation_categories': donation_categories}) def post(self, request,", "donation_categories}) def post(self, request, user_id): name = request.POST.get('name') surname = request.POST.get('surname') message =", "[email], fail_silently=False, ) messages.success(request, 'Check your e-mail inbox') return render(request, 'password-reset.html') except ObjectDoesNotExist:", "return render(request, 'login.html') def post(self, request): try: email = 
request.POST.get('email') password = request.POST.get('password')", "messages.error(request, 'Something went wrong') return render(request, 'change-password.html') login(request, new_user) messages.success(request, 'Data successfully changed')", "if user is not None: login(request, user) return redirect('/') elif not User.objects.get(email=email).check_password(password): messages.error(request,", "password is already changed') return redirect('login-page') except ObjectDoesNotExist: messages.error(request, 'Incorrect link or password", "from .utils import token_generator from django.core.exceptions import ObjectDoesNotExist class LoginView(View): def get(self, request):", "email_subject = 'Activate your account' activation_url = f'http://{domain}{link}' email_body = f'Hello {user}, your", "UserPanelView(View): def get(self, request, user_id): donations = Donation.objects.filter(user_id=user_id).order_by('date_added') \\ .order_by('date_taken').order_by('time_taken').order_by('is_taken') donation_categories = DonationCategories.objects.all()", "<PASSWORD>: messages.error(request, 'Passwords mismatch') return render(request, 'user-edit.html') user = authenticate(request, username=request.user.email, password=<PASSWORD>) if", "= TokenTemporaryStorage.objects.get(user=user).token if token == stored_token: if not token_generator.check_token(user, token): messages.error(request, 'Password has", "not request.POST.get('new_password1') or not request.POST.get( 'new_password2'): messages.error(request, 'Please fill all fields correctly') return", "False \\ or any(c.islower() for c in password2) is False \\ or any(c.isdigit()", "post(self, request): name = request.POST.get('name') surname = request.POST.get('surname') email = request.POST.get('email') password =", "len(password) < 8 or len(password2) < 8: messages.error(request, 'Password too short (Min. 
8", "link or account is already activated') return redirect('login-page') except ObjectDoesNotExist: messages.error(request, 'Incorrect link", "is already activated') return redirect('login-page') if user.is_active: return redirect('login-page') user.is_active = True user.save()", "is False \\ or any(c.isupper() for c in password2) is False \\ or", "not surname or not message: messages.error(request, 'Please fill all fields correctly') return redirect('/')", "= request.POST.get('password') user = authenticate(request, username=email, password=password) if user is not None: login(request,", "= first_name user.last_name = last_name user.email = email user.save() messages.success(request, 'Data has been", "link or password is already changed') return redirect('login-page') def post(self, request, uidb64, token):", "messages.error(request, 'A user with the given e-mail already exists') return render(request, 'register.html') elif", "'new-password-form.html') else: messages.error(request, 'Incorrect link or password is already changed') return redirect('login-page') except", "'Incorrect password') return render(request, 'user-edit.html') user.first_name = first_name user.last_name = last_name user.email =", "False \\ or any(c.isdigit() for c in password2) is False: messages.error(request, 'The password", "messages.error(request, 'Passwords mismatch') return render(request, 'register.html') user = User.objects.create_user(username=email, first_name=name, last_name=surname, email=email) user.set_password(<PASSWORD>)", "request): logout(request) return redirect('/') class UserPanelView(View): def get(self, request, user_id): donations = Donation.objects.filter(user_id=user_id).order_by('date_added')", "is None: messages.error(request, 'Old password incorrect') return render(request, 'change-password.html') new_password1 = request.POST.get('new_password1') new_password2", "= last_name user.email = email user.save() messages.success(request, 'Data has been changed') return 
redirect(f'/edit/{request.user.id}/')", "in the database') return render(request, 'login.html') class RegisterView(View): def get(self, request): return render(request,", "def post(self, request): name = request.POST.get('name') surname = request.POST.get('surname') email = request.POST.get('email') password", "link or account is already activated') return redirect('login-page') class LogoutView(View): def get(self, request):", "request, uidb64, token): id = force_str(urlsafe_base64_decode(uidb64)) user = User.objects.get(id=id) password1 = request.POST.get('<PASSWORD>') password2", "8: messages.error(request, 'Password too short (Min. 8 characters)') return render(request, 'register.html') elif any(not", "messages.error(request, 'The password does not have all special characters' '(There should be letters,", "password2 = request.POST.get('password2') if password != <PASSWORD>: messages.error(request, 'Passwords mismatch') return render(request, 'user-edit.html')", "username=request.user.email, password=<PASSWORD>) if user is None: messages.error(request, 'Incorrect password') return render(request, 'user-edit.html') user.first_name", "or not surname or not message: messages.error(request, 'Please fill all fields correctly') return", "user = User.objects.get(id=id) password1 = request.POST.get('<PASSWORD>') password2 = request.POST.get('password2') if password1 != <PASSWORD>:", "messages.success(request, 'Check your e-mail account for further information') return render(request, 'register.html') class VerificationView(View):", "ObjectDoesNotExist: messages.error(request, 'Incorrect link or password is already changed') return redirect('login-page') def post(self,", "redirect('/') elif not User.objects.get(email=email).check_password(password): messages.error(request, 'Incorrect password') return render(request, 'login.html') except ObjectDoesNotExist: messages.error(request,", "import force_bytes, force_str, DjangoUnicodeDecodeError from django.utils.http import 
urlsafe_base64_encode, urlsafe_base64_decode from django.contrib.sites.shortcuts import get_current_site", "f'http://{domain}{link}' email_body = f'Hello {user}, your activation link: {activation_url}' send_mail( email_subject, email_body, '<EMAIL>',", "return render(request, 'register.html') elif User.objects.filter(username=email): messages.error(request, 'A user with the given e-mail already", "user.save() messages.success(request, 'Account successfully activated') return redirect('login-page') else: messages.error(request, 'Incorrect link or account", "return render(request, 'user-edit.html') def post(self, request, user_id): user = User.objects.get(id=user_id) first_name = request.POST.get('first_name')", "messages.error(request, 'Passwords mismatch') return render(request, 'change-password.html') user.set_password(<PASSWORD>) user.save() new_user = authenticate(request, username=request.user.email, password=<PASSWORD>)", "password=<PASSWORD>) if user is None: messages.error(request, 'Something went wrong') return render(request, 'change-password.html') login(request,", "email = request.POST.get('email') if not request.POST.get('password') or not request.POST.get('password2'): messages.error(request, 'Please fill all", "correctly') return render(request, 'user-edit.html') password = request.POST.get('password') password2 = request.POST.get('password2') if password !=", "messages.error(request, 'Incorrect password') return render(request, 'user-edit.html') user.first_name = first_name user.last_name = last_name user.email", "django.contrib import messages from django.core.mail import send_mail from django.urls import reverse from django.utils.encoding", "email_subject, email_body, '<EMAIL>', [email], fail_silently=False, ) messages.success(request, 'Check your e-mail account for further", "User.objects.get(id=id) stored_token = TokenTemporaryStorage.objects.get(user=user).token if token == stored_token: TokenTemporaryStorage.objects.get(user=user).delete() if 
not token_generator.check_token(user, token):", "any(c.islower() for c in password2) is False \\ or any(c.isdigit() for c in", "TokenTemporaryStorage.objects.create(user_id=user.id, token=token) domain = get_current_site(request).domain link = reverse('password-reset-verification', kwargs={'uidb64': uidb64, 'token': token}) email_subject", "= get_current_site(request).domain link = reverse('password-reset-verification', kwargs={'uidb64': uidb64, 'token': token}) email_subject = 'Password reset'", "force_str(urlsafe_base64_decode(uidb64)) user = User.objects.get(id=id) stored_token = TokenTemporaryStorage.objects.get(user=user).token if token == stored_token: TokenTemporaryStorage.objects.get(user=user).delete() if", "= request.POST.get('password') password2 = request.POST.get('password2') if len(password) < 8 or len(password2) < 8:", "user_id: return redirect(f'/edit/{request.user.id}/') return render(request, 'change-password.html') def post(self, request, user_id): if not request.POST.get('old_password')", "[email], fail_silently=False, ) messages.success(request, 'Check your e-mail account for further information') return render(request,", "'password-reset.html') class PasswordResetVerificationView(View): def get(self, request, uidb64, token): try: id = force_str(urlsafe_base64_decode(uidb64)) user", "User.objects.get(id=id) password1 = request.POST.get('<PASSWORD>') password2 = request.POST.get('password2') if password1 != <PASSWORD>: messages.error(request, 'Passwords", "request.POST.get('name') surname = request.POST.get('surname') email = request.POST.get('email') password = request.POST.get('password') password2 = request.POST.get('password2')", "'change-password.html') login(request, new_user) messages.success(request, 'Data successfully changed') return redirect(f'/edit/{request.user.id}/') class PasswordResetView(View): def get(self,", "redirect('login-page') return render(request, 'new-password-form.html') else: messages.error(request, 'Incorrect link 
or password is already changed')", ") messages.success(request, 'Check your e-mail inbox') return render(request, 'password-reset.html') except ObjectDoesNotExist: messages.error(request, 'Incorrect", "activated') return redirect('login-page') if user.is_active: return redirect('login-page') user.is_active = True user.save() messages.success(request, 'Account", "<filename>charity_donation_app/users/views.py from django.shortcuts import render, redirect from django.views import View from main.models import", "all fields correctly') return render(request, 'change-password.html') old_password = request.POST.get('old_password') user = authenticate(request, username=request.user.email,", "reverse('password-reset-verification', kwargs={'uidb64': uidb64, 'token': token}) email_subject = 'Password reset' activation_url = f'http://{domain}{link}' email_body", "'<EMAIL>', [email], fail_silently=False, ) messages.success(request, 'Check your e-mail inbox') return render(request, 'password-reset.html') except", "!= user_id: return redirect(f'/edit/{request.user.id}/') return render(request, 'user-edit.html') def post(self, request, user_id): user =", "None: login(request, user) return redirect('/') elif not User.objects.get(email=email).check_password(password): messages.error(request, 'Incorrect password') return render(request,", "administrator.email send_mail( email_subject, email_body, '<EMAIL>', [email], fail_silently=False, ) messages.success(request, 'Successfully sent') return redirect(f'/panel/{request.user.id}/')", "= User.objects.get(id=id) stored_token = TokenTemporaryStorage.objects.get(user=user).token if token == stored_token: TokenTemporaryStorage.objects.get(user=user).delete() if not token_generator.check_token(user,", "User.objects.get(email=email).check_password(password): messages.error(request, 'Incorrect password') return render(request, 'login.html') except ObjectDoesNotExist: messages.error(request, 'Given e-mail does", "is False \\ or any(c.isdigit() 
for c in password2) is False: messages.error(request, 'The", "password = request.POST.get('password') password2 = request.POST.get('password2') if password != <PASSWORD>: messages.error(request, 'Passwords mismatch')", "not have all special characters' '(There should be letters, lowercase letters, numbers and", "is already activated') return redirect('login-page') class LogoutView(View): def get(self, request): logout(request) return redirect('/')", "email_body, '<EMAIL>', [email], fail_silently=False, ) messages.success(request, 'Check your e-mail inbox') return render(request, 'password-reset.html')", "is None: messages.error(request, 'Something went wrong') return render(request, 'change-password.html') login(request, new_user) messages.success(request, 'Data", "login, logout from django.contrib import messages from django.core.mail import send_mail from django.urls import", "not None: login(request, user) return redirect('/') elif not User.objects.get(email=email).check_password(password): messages.error(request, 'Incorrect password') return", "special characters)') return render(request, 'register.html') elif User.objects.filter(username=email): messages.error(request, 'A user with the given", "not User.objects.get(email=email).check_password(password): messages.error(request, 'Incorrect password') return render(request, 'login.html') except ObjectDoesNotExist: messages.error(request, 'Given e-mail", "<PASSWORD>: messages.error(request, 'Passwords mismatch') return render(request, 'register.html') user = User.objects.create_user(username=email, first_name=name, last_name=surname, email=email)", "stored_token: TokenTemporaryStorage.objects.get(user=user).delete() if not token_generator.check_token(user, token): messages.error(request, 'Account is already activated') return redirect('login-page')", "'Incorrect link or account is already activated') return redirect('login-page') class LogoutView(View): def get(self,", "exists') return render(request, 
'register.html') elif password != <PASSWORD>: messages.error(request, 'Passwords mismatch') return render(request,", "get(self, request, uidb64, token): try: id = force_str(urlsafe_base64_decode(uidb64)) user = User.objects.get(id=id) stored_token =", "'Check your e-mail inbox') return render(request, 'password-reset.html') except ObjectDoesNotExist: messages.error(request, 'Incorrect e-mail') return", "return render(request, 'new-password-form.html') else: messages.error(request, 'Incorrect link or password is already changed') return", "has already been changed') return redirect('login-page') return render(request, 'new-password-form.html') else: messages.error(request, 'Incorrect link", "be letters, lowercase letters, numbers and special characters)') return render(request, 'register.html') elif User.objects.filter(username=email):", "TokenTemporaryStorage.objects.get(user=user).delete() if not token_generator.check_token(user, token): messages.error(request, 'Account is already activated') return redirect('login-page') if", "'password-reset.html') except ObjectDoesNotExist: messages.error(request, 'Incorrect e-mail') return render(request, 'password-reset.html') class PasswordResetVerificationView(View): def get(self,", "messages.error(request, 'Old password incorrect') return render(request, 'change-password.html') new_password1 = request.POST.get('new_password1') new_password2 = request.POST.get('new_password2')", "or account is already activated') return redirect('login-page') except ObjectDoesNotExist: messages.error(request, 'Incorrect link or", "does not exist in the database') return render(request, 'login.html') class RegisterView(View): def get(self,", "'user-edit.html') password = request.POST.get('password') password2 = request.POST.get('password2') if password != <PASSWORD>: messages.error(request, 'Passwords", "render(request, 'user-edit.html') user.first_name = first_name user.last_name = last_name user.email = email user.save() 
messages.success(request,", "return render(request, 'user-edit.html') user = authenticate(request, username=request.user.email, password=<PASSWORD>) if user is None: messages.error(request,", "activation link: {activation_url}' send_mail( email_subject, email_body, '<EMAIL>', [email], fail_silently=False, ) messages.success(request, 'Check your", "User.objects.filter(username=email): messages.error(request, 'A user with the given e-mail already exists') return render(request, 'register.html')", "stored_token: if not token_generator.check_token(user, token): messages.error(request, 'Password has already been changed') return redirect('login-page')", "any(c.isupper() for c in password2) is False \\ or any(c.islower() for c in", "request.POST.get('password') or not request.POST.get('password2'): messages.error(request, 'Please fill all fields correctly') return render(request, 'user-edit.html')", "post(self, request, uidb64, token): id = force_str(urlsafe_base64_decode(uidb64)) user = User.objects.get(id=id) password1 = request.POST.get('<PASSWORD>')", "'Incorrect link or password is already changed') return redirect('login-page') def post(self, request, uidb64,", "'Please fill all fields correctly') return render(request, 'change-password.html') old_password = request.POST.get('old_password') user =", "database') return render(request, 'login.html') class RegisterView(View): def get(self, request): return render(request, 'register.html') def", "user with the given e-mail already exists') return render(request, 'register.html') elif password !=", "import View from main.models import Donation, DonationCategories, \\ TokenTemporaryStorage from django.contrib.auth.models import User", "email_body, '<EMAIL>', [email], fail_silently=False, ) messages.success(request, 'Check your e-mail account for further information')", "!= new_password2: messages.error(request, 'Passwords mismatch') return render(request, 'change-password.html') user.set_password(<PASSWORD>) user.save() 
new_user = authenticate(request,", "old_password = request.POST.get('old_password') user = authenticate(request, username=request.user.email, password=old_password) if user is None: messages.error(request,", "activation_url = f'http://{domain}{link}' email_body = f'Hello {user}, your activation link: {activation_url}' send_mail( email_subject,", "user_id): user = User.objects.get(id=user_id) first_name = request.POST.get('first_name') last_name = request.POST.get('last_name') email = request.POST.get('email')", "get_current_site from .utils import token_generator from django.core.exceptions import ObjectDoesNotExist class LoginView(View): def get(self,", "given e-mail already exists') return render(request, 'register.html') elif password != <PASSWORD>: messages.error(request, 'Passwords", "fill all fields correctly') return render(request, 'change-password.html') old_password = request.POST.get('old_password') user = authenticate(request,", "{user}, twój password reset link: {activation_url}' send_mail( email_subject, email_body, '<EMAIL>', [email], fail_silently=False, )", "import reverse from django.utils.encoding import force_bytes, force_str, DjangoUnicodeDecodeError from django.utils.http import urlsafe_base64_encode, urlsafe_base64_decode", "'Password too short (Min. 
8 characters)') return render(request, 'register.html') elif any(not c.isalnum() for", "def get(self, request, user_id): donations = Donation.objects.filter(user_id=user_id).order_by('date_added') \\ .order_by('date_taken').order_by('time_taken').order_by('is_taken') donation_categories = DonationCategories.objects.all() return", "password1 != <PASSWORD>: messages.error(request, 'Passwords mismatch') return render(request, 'new-password-form.html') user.set_password(<PASSWORD>) user.save() TokenTemporaryStorage.objects.get(user=user).delete() messages.success(request,", "for c in password2) is False \\ or any(c.islower() for c in password2)", "if request.user.id != user_id: return redirect(f'/edit/{request.user.id}/') return render(request, 'change-password.html') def post(self, request, user_id):", "PasswordResetVerificationView(View): def get(self, request, uidb64, token): try: id = force_str(urlsafe_base64_decode(uidb64)) user = User.objects.get(id=id)", "return redirect('login-page') class LogoutView(View): def get(self, request): logout(request) return redirect('/') class UserPanelView(View): def", "'Old password incorrect') return render(request, 'change-password.html') new_password1 = request.POST.get('new_password1') new_password2 = request.POST.get('new_password2') if", "token): id = force_str(urlsafe_base64_decode(uidb64)) user = User.objects.get(id=id) password1 = request.POST.get('<PASSWORD>') password2 = request.POST.get('password2')", "= request.POST.get('email') try: user = User.objects.get(email=email) uidb64 = urlsafe_base64_encode(force_bytes(user.pk)) token = token_generator.make_token(user) TokenTemporaryStorage.objects.create(user_id=user.id,", "lowercase letters, numbers and special characters)') return render(request, 'register.html') elif User.objects.filter(username=email): messages.error(request, 'A", "import token_generator from django.core.exceptions import ObjectDoesNotExist class LoginView(View): def get(self, request): return 
render(request,", "= request.POST.get('name') surname = request.POST.get('surname') message = request.POST.get('message') email_subject = f'Contact form(Sent by", "c in password2) is False \\ or any(c.isdigit() for c in password2) is", "from django.core.mail import send_mail from django.urls import reverse from django.utils.encoding import force_bytes, force_str,", "has been changed') return redirect(f'/edit/{request.user.id}/') class PasswordChangeView(View): def get(self, request, user_id): if request.user.id", "changed') return redirect('login-page') except ObjectDoesNotExist: messages.error(request, 'Incorrect link or password is already changed')", "characters)') return render(request, 'register.html') elif any(not c.isalnum() for c in password2) is False", "c in password2) is False: messages.error(request, 'The password does not have all special", "'login.html') class RegisterView(View): def get(self, request): return render(request, 'register.html') def post(self, request): name", "= User.objects.create_user(username=email, first_name=name, last_name=surname, email=email) user.set_password(<PASSWORD>) user.is_active = False user.save() uidb64 = urlsafe_base64_encode(force_bytes(user.pk))", "password=password) if user is not None: login(request, user) return redirect('/') elif not User.objects.get(email=email).check_password(password):", "'user-edit.html') user = authenticate(request, username=request.user.email, password=<PASSWORD>) if user is None: messages.error(request, 'Incorrect password')", "from django.contrib.auth import authenticate, login, logout from django.contrib import messages from django.core.mail import", "= authenticate(request, username=email, password=password) if user is not None: login(request, user) return redirect('/')", "request.POST.get('email') if not request.POST.get('password') or not request.POST.get('password2'): messages.error(request, 'Please fill all fields correctly')", "link or password is already changed') return 
redirect('login-page') except ObjectDoesNotExist: messages.error(request, 'Incorrect link", "not request.POST.get('password') or not request.POST.get('password2'): messages.error(request, 'Please fill all fields correctly') return render(request,", "request, user_id): donations = Donation.objects.filter(user_id=user_id).order_by('date_added') \\ .order_by('date_taken').order_by('time_taken').order_by('is_taken') donation_categories = DonationCategories.objects.all() return render(request, 'user_panel.html',", "password=<PASSWORD>) if user is None: messages.error(request, 'Incorrect password') return render(request, 'user-edit.html') user.first_name =", "get(self, request, user_id): if request.user.id != user_id: return redirect(f'/edit/{request.user.id}/') return render(request, 'change-password.html') def", "user is None: messages.error(request, 'Old password incorrect') return render(request, 'change-password.html') new_password1 = request.POST.get('new_password1')", "get(self, request, user_id): if request.user.id != user_id: return redirect(f'/edit/{request.user.id}/') return render(request, 'user-edit.html') def", "!= user_id: return redirect(f'/edit/{request.user.id}/') return render(request, 'change-password.html') def post(self, request, user_id): if not", "email_subject = 'Password reset' activation_url = f'http://{domain}{link}' email_body = f'Hello {user}, twój password", "= User.objects.get(id=user_id) first_name = request.POST.get('first_name') last_name = request.POST.get('last_name') email = request.POST.get('email') if not", "login(request, new_user) messages.success(request, 'Data successfully changed') return redirect(f'/edit/{request.user.id}/') class PasswordResetView(View): def get(self, request):", "inbox') return render(request, 'password-reset.html') except ObjectDoesNotExist: messages.error(request, 'Incorrect e-mail') return render(request, 'password-reset.html') class", "user_id): name = request.POST.get('name') surname = 
request.POST.get('surname') message = request.POST.get('message') email_subject = f'Contact", "authenticate(request, username=request.user.email, password=old_password) if user is None: messages.error(request, 'Old password incorrect') return render(request,", "= reverse('password-reset-verification', kwargs={'uidb64': uidb64, 'token': token}) email_subject = 'Password reset' activation_url = f'http://{domain}{link}'", "the database') return render(request, 'login.html') class RegisterView(View): def get(self, request): return render(request, 'register.html')", "c in password2) is False \\ or any(c.isupper() for c in password2) is", "mismatch') return render(request, 'new-password-form.html') user.set_password(<PASSWORD>) user.save() TokenTemporaryStorage.objects.get(user=user).delete() messages.success(request, 'Password changed successfully') return redirect('login-page')", "your account' activation_url = f'http://{domain}{link}' email_body = f'Hello {user}, your activation link: {activation_url}'", "UserEditView(View): def get(self, request, user_id): if request.user.id != user_id: return redirect(f'/edit/{request.user.id}/') return render(request,", "and special characters)') return render(request, 'register.html') elif User.objects.filter(username=email): messages.error(request, 'A user with the", "def get(self, request): logout(request) return redirect('/') class UserPanelView(View): def get(self, request, user_id): donations", "post(self, request): email = request.POST.get('email') try: user = User.objects.get(email=email) uidb64 = urlsafe_base64_encode(force_bytes(user.pk)) token", "request): email = request.POST.get('email') try: user = User.objects.get(email=email) uidb64 = urlsafe_base64_encode(force_bytes(user.pk)) token =", "= authenticate(request, username=request.user.email, password=<PASSWORD>) if user is None: messages.error(request, 'Something went wrong') return", "letters, lowercase letters, numbers and special characters)') return 
render(request, 'register.html') elif User.objects.filter(username=email): messages.error(request,", "user.is_active = False user.save() uidb64 = urlsafe_base64_encode(force_bytes(user.pk)) token = token_generator.make_token(user) TokenTemporaryStorage.objects.create(user_id=user.id, token=token) domain", ") messages.success(request, 'Successfully sent') return redirect(f'/panel/{request.user.id}/') class UserEditView(View): def get(self, request, user_id): if", "changed') return redirect('login-page') return render(request, 'new-password-form.html') else: messages.error(request, 'Incorrect link or password is", "render(request, 'password-reset.html') except ObjectDoesNotExist: messages.error(request, 'Incorrect e-mail') return render(request, 'password-reset.html') class PasswordResetVerificationView(View): def", "f'Hello {user}, your activation link: {activation_url}' send_mail( email_subject, email_body, '<EMAIL>', [email], fail_silently=False, )", "get(self, request): logout(request) return redirect('/') class UserPanelView(View): def get(self, request, user_id): donations =", "PasswordResetView(View): def get(self, request): return render(request, 'password-reset.html') def post(self, request): email = request.POST.get('email')", "if not request.POST.get('password') or not request.POST.get('password2'): messages.error(request, 'Please fill all fields correctly') return", "successfully changed') return redirect(f'/edit/{request.user.id}/') class PasswordResetView(View): def get(self, request): return render(request, 'password-reset.html') def", "messages.success(request, 'Data successfully changed') return redirect(f'/edit/{request.user.id}/') class PasswordResetView(View): def get(self, request): return render(request,", "e-mail does not exist in the database') return render(request, 'login.html') class RegisterView(View): def", "(Min. 
8 characters)') return render(request, 'register.html') elif any(not c.isalnum() for c in password2)", "User.objects.get(id=id) stored_token = TokenTemporaryStorage.objects.get(user=user).token if token == stored_token: if not token_generator.check_token(user, token): messages.error(request,", "= User.objects.get(id=id) password1 = request.POST.get('<PASSWORD>') password2 = request.POST.get('password2') if password1 != <PASSWORD>: messages.error(request,", "name or not surname or not message: messages.error(request, 'Please fill all fields correctly')", "already activated') return redirect('login-page') class LogoutView(View): def get(self, request): logout(request) return redirect('/') class", "email_subject, email_body, '<EMAIL>', [email], fail_silently=False, ) messages.success(request, 'Check your e-mail inbox') return render(request,", "token_generator.make_token(user) TokenTemporaryStorage.objects.create(user_id=user.id, token=token) domain = get_current_site(request).domain link = reverse('password-reset-verification', kwargs={'uidb64': uidb64, 'token': token})", "'The password does not have all special characters' '(There should be letters, lowercase", "or account is already activated') return redirect('login-page') class LogoutView(View): def get(self, request): logout(request)", "DonationCategories.objects.all() return render(request, 'user_panel.html', {'donations': donations, 'donation_categories': donation_categories}) def post(self, request, user_id): name", "def post(self, request, user_id): if not request.POST.get('old_password') or not request.POST.get('new_password1') or not request.POST.get(", "request.POST.get('old_password') user = authenticate(request, username=request.user.email, password=old_password) if user is None: messages.error(request, 'Old password", "user_id): if not request.POST.get('old_password') or not request.POST.get('new_password1') or not request.POST.get( 'new_password2'): messages.error(request, 'Please", "new_user) 
messages.success(request, 'Data successfully changed') return redirect(f'/edit/{request.user.id}/') class PasswordResetView(View): def get(self, request): return", "short (Min. 8 characters)') return render(request, 'register.html') elif any(not c.isalnum() for c in", "User.objects.get(email=email) uidb64 = urlsafe_base64_encode(force_bytes(user.pk)) token = token_generator.make_token(user) TokenTemporaryStorage.objects.create(user_id=user.id, token=token) domain = get_current_site(request).domain link", "information') return render(request, 'register.html') class VerificationView(View): def get(self, request, uidb64, token): try: id", "uidb64, token): try: id = force_str(urlsafe_base64_decode(uidb64)) user = User.objects.get(id=id) stored_token = TokenTemporaryStorage.objects.get(user=user).token if", "{activation_url}' send_mail( email_subject, email_body, '<EMAIL>', [email], fail_silently=False, ) messages.success(request, 'Check your e-mail account", "messages.error(request, 'Please fill all fields correctly') return redirect('/') for administrator in administrators: email", "messages.error(request, 'Incorrect e-mail') return render(request, 'password-reset.html') class PasswordResetVerificationView(View): def get(self, request, uidb64, token):", "return redirect('login-page') def post(self, request, uidb64, token): id = force_str(urlsafe_base64_decode(uidb64)) user = User.objects.get(id=id)", "return redirect(f'/edit/{request.user.id}/') return render(request, 'user-edit.html') def post(self, request, user_id): user = User.objects.get(id=user_id) first_name", "not token_generator.check_token(user, token): messages.error(request, 'Account is already activated') return redirect('login-page') if user.is_active: return", "= request.POST.get('old_password') user = authenticate(request, username=request.user.email, password=old_password) if user is None: messages.error(request, 'Old", "request.POST.get('first_name') last_name = request.POST.get('last_name') email = 
request.POST.get('email') if not request.POST.get('password') or not request.POST.get('password2'):", "all fields correctly') return redirect('/') for administrator in administrators: email = administrator.email send_mail(", "request.POST.get('password') password2 = request.POST.get('password2') if password != <PASSWORD>: messages.error(request, 'Passwords mismatch') return render(request,", "= token_generator.make_token(user) TokenTemporaryStorage.objects.create(user_id=user.id, token=token) domain = get_current_site(request).domain link = reverse('activate-page', kwargs={'uidb64': uidb64, 'token':", "all fields correctly') return render(request, 'user-edit.html') password = request.POST.get('password') password2 = request.POST.get('password2') if", "request, uidb64, token): try: id = force_str(urlsafe_base64_decode(uidb64)) user = User.objects.get(id=id) stored_token = TokenTemporaryStorage.objects.get(user=user).token", "your e-mail inbox') return render(request, 'password-reset.html') except ObjectDoesNotExist: messages.error(request, 'Incorrect e-mail') return render(request,", "get(self, request): return render(request, 'register.html') def post(self, request): name = request.POST.get('name') surname =", "redirect('login-page') user.is_active = True user.save() messages.success(request, 'Account successfully activated') return redirect('login-page') else: messages.error(request,", "token}) email_subject = 'Activate your account' activation_url = f'http://{domain}{link}' email_body = f'Hello {user},", "'login.html') def post(self, request): try: email = request.POST.get('email') password = request.POST.get('password') user =", "mismatch') return render(request, 'change-password.html') user.set_password(<PASSWORD>) user.save() new_user = authenticate(request, username=request.user.email, password=<PASSWORD>) if user", "'Given e-mail does not exist in the database') return render(request, 'login.html') class RegisterView(View):", "send_mail( email_subject, 
email_body, '<EMAIL>', [email], fail_silently=False, ) messages.success(request, 'Successfully sent') return redirect(f'/panel/{request.user.id}/') class", "been changed') return redirect('login-page') return render(request, 'new-password-form.html') else: messages.error(request, 'Incorrect link or password", "stored_token = TokenTemporaryStorage.objects.get(user=user).token if token == stored_token: TokenTemporaryStorage.objects.get(user=user).delete() if not token_generator.check_token(user, token): messages.error(request,", "= force_str(urlsafe_base64_decode(uidb64)) user = User.objects.get(id=id) password1 = request.POST.get('<PASSWORD>') password2 = request.POST.get('password2') if password1", "token=token) domain = get_current_site(request).domain link = reverse('activate-page', kwargs={'uidb64': uidb64, 'token': token}) email_subject =", "token_generator.check_token(user, token): messages.error(request, 'Account is already activated') return redirect('login-page') if user.is_active: return redirect('login-page')", "'Incorrect password') return render(request, 'login.html') except ObjectDoesNotExist: messages.error(request, 'Given e-mail does not exist", "= request.POST.get('email') if not request.POST.get('password') or not request.POST.get('password2'): messages.error(request, 'Please fill all fields", "token}) email_subject = 'Password reset' activation_url = f'http://{domain}{link}' email_body = f'Hello {user}, twój", "token = token_generator.make_token(user) TokenTemporaryStorage.objects.create(user_id=user.id, token=token) domain = get_current_site(request).domain link = reverse('activate-page', kwargs={'uidb64': uidb64,", "'A user with the given e-mail already exists') return render(request, 'register.html') elif password", "from django.shortcuts import render, redirect from django.views import View from main.models import Donation,", "fill all fields correctly') return redirect('/') for administrator in administrators: email = administrator.email", 
"'token': token}) email_subject = 'Password reset' activation_url = f'http://{domain}{link}' email_body = f'Hello {user},", "email_body, '<EMAIL>', [email], fail_silently=False, ) messages.success(request, 'Successfully sent') return redirect(f'/panel/{request.user.id}/') class UserEditView(View): def", "= request.POST.get('password') password2 = request.POST.get('password2') if password != <PASSWORD>: messages.error(request, 'Passwords mismatch') return", "except ObjectDoesNotExist: messages.error(request, 'Incorrect link or account is already activated') return redirect('login-page') class", "messages.error(request, 'Password too short (Min. 8 characters)') return render(request, 'register.html') elif any(not c.isalnum()", "django.urls import reverse from django.utils.encoding import force_bytes, force_str, DjangoUnicodeDecodeError from django.utils.http import urlsafe_base64_encode,", "render(request, 'change-password.html') def post(self, request, user_id): if not request.POST.get('old_password') or not request.POST.get('new_password1') or", "the given e-mail already exists') return render(request, 'register.html') elif password != <PASSWORD>: messages.error(request,", "username=request.user.email, password=<PASSWORD>) if user is None: messages.error(request, 'Something went wrong') return render(request, 'change-password.html')", "'Passwords mismatch') return render(request, 'new-password-form.html') user.set_password(<PASSWORD>) user.save() TokenTemporaryStorage.objects.get(user=user).delete() messages.success(request, 'Password changed successfully') return", "= f'Hello {user}, your activation link: {activation_url}' send_mail( email_subject, email_body, '<EMAIL>', [email], fail_silently=False,", "'Passwords mismatch') return render(request, 'change-password.html') user.set_password(<PASSWORD>) user.save() new_user = authenticate(request, username=request.user.email, password=<PASSWORD>) if", "if user is None: messages.error(request, 'Old password 
incorrect') return render(request, 'change-password.html') new_password1 =", "post(self, request): try: email = request.POST.get('email') password = request.POST.get('password') user = authenticate(request, username=email,", "if not name or not surname or not message: messages.error(request, 'Please fill all", "special characters' '(There should be letters, lowercase letters, numbers and special characters)') return", "password is already changed') return redirect('login-page') def post(self, request, uidb64, token): id =", "for c in password2) is False: messages.error(request, 'The password does not have all", "return redirect('/') class UserPanelView(View): def get(self, request, user_id): donations = Donation.objects.filter(user_id=user_id).order_by('date_added') \\ .order_by('date_taken').order_by('time_taken').order_by('is_taken')", "'<EMAIL>', [email], fail_silently=False, ) messages.success(request, 'Successfully sent') return redirect(f'/panel/{request.user.id}/') class UserEditView(View): def get(self,", "password2) is False \\ or any(c.islower() for c in password2) is False \\", "= request.POST.get('password2') if password1 != <PASSWORD>: messages.error(request, 'Passwords mismatch') return render(request, 'new-password-form.html') user.set_password(<PASSWORD>)", "is None: messages.error(request, 'Incorrect password') return render(request, 'user-edit.html') user.first_name = first_name user.last_name =", "user.save() uidb64 = urlsafe_base64_encode(force_bytes(user.pk)) token = token_generator.make_token(user) TokenTemporaryStorage.objects.create(user_id=user.id, token=token) domain = get_current_site(request).domain link", "Donation, DonationCategories, \\ TokenTemporaryStorage from django.contrib.auth.models import User from django.contrib.auth import authenticate, login,", "def post(self, request): try: email = request.POST.get('email') password = request.POST.get('password') user = authenticate(request,", "elif any(not c.isalnum() for c in password2) 
is False \\ or any(c.isupper() for", "is already changed') return redirect('login-page') def post(self, request, uidb64, token): id = force_str(urlsafe_base64_decode(uidb64))", "redirect('login-page') if user.is_active: return redirect('login-page') user.is_active = True user.save() messages.success(request, 'Account successfully activated')", "messages.error(request, 'Incorrect link or password is already changed') return redirect('login-page') def post(self, request,", "not request.POST.get('old_password') or not request.POST.get('new_password1') or not request.POST.get( 'new_password2'): messages.error(request, 'Please fill all", "get(self, request): return render(request, 'password-reset.html') def post(self, request): email = request.POST.get('email') try: user", "from django.utils.http import urlsafe_base64_encode, urlsafe_base64_decode from django.contrib.sites.shortcuts import get_current_site from .utils import token_generator", "django.contrib.auth.models import User from django.contrib.auth import authenticate, login, logout from django.contrib import messages", "'register.html') elif any(not c.isalnum() for c in password2) is False \\ or any(c.isupper()", "'Password reset' activation_url = f'http://{domain}{link}' email_body = f'Hello {user}, twój password reset link:", "user.set_password(<PASSWORD>) user.is_active = False user.save() uidb64 = urlsafe_base64_encode(force_bytes(user.pk)) token = token_generator.make_token(user) TokenTemporaryStorage.objects.create(user_id=user.id, token=token)", "\\ TokenTemporaryStorage from django.contrib.auth.models import User from django.contrib.auth import authenticate, login, logout from", "token): messages.error(request, 'Password has already been changed') return redirect('login-page') return render(request, 'new-password-form.html') else:", "request): try: email = request.POST.get('email') password = request.POST.get('password') user = authenticate(request, username=email, password=password)", "= 
f'http://{domain}{link}' email_body = f'Hello {user}, twój password reset link: {activation_url}' send_mail( email_subject,", "django.core.exceptions import ObjectDoesNotExist class LoginView(View): def get(self, request): return render(request, 'login.html') def post(self,", "= administrator.email send_mail( email_subject, email_body, '<EMAIL>', [email], fail_silently=False, ) messages.success(request, 'Successfully sent') return", "= request.POST.get('message') email_subject = f'Contact form(Sent by user {name} {surname}' email_body = message", "first_name=name, last_name=surname, email=email) user.set_password(<PASSWORD>) user.is_active = False user.save() uidb64 = urlsafe_base64_encode(force_bytes(user.pk)) token =", "request, user_id): name = request.POST.get('name') surname = request.POST.get('surname') message = request.POST.get('message') email_subject =", "token_generator.check_token(user, token): messages.error(request, 'Password has already been changed') return redirect('login-page') return render(request, 'new-password-form.html')", "send_mail( email_subject, email_body, '<EMAIL>', [email], fail_silently=False, ) messages.success(request, 'Check your e-mail account for", "import authenticate, login, logout from django.contrib import messages from django.core.mail import send_mail from", "or password is already changed') return redirect('login-page') def post(self, request, uidb64, token): id", "administrators = User.objects.filter(is_superuser=True) if not name or not surname or not message: messages.error(request,", "new_password1 = request.POST.get('new_password1') new_password2 = request.POST.get('new_password2') if new_password1 != new_password2: messages.error(request, 'Passwords mismatch')", "account is already activated') return redirect('login-page') class LogoutView(View): def get(self, request): logout(request) return", "fields correctly') return redirect('/') for administrator in administrators: email = administrator.email send_mail( 
email_subject,", "request.user.id != user_id: return redirect(f'/edit/{request.user.id}/') return render(request, 'change-password.html') def post(self, request, user_id): if", "force_str(urlsafe_base64_decode(uidb64)) user = User.objects.get(id=id) password1 = request.POST.get('<PASSWORD>') password2 = request.POST.get('password2') if password1 !=", "render(request, 'register.html') elif User.objects.filter(username=email): messages.error(request, 'A user with the given e-mail already exists')", "password2) is False \\ or any(c.isdigit() for c in password2) is False: messages.error(request,", "from django.contrib import messages from django.core.mail import send_mail from django.urls import reverse from", "for c in password2) is False \\ or any(c.isdigit() for c in password2)", "e-mail inbox') return render(request, 'password-reset.html') except ObjectDoesNotExist: messages.error(request, 'Incorrect e-mail') return render(request, 'password-reset.html')", "class RegisterView(View): def get(self, request): return render(request, 'register.html') def post(self, request): name =", "= User.objects.filter(is_superuser=True) if not name or not surname or not message: messages.error(request, 'Please", "LogoutView(View): def get(self, request): logout(request) return redirect('/') class UserPanelView(View): def get(self, request, user_id):", "not message: messages.error(request, 'Please fill all fields correctly') return redirect('/') for administrator in", "request, user_id): if not request.POST.get('old_password') or not request.POST.get('new_password1') or not request.POST.get( 'new_password2'): messages.error(request,", "user.set_password(<PASSWORD>) user.save() new_user = authenticate(request, username=request.user.email, password=<PASSWORD>) if user is None: messages.error(request, 'Something", "username=email, password=password) if user is not None: login(request, user) return redirect('/') elif not", "render(request, 'password-reset.html') def post(self, request): 
email = request.POST.get('email') try: user = User.objects.get(email=email) uidb64", "email=email) user.set_password(<PASSWORD>) user.is_active = False user.save() uidb64 = urlsafe_base64_encode(force_bytes(user.pk)) token = token_generator.make_token(user) TokenTemporaryStorage.objects.create(user_id=user.id,", "== stored_token: TokenTemporaryStorage.objects.get(user=user).delete() if not token_generator.check_token(user, token): messages.error(request, 'Account is already activated') return", "class LoginView(View): def get(self, request): return render(request, 'login.html') def post(self, request): try: email", "link: {activation_url}' send_mail( email_subject, email_body, '<EMAIL>', [email], fail_silently=False, ) messages.success(request, 'Check your e-mail", "donations, 'donation_categories': donation_categories}) def post(self, request, user_id): name = request.POST.get('name') surname = request.POST.get('surname')", "not request.POST.get('password2'): messages.error(request, 'Please fill all fields correctly') return render(request, 'user-edit.html') password =", "return render(request, 'user-edit.html') user.first_name = first_name user.last_name = last_name user.email = email user.save()", "elif password != <PASSWORD>: messages.error(request, 'Passwords mismatch') return render(request, 'register.html') user = User.objects.create_user(username=email,", "email_body = message administrators = User.objects.filter(is_superuser=True) if not name or not surname or", "token = token_generator.make_token(user) TokenTemporaryStorage.objects.create(user_id=user.id, token=token) domain = get_current_site(request).domain link = reverse('password-reset-verification', kwargs={'uidb64': uidb64,", "render(request, 'new-password-form.html') else: messages.error(request, 'Incorrect link or password is already changed') return redirect('login-page')", "messages.success(request, 'Account successfully activated') return redirect('login-page') else: messages.error(request, 
'Incorrect link or account is", "def post(self, request): email = request.POST.get('email') try: user = User.objects.get(email=email) uidb64 = urlsafe_base64_encode(force_bytes(user.pk))", "request.POST.get('password') user = authenticate(request, username=email, password=password) if user is not None: login(request, user)", "return render(request, 'register.html') user = User.objects.create_user(username=email, first_name=name, last_name=surname, email=email) user.set_password(<PASSWORD>) user.is_active = False", "import render, redirect from django.views import View from main.models import Donation, DonationCategories, \\", "user.save() new_user = authenticate(request, username=request.user.email, password=<PASSWORD>) if user is None: messages.error(request, 'Something went", "!= <PASSWORD>: messages.error(request, 'Passwords mismatch') return render(request, 'user-edit.html') user = authenticate(request, username=request.user.email, password=<PASSWORD>)", "if new_password1 != new_password2: messages.error(request, 'Passwords mismatch') return render(request, 'change-password.html') user.set_password(<PASSWORD>) user.save() new_user", "return redirect('login-page') except ObjectDoesNotExist: messages.error(request, 'Incorrect link or account is already activated') return", "class PasswordResetVerificationView(View): def get(self, request, uidb64, token): try: id = force_str(urlsafe_base64_decode(uidb64)) user =", "activated') return redirect('login-page') else: messages.error(request, 'Incorrect link or account is already activated') return", "get(self, request, user_id): donations = Donation.objects.filter(user_id=user_id).order_by('date_added') \\ .order_by('date_taken').order_by('time_taken').order_by('is_taken') donation_categories = DonationCategories.objects.all() return render(request,", "def post(self, request, user_id): name = request.POST.get('name') surname = request.POST.get('surname') message = request.POST.get('message')", "message administrators = 
User.objects.filter(is_superuser=True) if not name or not surname or not message:", "= request.POST.get('new_password2') if new_password1 != new_password2: messages.error(request, 'Passwords mismatch') return render(request, 'change-password.html') user.set_password(<PASSWORD>)", "if password != <PASSWORD>: messages.error(request, 'Passwords mismatch') return render(request, 'user-edit.html') user = authenticate(request,", "email_subject = f'Contact form(Sent by user {name} {surname}' email_body = message administrators =", "render(request, 'user-edit.html') password = request.POST.get('password') password2 = request.POST.get('password2') if password != <PASSWORD>: messages.error(request,", "post(self, request, user_id): name = request.POST.get('name') surname = request.POST.get('surname') message = request.POST.get('message') email_subject", "request.POST.get('surname') message = request.POST.get('message') email_subject = f'Contact form(Sent by user {name} {surname}' email_body", "from django.contrib.auth.models import User from django.contrib.auth import authenticate, login, logout from django.contrib import", "link = reverse('activate-page', kwargs={'uidb64': uidb64, 'token': token}) email_subject = 'Activate your account' activation_url", "account is already activated') return redirect('login-page') except ObjectDoesNotExist: messages.error(request, 'Incorrect link or account", "User.objects.create_user(username=email, first_name=name, last_name=surname, email=email) user.set_password(<PASSWORD>) user.is_active = False user.save() uidb64 = urlsafe_base64_encode(force_bytes(user.pk)) token", "user.email = email user.save() messages.success(request, 'Data has been changed') return redirect(f'/edit/{request.user.id}/') class PasswordChangeView(View):", "False: messages.error(request, 'The password does not have all special characters' '(There should be", "password != <PASSWORD>: messages.error(request, 'Passwords mismatch') return render(request, 'register.html') 
user = User.objects.create_user(username=email, first_name=name,", "token_generator.make_token(user) TokenTemporaryStorage.objects.create(user_id=user.id, token=token) domain = get_current_site(request).domain link = reverse('activate-page', kwargs={'uidb64': uidb64, 'token': token})", "reverse('activate-page', kwargs={'uidb64': uidb64, 'token': token}) email_subject = 'Activate your account' activation_url = f'http://{domain}{link}'", "email_subject, email_body, '<EMAIL>', [email], fail_silently=False, ) messages.success(request, 'Successfully sent') return redirect(f'/panel/{request.user.id}/') class UserEditView(View):", "e-mail already exists') return render(request, 'register.html') elif password != <PASSWORD>: messages.error(request, 'Passwords mismatch')", "return render(request, 'user-edit.html') password = request.POST.get('password') password2 = request.POST.get('password2') if password != <PASSWORD>:", "logout(request) return redirect('/') class UserPanelView(View): def get(self, request, user_id): donations = Donation.objects.filter(user_id=user_id).order_by('date_added') \\", "= email user.save() messages.success(request, 'Data has been changed') return redirect(f'/edit/{request.user.id}/') class PasswordChangeView(View): def", "return render(request, 'change-password.html') login(request, new_user) messages.success(request, 'Data successfully changed') return redirect(f'/edit/{request.user.id}/') class PasswordResetView(View):", "= f'http://{domain}{link}' email_body = f'Hello {user}, your activation link: {activation_url}' send_mail( email_subject, email_body,", "return redirect('login-page') user.is_active = True user.save() messages.success(request, 'Account successfully activated') return redirect('login-page') else:", "None: messages.error(request, 'Incorrect password') return render(request, 'user-edit.html') user.first_name = first_name user.last_name = last_name", "messages.error(request, 'Incorrect link or account is already activated') 
return redirect('login-page') class LogoutView(View): def", "is already changed') return redirect('login-page') except ObjectDoesNotExist: messages.error(request, 'Incorrect link or password is", "uidb64, token): id = force_str(urlsafe_base64_decode(uidb64)) user = User.objects.get(id=id) password1 = request.POST.get('<PASSWORD>') password2 =", "user {name} {surname}' email_body = message administrators = User.objects.filter(is_superuser=True) if not name or", "= urlsafe_base64_encode(force_bytes(user.pk)) token = token_generator.make_token(user) TokenTemporaryStorage.objects.create(user_id=user.id, token=token) domain = get_current_site(request).domain link = reverse('activate-page',", "return redirect('login-page') except ObjectDoesNotExist: messages.error(request, 'Incorrect link or password is already changed') return", "user_id: return redirect(f'/edit/{request.user.id}/') return render(request, 'user-edit.html') def post(self, request, user_id): user = User.objects.get(id=user_id)", "if user is None: messages.error(request, 'Incorrect password') return render(request, 'user-edit.html') user.first_name = first_name", "render, redirect from django.views import View from main.models import Donation, DonationCategories, \\ TokenTemporaryStorage", "'Successfully sent') return redirect(f'/panel/{request.user.id}/') class UserEditView(View): def get(self, request, user_id): if request.user.id !=", "return redirect('/') elif not User.objects.get(email=email).check_password(password): messages.error(request, 'Incorrect password') return render(request, 'login.html') except ObjectDoesNotExist:", "except ObjectDoesNotExist: messages.error(request, 'Incorrect link or password is already changed') return redirect('login-page') def", "email user.save() messages.success(request, 'Data has been changed') return redirect(f'/edit/{request.user.id}/') class PasswordChangeView(View): def get(self,", "def get(self, request): return render(request, 'password-reset.html') def 
post(self, request): email = request.POST.get('email') try:", "import Donation, DonationCategories, \\ TokenTemporaryStorage from django.contrib.auth.models import User from django.contrib.auth import authenticate,", "message: messages.error(request, 'Please fill all fields correctly') return redirect('/') for administrator in administrators:", "request.POST.get('password') password2 = request.POST.get('password2') if len(password) < 8 or len(password2) < 8: messages.error(request,", "return redirect(f'/panel/{request.user.id}/') class UserEditView(View): def get(self, request, user_id): if request.user.id != user_id: return", "class PasswordChangeView(View): def get(self, request, user_id): if request.user.id != user_id: return redirect(f'/edit/{request.user.id}/') return", "= force_str(urlsafe_base64_decode(uidb64)) user = User.objects.get(id=id) stored_token = TokenTemporaryStorage.objects.get(user=user).token if token == stored_token: if", "reset' activation_url = f'http://{domain}{link}' email_body = f'Hello {user}, twój password reset link: {activation_url}'", "any(c.isdigit() for c in password2) is False: messages.error(request, 'The password does not have", "already activated') return redirect('login-page') if user.is_active: return redirect('login-page') user.is_active = True user.save() messages.success(request,", "redirect(f'/edit/{request.user.id}/') class PasswordResetView(View): def get(self, request): return render(request, 'password-reset.html') def post(self, request): email", "characters)') return render(request, 'register.html') elif User.objects.filter(username=email): messages.error(request, 'A user with the given e-mail", "email = request.POST.get('email') password = request.POST.get('password') password2 = request.POST.get('password2') if len(password) < 8", "= request.POST.get('password2') if len(password) < 8 or len(password2) < 8: messages.error(request, 'Password too", "import urlsafe_base64_encode, urlsafe_base64_decode from 
django.contrib.sites.shortcuts import get_current_site from .utils import token_generator from django.core.exceptions", "django.shortcuts import render, redirect from django.views import View from main.models import Donation, DonationCategories,", "mismatch') return render(request, 'user-edit.html') user = authenticate(request, username=request.user.email, password=<PASSWORD>) if user is None:", "token == stored_token: TokenTemporaryStorage.objects.get(user=user).delete() if not token_generator.check_token(user, token): messages.error(request, 'Account is already activated')", "= User.objects.get(email=email) uidb64 = urlsafe_base64_encode(force_bytes(user.pk)) token = token_generator.make_token(user) TokenTemporaryStorage.objects.create(user_id=user.id, token=token) domain = get_current_site(request).domain", "None: messages.error(request, 'Old password incorrect') return render(request, 'change-password.html') new_password1 = request.POST.get('new_password1') new_password2 =", "logout from django.contrib import messages from django.core.mail import send_mail from django.urls import reverse", "if token == stored_token: TokenTemporaryStorage.objects.get(user=user).delete() if not token_generator.check_token(user, token): messages.error(request, 'Account is already", "import User from django.contrib.auth import authenticate, login, logout from django.contrib import messages from", "ObjectDoesNotExist class LoginView(View): def get(self, request): return render(request, 'login.html') def post(self, request): try:", "'Data successfully changed') return redirect(f'/edit/{request.user.id}/') class PasswordResetView(View): def get(self, request): return render(request, 'password-reset.html')", "surname or not message: messages.error(request, 'Please fill all fields correctly') return redirect('/') for", "return redirect(f'/edit/{request.user.id}/') class PasswordResetView(View): def get(self, request): return render(request, 'password-reset.html') def post(self, 
request):", "return render(request, 'password-reset.html') except ObjectDoesNotExist: messages.error(request, 'Incorrect e-mail') return render(request, 'password-reset.html') class PasswordResetVerificationView(View):", "elif User.objects.filter(username=email): messages.error(request, 'A user with the given e-mail already exists') return render(request,", "'Password has already been changed') return redirect('login-page') return render(request, 'new-password-form.html') else: messages.error(request, 'Incorrect", "administrators: email = administrator.email send_mail( email_subject, email_body, '<EMAIL>', [email], fail_silently=False, ) messages.success(request, 'Successfully", "request.POST.get('surname') email = request.POST.get('email') password = request.POST.get('password') password2 = request.POST.get('password2') if len(password) <", "main.models import Donation, DonationCategories, \\ TokenTemporaryStorage from django.contrib.auth.models import User from django.contrib.auth import", "user is not None: login(request, user) return redirect('/') elif not User.objects.get(email=email).check_password(password): messages.error(request, 'Incorrect", "id = force_str(urlsafe_base64_decode(uidb64)) user = User.objects.get(id=id) password1 = request.POST.get('<PASSWORD>') password2 = request.POST.get('password2') if", "redirect('login-page') except ObjectDoesNotExist: messages.error(request, 'Incorrect link or account is already activated') return redirect('login-page')", "'Please fill all fields correctly') return redirect('/') for administrator in administrators: email =", "render(request, 'register.html') def post(self, request): name = request.POST.get('name') surname = request.POST.get('surname') email =", "= Donation.objects.filter(user_id=user_id).order_by('date_added') \\ .order_by('date_taken').order_by('time_taken').order_by('is_taken') donation_categories = DonationCategories.objects.all() return render(request, 'user_panel.html', {'donations': donations, 
'donation_categories':", "sent') return redirect(f'/panel/{request.user.id}/') class UserEditView(View): def get(self, request, user_id): if request.user.id != user_id:", "try: email = request.POST.get('email') password = request.POST.get('password') user = authenticate(request, username=email, password=password) if", "user = authenticate(request, username=request.user.email, password=old_password) if user is None: messages.error(request, 'Old password incorrect')", "password != <PASSWORD>: messages.error(request, 'Passwords mismatch') return render(request, 'user-edit.html') user = authenticate(request, username=request.user.email,", "activated') return redirect('login-page') except ObjectDoesNotExist: messages.error(request, 'Incorrect link or account is already activated')", "request.POST.get('new_password2') if new_password1 != new_password2: messages.error(request, 'Passwords mismatch') return render(request, 'change-password.html') user.set_password(<PASSWORD>) user.save()", "c.isalnum() for c in password2) is False \\ or any(c.isupper() for c in", "password') return render(request, 'user-edit.html') user.first_name = first_name user.last_name = last_name user.email = email", "user.first_name = first_name user.last_name = last_name user.email = email user.save() messages.success(request, 'Data has", "'change-password.html') new_password1 = request.POST.get('new_password1') new_password2 = request.POST.get('new_password2') if new_password1 != new_password2: messages.error(request, 'Passwords", "numbers and special characters)') return render(request, 'register.html') elif User.objects.filter(username=email): messages.error(request, 'A user with", "authenticate(request, username=request.user.email, password=<PASSWORD>) if user is None: messages.error(request, 'Something went wrong') return render(request,", "went wrong') return render(request, 'change-password.html') login(request, new_user) messages.success(request, 'Data successfully changed') return 
redirect(f'/edit/{request.user.id}/')", "fail_silently=False, ) messages.success(request, 'Check your e-mail account for further information') return render(request, 'register.html')", "'Check your e-mail account for further information') return render(request, 'register.html') class VerificationView(View): def", "messages.success(request, 'Check your e-mail inbox') return render(request, 'password-reset.html') except ObjectDoesNotExist: messages.error(request, 'Incorrect e-mail')", "'register.html') class VerificationView(View): def get(self, request, uidb64, token): try: id = force_str(urlsafe_base64_decode(uidb64)) user", "'user-edit.html') def post(self, request, user_id): user = User.objects.get(id=user_id) first_name = request.POST.get('first_name') last_name =", "password2 = request.POST.get('password2') if password1 != <PASSWORD>: messages.error(request, 'Passwords mismatch') return render(request, 'new-password-form.html')", "return render(request, 'register.html') def post(self, request): name = request.POST.get('name') surname = request.POST.get('surname') email", "= 'Password reset' activation_url = f'http://{domain}{link}' email_body = f'Hello {user}, twój password reset", "or not request.POST.get('password2'): messages.error(request, 'Please fill all fields correctly') return render(request, 'user-edit.html') password", "= True user.save() messages.success(request, 'Account successfully activated') return redirect('login-page') else: messages.error(request, 'Incorrect link", "force_bytes, force_str, DjangoUnicodeDecodeError from django.utils.http import urlsafe_base64_encode, urlsafe_base64_decode from django.contrib.sites.shortcuts import get_current_site from", "request, user_id): user = User.objects.get(id=user_id) first_name = request.POST.get('first_name') last_name = request.POST.get('last_name') email =", "messages.error(request, 'Account is already activated') return redirect('login-page') if user.is_active: return redirect('login-page') 
user.is_active =", "in password2) is False \\ or any(c.isupper() for c in password2) is False", "your activation link: {activation_url}' send_mail( email_subject, email_body, '<EMAIL>', [email], fail_silently=False, ) messages.success(request, 'Check", "user = User.objects.get(id=id) stored_token = TokenTemporaryStorage.objects.get(user=user).token if token == stored_token: if not token_generator.check_token(user,", "'Incorrect link or password is already changed') return redirect('login-page') except ObjectDoesNotExist: messages.error(request, 'Incorrect", "request): return render(request, 'register.html') def post(self, request): name = request.POST.get('name') surname = request.POST.get('surname')", "'user-edit.html') user.first_name = first_name user.last_name = last_name user.email = email user.save() messages.success(request, 'Data", "token): try: id = force_str(urlsafe_base64_decode(uidb64)) user = User.objects.get(id=id) stored_token = TokenTemporaryStorage.objects.get(user=user).token if token", "= request.POST.get('<PASSWORD>') password2 = request.POST.get('password2') if password1 != <PASSWORD>: messages.error(request, 'Passwords mismatch') return", "'Account successfully activated') return redirect('login-page') else: messages.error(request, 'Incorrect link or account is already", "post(self, request, user_id): user = User.objects.get(id=user_id) first_name = request.POST.get('first_name') last_name = request.POST.get('last_name') email", "render(request, 'user-edit.html') def post(self, request, user_id): user = User.objects.get(id=user_id) first_name = request.POST.get('first_name') last_name", "user = authenticate(request, username=request.user.email, password=<PASSWORD>) if user is None: messages.error(request, 'Incorrect password') return", "in administrators: email = administrator.email send_mail( email_subject, email_body, '<EMAIL>', [email], fail_silently=False, ) messages.success(request,", "return redirect('login-page') else: 
messages.error(request, 'Incorrect link or account is already activated') return redirect('login-page')", "'user_panel.html', {'donations': donations, 'donation_categories': donation_categories}) def post(self, request, user_id): name = request.POST.get('name') surname", "render(request, 'login.html') class RegisterView(View): def get(self, request): return render(request, 'register.html') def post(self, request):", "def get(self, request): return render(request, 'register.html') def post(self, request): name = request.POST.get('name') surname", "mismatch') return render(request, 'register.html') user = User.objects.create_user(username=email, first_name=name, last_name=surname, email=email) user.set_password(<PASSWORD>) user.is_active =", "further information') return render(request, 'register.html') class VerificationView(View): def get(self, request, uidb64, token): try:", "name = request.POST.get('name') surname = request.POST.get('surname') message = request.POST.get('message') email_subject = f'Contact form(Sent", "RegisterView(View): def get(self, request): return render(request, 'register.html') def post(self, request): name = request.POST.get('name')", "def get(self, request, user_id): if request.user.id != user_id: return redirect(f'/edit/{request.user.id}/') return render(request, 'change-password.html')", "messages.error(request, 'Given e-mail does not exist in the database') return render(request, 'login.html') class", "8 characters)') return render(request, 'register.html') elif any(not c.isalnum() for c in password2) is", "return render(request, 'login.html') class RegisterView(View): def get(self, request): return render(request, 'register.html') def post(self,", "django.core.mail import send_mail from django.urls import reverse from django.utils.encoding import force_bytes, force_str, DjangoUnicodeDecodeError", "not request.POST.get( 'new_password2'): messages.error(request, 'Please fill all fields correctly') return render(request, 
'change-password.html') old_password", "= request.POST.get('new_password1') new_password2 = request.POST.get('new_password2') if new_password1 != new_password2: messages.error(request, 'Passwords mismatch') return", "e-mail') return render(request, 'password-reset.html') class PasswordResetVerificationView(View): def get(self, request, uidb64, token): try: id", "authenticate(request, username=email, password=password) if user is not None: login(request, user) return redirect('/') elif", "render(request, 'register.html') elif password != <PASSWORD>: messages.error(request, 'Passwords mismatch') return render(request, 'register.html') user", "def get(self, request): return render(request, 'login.html') def post(self, request): try: email = request.POST.get('email')", "if len(password) < 8 or len(password2) < 8: messages.error(request, 'Password too short (Min.", "from django.core.exceptions import ObjectDoesNotExist class LoginView(View): def get(self, request): return render(request, 'login.html') def", "messages.error(request, 'Passwords mismatch') return render(request, 'user-edit.html') user = authenticate(request, username=request.user.email, password=<PASSWORD>) if user", "email = administrator.email send_mail( email_subject, email_body, '<EMAIL>', [email], fail_silently=False, ) messages.success(request, 'Successfully sent')", "password2 = request.POST.get('password2') if len(password) < 8 or len(password2) < 8: messages.error(request, 'Password", "is False: messages.error(request, 'The password does not have all special characters' '(There should", "kwargs={'uidb64': uidb64, 'token': token}) email_subject = 'Activate your account' activation_url = f'http://{domain}{link}' email_body", "= request.POST.get('name') surname = request.POST.get('surname') email = request.POST.get('email') password = request.POST.get('password') password2 =", "class VerificationView(View): def get(self, request, uidb64, token): try: id = force_str(urlsafe_base64_decode(uidb64)) user 
=", "if request.user.id != user_id: return redirect(f'/edit/{request.user.id}/') return render(request, 'user-edit.html') def post(self, request, user_id):", "render(request, 'change-password.html') new_password1 = request.POST.get('new_password1') new_password2 = request.POST.get('new_password2') if new_password1 != new_password2: messages.error(request,", "already changed') return redirect('login-page') except ObjectDoesNotExist: messages.error(request, 'Incorrect link or password is already", "return redirect('/') for administrator in administrators: email = administrator.email send_mail( email_subject, email_body, '<EMAIL>',", "administrator in administrators: email = administrator.email send_mail( email_subject, email_body, '<EMAIL>', [email], fail_silently=False, )", "already been changed') return redirect('login-page') return render(request, 'new-password-form.html') else: messages.error(request, 'Incorrect link or", ".utils import token_generator from django.core.exceptions import ObjectDoesNotExist class LoginView(View): def get(self, request): return", "if not token_generator.check_token(user, token): messages.error(request, 'Password has already been changed') return redirect('login-page') return", "return redirect('login-page') return render(request, 'new-password-form.html') else: messages.error(request, 'Incorrect link or password is already", "{activation_url}' send_mail( email_subject, email_body, '<EMAIL>', [email], fail_silently=False, ) messages.success(request, 'Check your e-mail inbox')", "django.contrib.auth import authenticate, login, logout from django.contrib import messages from django.core.mail import send_mail", "password reset link: {activation_url}' send_mail( email_subject, email_body, '<EMAIL>', [email], fail_silently=False, ) messages.success(request, 'Check", "= urlsafe_base64_encode(force_bytes(user.pk)) token = token_generator.make_token(user) TokenTemporaryStorage.objects.create(user_id=user.id, token=token) domain = 
get_current_site(request).domain link = reverse('password-reset-verification',", "c in password2) is False \\ or any(c.islower() for c in password2) is", "messages.error(request, 'Please fill all fields correctly') return render(request, 'change-password.html') old_password = request.POST.get('old_password') user", "return render(request, 'change-password.html') old_password = request.POST.get('old_password') user = authenticate(request, username=request.user.email, password=old_password) if user", "if password1 != <PASSWORD>: messages.error(request, 'Passwords mismatch') return render(request, 'new-password-form.html') user.set_password(<PASSWORD>) user.save() TokenTemporaryStorage.objects.get(user=user).delete()", "None: messages.error(request, 'Something went wrong') return render(request, 'change-password.html') login(request, new_user) messages.success(request, 'Data successfully", "request): name = request.POST.get('name') surname = request.POST.get('surname') email = request.POST.get('email') password = request.POST.get('password')", "is already activated') return redirect('login-page') except ObjectDoesNotExist: messages.error(request, 'Incorrect link or account is", "request.POST.get('new_password1') new_password2 = request.POST.get('new_password2') if new_password1 != new_password2: messages.error(request, 'Passwords mismatch') return render(request,", "def post(self, request, user_id): user = User.objects.get(id=user_id) first_name = request.POST.get('first_name') last_name = request.POST.get('last_name')", "wrong') return render(request, 'change-password.html') login(request, new_user) messages.success(request, 'Data successfully changed') return redirect(f'/edit/{request.user.id}/') class", "email = request.POST.get('email') try: user = User.objects.get(email=email) uidb64 = urlsafe_base64_encode(force_bytes(user.pk)) token = token_generator.make_token(user)", "= request.POST.get('email') password = request.POST.get('password') user = 
authenticate(request, username=email, password=password) if user is", "= reverse('activate-page', kwargs={'uidb64': uidb64, 'token': token}) email_subject = 'Activate your account' activation_url =", "return render(request, 'register.html') elif password != <PASSWORD>: messages.error(request, 'Passwords mismatch') return render(request, 'register.html')", "changed') return redirect(f'/edit/{request.user.id}/') class PasswordResetView(View): def get(self, request): return render(request, 'password-reset.html') def post(self,", "redirect('login-page') class LogoutView(View): def get(self, request): logout(request) return redirect('/') class UserPanelView(View): def get(self,", "redirect(f'/edit/{request.user.id}/') return render(request, 'user-edit.html') def post(self, request, user_id): user = User.objects.get(id=user_id) first_name =", "or any(c.isdigit() for c in password2) is False: messages.error(request, 'The password does not", "twój password reset link: {activation_url}' send_mail( email_subject, email_body, '<EMAIL>', [email], fail_silently=False, ) messages.success(request,", "return render(request, 'login.html') except ObjectDoesNotExist: messages.error(request, 'Given e-mail does not exist in the", "request.POST.get('new_password1') or not request.POST.get( 'new_password2'): messages.error(request, 'Please fill all fields correctly') return render(request,", "fields correctly') return render(request, 'change-password.html') old_password = request.POST.get('old_password') user = authenticate(request, username=request.user.email, password=old_password)", "password=old_password) if user is None: messages.error(request, 'Old password incorrect') return render(request, 'change-password.html') new_password1", "if token == stored_token: if not token_generator.check_token(user, token): messages.error(request, 'Password has already been", "VerificationView(View): def get(self, request, uidb64, token): try: id = force_str(urlsafe_base64_decode(uidb64)) user = 
User.objects.get(id=id)", "been changed') return redirect(f'/edit/{request.user.id}/') class PasswordChangeView(View): def get(self, request, user_id): if request.user.id !=", "urlsafe_base64_encode, urlsafe_base64_decode from django.contrib.sites.shortcuts import get_current_site from .utils import token_generator from django.core.exceptions import", "return redirect(f'/edit/{request.user.id}/') class PasswordChangeView(View): def get(self, request, user_id): if request.user.id != user_id: return", "or not request.POST.get('new_password1') or not request.POST.get( 'new_password2'): messages.error(request, 'Please fill all fields correctly')", "user = authenticate(request, username=email, password=password) if user is not None: login(request, user) return", "if not request.POST.get('old_password') or not request.POST.get('new_password1') or not request.POST.get( 'new_password2'): messages.error(request, 'Please fill", "token): messages.error(request, 'Account is already activated') return redirect('login-page') if user.is_active: return redirect('login-page') user.is_active", "= f'Hello {user}, twój password reset link: {activation_url}' send_mail( email_subject, email_body, '<EMAIL>', [email],", "token_generator from django.core.exceptions import ObjectDoesNotExist class LoginView(View): def get(self, request): return render(request, 'login.html')", "return render(request, 'password-reset.html') def post(self, request): email = request.POST.get('email') try: user = User.objects.get(email=email)", "request.POST.get('email') password = request.POST.get('password') user = authenticate(request, username=email, password=password) if user is not", "not name or not surname or not message: messages.error(request, 'Please fill all fields", "new_user = authenticate(request, username=request.user.email, password=<PASSWORD>) if user is None: messages.error(request, 'Something went wrong')", "False \\ or any(c.isupper() for c in password2) is False \\ or any(c.islower()", 
"import send_mail from django.urls import reverse from django.utils.encoding import force_bytes, force_str, DjangoUnicodeDecodeError from", "import ObjectDoesNotExist class LoginView(View): def get(self, request): return render(request, 'login.html') def post(self, request):", "{name} {surname}' email_body = message administrators = User.objects.filter(is_superuser=True) if not name or not", "PasswordChangeView(View): def get(self, request, user_id): if request.user.id != user_id: return redirect(f'/edit/{request.user.id}/') return render(request,", "f'http://{domain}{link}' email_body = f'Hello {user}, twój password reset link: {activation_url}' send_mail( email_subject, email_body,", "request.POST.get('password2') if password != <PASSWORD>: messages.error(request, 'Passwords mismatch') return render(request, 'user-edit.html') user =", "messages.error(request, 'Incorrect link or account is already activated') return redirect('login-page') except ObjectDoesNotExist: messages.error(request," ]
[ "objects\"\"\" class Target: def __init__(self, id_, target_type): self.id = id_ self.type = target_type", "to abstract mappings to other objects\"\"\" class Target: def __init__(self, id_, target_type): self.id", "meant to abstract mappings to other objects\"\"\" class Target: def __init__(self, id_, target_type):", "to other objects\"\"\" class Target: def __init__(self, id_, target_type): self.id = id_ self.type", "mappings to other objects\"\"\" class Target: def __init__(self, id_, target_type): self.id = id_", "other objects\"\"\" class Target: def __init__(self, id_, target_type): self.id = id_ self.type =", "__init__(self, id_, target_type): self.id = id_ self.type = target_type def __repr__(self): return \"<Target#{id},", "\"\"\"Target class meant to abstract mappings to other objects\"\"\" class Target: def __init__(self,", "abstract mappings to other objects\"\"\" class Target: def __init__(self, id_, target_type): self.id =", "class Target: def __init__(self, id_, target_type): self.id = id_ self.type = target_type def", "def __init__(self, id_, target_type): self.id = id_ self.type = target_type def __repr__(self): return", "id_, target_type): self.id = id_ self.type = target_type def __repr__(self): return \"<Target#{id}, {type}>\".format(**self.__dict__)", "<filename>tableauserverclient/models/target.py \"\"\"Target class meant to abstract mappings to other objects\"\"\" class Target: def", "class meant to abstract mappings to other objects\"\"\" class Target: def __init__(self, id_,", "Target: def __init__(self, id_, target_type): self.id = id_ self.type = target_type def __repr__(self):" ]
[ "import torch.nn as nn import torch.nn.functional as F import numpy as np from", "r\"\"\"Gradient Vaccine (GradVac). This method is proposed in `Gradient Vaccine: Investigating and Improving", "<https://openreview.net/forum?id=F1vEjWK-lH_>`_ \\ and implemented by us. Args: beta (float, default=0.5): The exponential moving", "default=0.5): The exponential moving average (EMA) decay parameter. .. warning:: GradVac is not", "Spotlight) <https://openreview.net/forum?id=F1vEjWK-lH_>`_ \\ and implemented by us. Args: beta (float, default=0.5): The exponential", "(pc_grads[tn_i].norm()*grads[tn_j].norm()) if rho_ij < self.rho_T[tn_i, tn_j]: w = pc_grads[tn_i].norm()*(self.rho_T[tn_i, tn_j]*(1-rho_ij**2).sqrt()-rho_ij*(1-self.rho_T[tn_i, tn_j]**2).sqrt())/(grads[tn_j].norm()*(1-self.rho_T[tn_i, tn_j]**2).sqrt()) pc_grads[tn_i]", "= pc_grads[tn_i].norm()*(self.rho_T[tn_i, tn_j]*(1-rho_ij**2).sqrt()-rho_ij*(1-self.rho_T[tn_i, tn_j]**2).sqrt())/(grads[tn_j].norm()*(1-self.rho_T[tn_i, tn_j]**2).sqrt()) pc_grads[tn_i] += grads[tn_j]*w batch_weight[tn_j] += w.item() self.rho_T[tn_i, tn_j]", "def backward(self, losses, **kwargs): beta = kwargs['beta'] if self.rep_grad: raise ValueError('No support method", "class GradVac(AbsWeighting): r\"\"\"Gradient Vaccine (GradVac). 
This method is proposed in `Gradient Vaccine: Investigating", "batch_weight[tn_j] += w.item() self.rho_T[tn_i, tn_j] = (1-beta)*self.rho_T[tn_i, tn_j] + beta*rho_ij new_grads = pc_grads.sum(0)", "Optimization in Massively Multilingual Models (ICLR 2021 Spotlight) <https://openreview.net/forum?id=F1vEjWK-lH_>`_ \\ and implemented by", "as F import numpy as np from LibMTL.weighting.abstract_weighting import AbsWeighting class GradVac(AbsWeighting): r\"\"\"Gradient", "tn_j]: w = pc_grads[tn_i].norm()*(self.rho_T[tn_i, tn_j]*(1-rho_ij**2).sqrt()-rho_ij*(1-self.rho_T[tn_i, tn_j]**2).sqrt())/(grads[tn_j].norm()*(1-self.rho_T[tn_i, tn_j]**2).sqrt()) pc_grads[tn_i] += grads[tn_j]*w batch_weight[tn_j] += w.item()", "if rho_ij < self.rho_T[tn_i, tn_j]: w = pc_grads[tn_i].norm()*(self.rho_T[tn_i, tn_j]*(1-rho_ij**2).sqrt()-rho_ij*(1-self.rho_T[tn_i, tn_j]**2).sqrt())/(grads[tn_j].norm()*(1-self.rho_T[tn_i, tn_j]**2).sqrt()) pc_grads[tn_i] +=", "torch.zeros(self.task_num, self.task_num).to(self.device) def backward(self, losses, **kwargs): beta = kwargs['beta'] if self.rep_grad: raise ValueError('No", "nn import torch.nn.functional as F import numpy as np from LibMTL.weighting.abstract_weighting import AbsWeighting", "and implemented by us. 
Args: beta (float, default=0.5): The exponential moving average (EMA)", "This method is proposed in `Gradient Vaccine: Investigating and Improving Multi-task Optimization in", "task_index: rho_ij = torch.dot(pc_grads[tn_i], grads[tn_j]) / (pc_grads[tn_i].norm()*grads[tn_j].norm()) if rho_ij < self.rho_T[tn_i, tn_j]: w", "as nn import torch.nn.functional as F import numpy as np from LibMTL.weighting.abstract_weighting import", "method GradVac with representation gradients (rep_grad=True)') else: self._compute_grad_dim() grads = self._compute_grad(losses, mode='backward') #", "proposed in `Gradient Vaccine: Investigating and Improving Multi-task Optimization in Massively Multilingual Models", "= torch.zeros(self.task_num, self.task_num).to(self.device) def backward(self, losses, **kwargs): beta = kwargs['beta'] if self.rep_grad: raise", "random.shuffle(task_index) for tn_j in task_index: rho_ij = torch.dot(pc_grads[tn_i], grads[tn_j]) / (pc_grads[tn_i].norm()*grads[tn_j].norm()) if rho_ij", "grads = self._compute_grad(losses, mode='backward') # [task_num, grad_dim] batch_weight = np.ones(len(losses)) pc_grads = grads.clone()", "grads[tn_j]) / (pc_grads[tn_i].norm()*grads[tn_j].norm()) if rho_ij < self.rho_T[tn_i, tn_j]: w = pc_grads[tn_i].norm()*(self.rho_T[tn_i, tn_j]*(1-rho_ij**2).sqrt()-rho_ij*(1-self.rho_T[tn_i, tn_j]**2).sqrt())/(grads[tn_j].norm()*(1-self.rho_T[tn_i,", "self.rho_T = torch.zeros(self.task_num, self.task_num).to(self.device) def backward(self, losses, **kwargs): beta = kwargs['beta'] if self.rep_grad:", "= torch.dot(pc_grads[tn_i], grads[tn_j]) / (pc_grads[tn_i].norm()*grads[tn_j].norm()) if rho_ij < self.rho_T[tn_i, tn_j]: w = pc_grads[tn_i].norm()*(self.rho_T[tn_i,", "parameter. .. 
warning:: GradVac is not supported by representation gradients, i.e., ``rep_grad`` must", "grads[tn_j]*w batch_weight[tn_j] += w.item() self.rho_T[tn_i, tn_j] = (1-beta)*self.rho_T[tn_i, tn_j] + beta*rho_ij new_grads =", "by representation gradients, i.e., ``rep_grad`` must be ``False``. \"\"\" def __init__(self): super(GradVac, self).__init__()", "grad_dim] batch_weight = np.ones(len(losses)) pc_grads = grads.clone() for tn_i in range(self.task_num): task_index =", "batch_weight = np.ones(len(losses)) pc_grads = grads.clone() for tn_i in range(self.task_num): task_index = list(range(self.task_num))", "Improving Multi-task Optimization in Massively Multilingual Models (ICLR 2021 Spotlight) <https://openreview.net/forum?id=F1vEjWK-lH_>`_ \\ and", "tn_j]**2).sqrt())/(grads[tn_j].norm()*(1-self.rho_T[tn_i, tn_j]**2).sqrt()) pc_grads[tn_i] += grads[tn_j]*w batch_weight[tn_j] += w.item() self.rho_T[tn_i, tn_j] = (1-beta)*self.rho_T[tn_i, tn_j]", "tn_j]**2).sqrt()) pc_grads[tn_i] += grads[tn_j]*w batch_weight[tn_j] += w.item() self.rho_T[tn_i, tn_j] = (1-beta)*self.rho_T[tn_i, tn_j] +", "import AbsWeighting class GradVac(AbsWeighting): r\"\"\"Gradient Vaccine (GradVac). This method is proposed in `Gradient", "losses, **kwargs): beta = kwargs['beta'] if self.rep_grad: raise ValueError('No support method GradVac with", "def init_param(self): self.rho_T = torch.zeros(self.task_num, self.task_num).to(self.device) def backward(self, losses, **kwargs): beta = kwargs['beta']", "self.rho_T[tn_i, tn_j]: w = pc_grads[tn_i].norm()*(self.rho_T[tn_i, tn_j]*(1-rho_ij**2).sqrt()-rho_ij*(1-self.rho_T[tn_i, tn_j]**2).sqrt())/(grads[tn_j].norm()*(1-self.rho_T[tn_i, tn_j]**2).sqrt()) pc_grads[tn_i] += grads[tn_j]*w batch_weight[tn_j] +=", "gradients, i.e., ``rep_grad`` must be ``False``. \"\"\" def __init__(self): super(GradVac, self).__init__() def init_param(self):", "Vaccine (GradVac). 
This method is proposed in `Gradient Vaccine: Investigating and Improving Multi-task", "Multi-task Optimization in Massively Multilingual Models (ICLR 2021 Spotlight) <https://openreview.net/forum?id=F1vEjWK-lH_>`_ \\ and implemented", "supported by representation gradients, i.e., ``rep_grad`` must be ``False``. \"\"\" def __init__(self): super(GradVac,", "decay parameter. .. warning:: GradVac is not supported by representation gradients, i.e., ``rep_grad``", "as np from LibMTL.weighting.abstract_weighting import AbsWeighting class GradVac(AbsWeighting): r\"\"\"Gradient Vaccine (GradVac). This method", "by us. Args: beta (float, default=0.5): The exponential moving average (EMA) decay parameter.", "(GradVac). This method is proposed in `Gradient Vaccine: Investigating and Improving Multi-task Optimization", "in Massively Multilingual Models (ICLR 2021 Spotlight) <https://openreview.net/forum?id=F1vEjWK-lH_>`_ \\ and implemented by us.", "(ICLR 2021 Spotlight) <https://openreview.net/forum?id=F1vEjWK-lH_>`_ \\ and implemented by us. Args: beta (float, default=0.5):", "is not supported by representation gradients, i.e., ``rep_grad`` must be ``False``. \"\"\" def", "np from LibMTL.weighting.abstract_weighting import AbsWeighting class GradVac(AbsWeighting): r\"\"\"Gradient Vaccine (GradVac). This method is", "= list(range(self.task_num)) task_index.remove(tn_i) random.shuffle(task_index) for tn_j in task_index: rho_ij = torch.dot(pc_grads[tn_i], grads[tn_j]) /", "raise ValueError('No support method GradVac with representation gradients (rep_grad=True)') else: self._compute_grad_dim() grads =", "representation gradients, i.e., ``rep_grad`` must be ``False``. 
\"\"\" def __init__(self): super(GradVac, self).__init__() def", "tn_j in task_index: rho_ij = torch.dot(pc_grads[tn_i], grads[tn_j]) / (pc_grads[tn_i].norm()*grads[tn_j].norm()) if rho_ij < self.rho_T[tn_i,", "w = pc_grads[tn_i].norm()*(self.rho_T[tn_i, tn_j]*(1-rho_ij**2).sqrt()-rho_ij*(1-self.rho_T[tn_i, tn_j]**2).sqrt())/(grads[tn_j].norm()*(1-self.rho_T[tn_i, tn_j]**2).sqrt()) pc_grads[tn_i] += grads[tn_j]*w batch_weight[tn_j] += w.item() self.rho_T[tn_i,", "= self._compute_grad(losses, mode='backward') # [task_num, grad_dim] batch_weight = np.ones(len(losses)) pc_grads = grads.clone() for", "exponential moving average (EMA) decay parameter. .. warning:: GradVac is not supported by", "task_index.remove(tn_i) random.shuffle(task_index) for tn_j in task_index: rho_ij = torch.dot(pc_grads[tn_i], grads[tn_j]) / (pc_grads[tn_i].norm()*grads[tn_j].norm()) if", "/ (pc_grads[tn_i].norm()*grads[tn_j].norm()) if rho_ij < self.rho_T[tn_i, tn_j]: w = pc_grads[tn_i].norm()*(self.rho_T[tn_i, tn_j]*(1-rho_ij**2).sqrt()-rho_ij*(1-self.rho_T[tn_i, tn_j]**2).sqrt())/(grads[tn_j].norm()*(1-self.rho_T[tn_i, tn_j]**2).sqrt())", "GradVac is not supported by representation gradients, i.e., ``rep_grad`` must be ``False``. \"\"\"", "Multilingual Models (ICLR 2021 Spotlight) <https://openreview.net/forum?id=F1vEjWK-lH_>`_ \\ and implemented by us. Args: beta", "mode='backward') # [task_num, grad_dim] batch_weight = np.ones(len(losses)) pc_grads = grads.clone() for tn_i in", "tn_i in range(self.task_num): task_index = list(range(self.task_num)) task_index.remove(tn_i) random.shuffle(task_index) for tn_j in task_index: rho_ij", "for tn_j in task_index: rho_ij = torch.dot(pc_grads[tn_i], grads[tn_j]) / (pc_grads[tn_i].norm()*grads[tn_j].norm()) if rho_ij <", "LibMTL.weighting.abstract_weighting import AbsWeighting class GradVac(AbsWeighting): r\"\"\"Gradient Vaccine (GradVac). 
This method is proposed in", "Investigating and Improving Multi-task Optimization in Massively Multilingual Models (ICLR 2021 Spotlight) <https://openreview.net/forum?id=F1vEjWK-lH_>`_", "i.e., ``rep_grad`` must be ``False``. \"\"\" def __init__(self): super(GradVac, self).__init__() def init_param(self): self.rho_T", "random import torch.nn as nn import torch.nn.functional as F import numpy as np", "pc_grads[tn_i] += grads[tn_j]*w batch_weight[tn_j] += w.item() self.rho_T[tn_i, tn_j] = (1-beta)*self.rho_T[tn_i, tn_j] + beta*rho_ij", "(float, default=0.5): The exponential moving average (EMA) decay parameter. .. warning:: GradVac is", "super(GradVac, self).__init__() def init_param(self): self.rho_T = torch.zeros(self.task_num, self.task_num).to(self.device) def backward(self, losses, **kwargs): beta", "The exponential moving average (EMA) decay parameter. .. warning:: GradVac is not supported", "2021 Spotlight) <https://openreview.net/forum?id=F1vEjWK-lH_>`_ \\ and implemented by us. Args: beta (float, default=0.5): The", "beta = kwargs['beta'] if self.rep_grad: raise ValueError('No support method GradVac with representation gradients", "kwargs['beta'] if self.rep_grad: raise ValueError('No support method GradVac with representation gradients (rep_grad=True)') else:", "**kwargs): beta = kwargs['beta'] if self.rep_grad: raise ValueError('No support method GradVac with representation", "range(self.task_num): task_index = list(range(self.task_num)) task_index.remove(tn_i) random.shuffle(task_index) for tn_j in task_index: rho_ij = torch.dot(pc_grads[tn_i],", "Vaccine: Investigating and Improving Multi-task Optimization in Massively Multilingual Models (ICLR 2021 Spotlight)", "Models (ICLR 2021 Spotlight) <https://openreview.net/forum?id=F1vEjWK-lH_>`_ \\ and implemented by us. 
Args: beta (float,", "rho_ij = torch.dot(pc_grads[tn_i], grads[tn_j]) / (pc_grads[tn_i].norm()*grads[tn_j].norm()) if rho_ij < self.rho_T[tn_i, tn_j]: w =", "warning:: GradVac is not supported by representation gradients, i.e., ``rep_grad`` must be ``False``.", "if self.rep_grad: raise ValueError('No support method GradVac with representation gradients (rep_grad=True)') else: self._compute_grad_dim()", "us. Args: beta (float, default=0.5): The exponential moving average (EMA) decay parameter. ..", "(EMA) decay parameter. .. warning:: GradVac is not supported by representation gradients, i.e.,", "Args: beta (float, default=0.5): The exponential moving average (EMA) decay parameter. .. warning::", "+= w.item() self.rho_T[tn_i, tn_j] = (1-beta)*self.rho_T[tn_i, tn_j] + beta*rho_ij new_grads = pc_grads.sum(0) self._reset_grad(new_grads)", "torch.nn as nn import torch.nn.functional as F import numpy as np from LibMTL.weighting.abstract_weighting", "self).__init__() def init_param(self): self.rho_T = torch.zeros(self.task_num, self.task_num).to(self.device) def backward(self, losses, **kwargs): beta =", "backward(self, losses, **kwargs): beta = kwargs['beta'] if self.rep_grad: raise ValueError('No support method GradVac", "= grads.clone() for tn_i in range(self.task_num): task_index = list(range(self.task_num)) task_index.remove(tn_i) random.shuffle(task_index) for tn_j", "import torch, random import torch.nn as nn import torch.nn.functional as F import numpy", "[task_num, grad_dim] batch_weight = np.ones(len(losses)) pc_grads = grads.clone() for tn_i in range(self.task_num): task_index", "self.rho_T[tn_i, tn_j] = (1-beta)*self.rho_T[tn_i, tn_j] + beta*rho_ij new_grads = pc_grads.sum(0) self._reset_grad(new_grads) return batch_weight", "pc_grads = grads.clone() for tn_i in range(self.task_num): task_index = list(range(self.task_num)) task_index.remove(tn_i) random.shuffle(task_index) for", "self.task_num).to(self.device) def backward(self, losses, **kwargs): beta = 
kwargs['beta'] if self.rep_grad: raise ValueError('No support", "is proposed in `Gradient Vaccine: Investigating and Improving Multi-task Optimization in Massively Multilingual", "Massively Multilingual Models (ICLR 2021 Spotlight) <https://openreview.net/forum?id=F1vEjWK-lH_>`_ \\ and implemented by us. Args:", "be ``False``. \"\"\" def __init__(self): super(GradVac, self).__init__() def init_param(self): self.rho_T = torch.zeros(self.task_num, self.task_num).to(self.device)", "torch.dot(pc_grads[tn_i], grads[tn_j]) / (pc_grads[tn_i].norm()*grads[tn_j].norm()) if rho_ij < self.rho_T[tn_i, tn_j]: w = pc_grads[tn_i].norm()*(self.rho_T[tn_i, tn_j]*(1-rho_ij**2).sqrt()-rho_ij*(1-self.rho_T[tn_i,", "+= grads[tn_j]*w batch_weight[tn_j] += w.item() self.rho_T[tn_i, tn_j] = (1-beta)*self.rho_T[tn_i, tn_j] + beta*rho_ij new_grads", "and Improving Multi-task Optimization in Massively Multilingual Models (ICLR 2021 Spotlight) <https://openreview.net/forum?id=F1vEjWK-lH_>`_ \\", "init_param(self): self.rho_T = torch.zeros(self.task_num, self.task_num).to(self.device) def backward(self, losses, **kwargs): beta = kwargs['beta'] if", ".. warning:: GradVac is not supported by representation gradients, i.e., ``rep_grad`` must be", "gradients (rep_grad=True)') else: self._compute_grad_dim() grads = self._compute_grad(losses, mode='backward') # [task_num, grad_dim] batch_weight =", "AbsWeighting class GradVac(AbsWeighting): r\"\"\"Gradient Vaccine (GradVac). This method is proposed in `Gradient Vaccine:", "ValueError('No support method GradVac with representation gradients (rep_grad=True)') else: self._compute_grad_dim() grads = self._compute_grad(losses,", "must be ``False``. \"\"\" def __init__(self): super(GradVac, self).__init__() def init_param(self): self.rho_T = torch.zeros(self.task_num,", "F import numpy as np from LibMTL.weighting.abstract_weighting import AbsWeighting class GradVac(AbsWeighting): r\"\"\"Gradient Vaccine", "implemented by us. 
Args: beta (float, default=0.5): The exponential moving average (EMA) decay", "list(range(self.task_num)) task_index.remove(tn_i) random.shuffle(task_index) for tn_j in task_index: rho_ij = torch.dot(pc_grads[tn_i], grads[tn_j]) / (pc_grads[tn_i].norm()*grads[tn_j].norm())", "GradVac(AbsWeighting): r\"\"\"Gradient Vaccine (GradVac). This method is proposed in `Gradient Vaccine: Investigating and", "rho_ij < self.rho_T[tn_i, tn_j]: w = pc_grads[tn_i].norm()*(self.rho_T[tn_i, tn_j]*(1-rho_ij**2).sqrt()-rho_ij*(1-self.rho_T[tn_i, tn_j]**2).sqrt())/(grads[tn_j].norm()*(1-self.rho_T[tn_i, tn_j]**2).sqrt()) pc_grads[tn_i] += grads[tn_j]*w", "else: self._compute_grad_dim() grads = self._compute_grad(losses, mode='backward') # [task_num, grad_dim] batch_weight = np.ones(len(losses)) pc_grads", "\\ and implemented by us. Args: beta (float, default=0.5): The exponential moving average", "<reponame>median-research-group/LibMTL<filename>LibMTL/weighting/GradVac.py import torch, random import torch.nn as nn import torch.nn.functional as F import", "GradVac with representation gradients (rep_grad=True)') else: self._compute_grad_dim() grads = self._compute_grad(losses, mode='backward') # [task_num,", "for tn_i in range(self.task_num): task_index = list(range(self.task_num)) task_index.remove(tn_i) random.shuffle(task_index) for tn_j in task_index:", "moving average (EMA) decay parameter. .. 
warning:: GradVac is not supported by representation", "self._compute_grad(losses, mode='backward') # [task_num, grad_dim] batch_weight = np.ones(len(losses)) pc_grads = grads.clone() for tn_i", "(rep_grad=True)') else: self._compute_grad_dim() grads = self._compute_grad(losses, mode='backward') # [task_num, grad_dim] batch_weight = np.ones(len(losses))", "`Gradient Vaccine: Investigating and Improving Multi-task Optimization in Massively Multilingual Models (ICLR 2021", "\"\"\" def __init__(self): super(GradVac, self).__init__() def init_param(self): self.rho_T = torch.zeros(self.task_num, self.task_num).to(self.device) def backward(self,", "with representation gradients (rep_grad=True)') else: self._compute_grad_dim() grads = self._compute_grad(losses, mode='backward') # [task_num, grad_dim]", "in task_index: rho_ij = torch.dot(pc_grads[tn_i], grads[tn_j]) / (pc_grads[tn_i].norm()*grads[tn_j].norm()) if rho_ij < self.rho_T[tn_i, tn_j]:", "import numpy as np from LibMTL.weighting.abstract_weighting import AbsWeighting class GradVac(AbsWeighting): r\"\"\"Gradient Vaccine (GradVac).", "support method GradVac with representation gradients (rep_grad=True)') else: self._compute_grad_dim() grads = self._compute_grad(losses, mode='backward')", "method is proposed in `Gradient Vaccine: Investigating and Improving Multi-task Optimization in Massively", "# [task_num, grad_dim] batch_weight = np.ones(len(losses)) pc_grads = grads.clone() for tn_i in range(self.task_num):", "``False``. \"\"\" def __init__(self): super(GradVac, self).__init__() def init_param(self): self.rho_T = torch.zeros(self.task_num, self.task_num).to(self.device) def", "import torch.nn.functional as F import numpy as np from LibMTL.weighting.abstract_weighting import AbsWeighting class", "not supported by representation gradients, i.e., ``rep_grad`` must be ``False``. 
\"\"\" def __init__(self):", "representation gradients (rep_grad=True)') else: self._compute_grad_dim() grads = self._compute_grad(losses, mode='backward') # [task_num, grad_dim] batch_weight", "``rep_grad`` must be ``False``. \"\"\" def __init__(self): super(GradVac, self).__init__() def init_param(self): self.rho_T =", "average (EMA) decay parameter. .. warning:: GradVac is not supported by representation gradients,", "= np.ones(len(losses)) pc_grads = grads.clone() for tn_i in range(self.task_num): task_index = list(range(self.task_num)) task_index.remove(tn_i)", "torch.nn.functional as F import numpy as np from LibMTL.weighting.abstract_weighting import AbsWeighting class GradVac(AbsWeighting):", "pc_grads[tn_i].norm()*(self.rho_T[tn_i, tn_j]*(1-rho_ij**2).sqrt()-rho_ij*(1-self.rho_T[tn_i, tn_j]**2).sqrt())/(grads[tn_j].norm()*(1-self.rho_T[tn_i, tn_j]**2).sqrt()) pc_grads[tn_i] += grads[tn_j]*w batch_weight[tn_j] += w.item() self.rho_T[tn_i, tn_j] =", "__init__(self): super(GradVac, self).__init__() def init_param(self): self.rho_T = torch.zeros(self.task_num, self.task_num).to(self.device) def backward(self, losses, **kwargs):", "in `Gradient Vaccine: Investigating and Improving Multi-task Optimization in Massively Multilingual Models (ICLR", "in range(self.task_num): task_index = list(range(self.task_num)) task_index.remove(tn_i) random.shuffle(task_index) for tn_j in task_index: rho_ij =", "= kwargs['beta'] if self.rep_grad: raise ValueError('No support method GradVac with representation gradients (rep_grad=True)')", "from LibMTL.weighting.abstract_weighting import AbsWeighting class GradVac(AbsWeighting): r\"\"\"Gradient Vaccine (GradVac). This method is proposed", "torch, random import torch.nn as nn import torch.nn.functional as F import numpy as", "beta (float, default=0.5): The exponential moving average (EMA) decay parameter. .. 
warning:: GradVac", "np.ones(len(losses)) pc_grads = grads.clone() for tn_i in range(self.task_num): task_index = list(range(self.task_num)) task_index.remove(tn_i) random.shuffle(task_index)", "def __init__(self): super(GradVac, self).__init__() def init_param(self): self.rho_T = torch.zeros(self.task_num, self.task_num).to(self.device) def backward(self, losses,", "task_index = list(range(self.task_num)) task_index.remove(tn_i) random.shuffle(task_index) for tn_j in task_index: rho_ij = torch.dot(pc_grads[tn_i], grads[tn_j])", "grads.clone() for tn_i in range(self.task_num): task_index = list(range(self.task_num)) task_index.remove(tn_i) random.shuffle(task_index) for tn_j in", "w.item() self.rho_T[tn_i, tn_j] = (1-beta)*self.rho_T[tn_i, tn_j] + beta*rho_ij new_grads = pc_grads.sum(0) self._reset_grad(new_grads) return", "self.rep_grad: raise ValueError('No support method GradVac with representation gradients (rep_grad=True)') else: self._compute_grad_dim() grads", "tn_j]*(1-rho_ij**2).sqrt()-rho_ij*(1-self.rho_T[tn_i, tn_j]**2).sqrt())/(grads[tn_j].norm()*(1-self.rho_T[tn_i, tn_j]**2).sqrt()) pc_grads[tn_i] += grads[tn_j]*w batch_weight[tn_j] += w.item() self.rho_T[tn_i, tn_j] = (1-beta)*self.rho_T[tn_i,", "< self.rho_T[tn_i, tn_j]: w = pc_grads[tn_i].norm()*(self.rho_T[tn_i, tn_j]*(1-rho_ij**2).sqrt()-rho_ij*(1-self.rho_T[tn_i, tn_j]**2).sqrt())/(grads[tn_j].norm()*(1-self.rho_T[tn_i, tn_j]**2).sqrt()) pc_grads[tn_i] += grads[tn_j]*w batch_weight[tn_j]", "self._compute_grad_dim() grads = self._compute_grad(losses, mode='backward') # [task_num, grad_dim] batch_weight = np.ones(len(losses)) pc_grads =", "numpy as np from LibMTL.weighting.abstract_weighting import AbsWeighting class GradVac(AbsWeighting): r\"\"\"Gradient Vaccine (GradVac). This" ]
[ "def format_segment(seg): \"\"\" Formats a segment assuming it's an instance of class segment", "len(data) > 6: audiofile, channel, speaker, start, stop, label = data[:6] text =", "class derived in convert_text \"\"\" from asrtoolkit.data_structures.segment import segment def format_segment(seg): \"\"\" Formats", "an instance of class segment with elements audiofile, channel, speaker, start and stop", "files Expected file format is derived from http://www1.icsi.berkeley.edu/Speech/docs/sctk-1.2/infmts.htm#stm_fmt_name_0 This expects a segment from", "'stop': stop, 'label': label, 'text': text } ) return seg if seg and", "seg = segment( { 'audiofile': audiofile, 'channel': channel, 'speaker': speaker, 'start': start, 'stop':", "in f: seg = parse_line(line) if seg is not None: segments.append(seg) return segments", "python \"\"\" Module for reading STM files Expected file format is derived from", "Expected file format is derived from http://www1.icsi.berkeley.edu/Speech/docs/sctk-1.2/infmts.htm#stm_fmt_name_0 This expects a segment from class", "'stop', 'label', 'text')) def parse_line(line): \" parse a single line of an stm", "return seg if seg and seg.validate() else None def read_file(file_name): \"\"\" Reads an", "'text')) def parse_line(line): \" parse a single line of an stm file\" data", "speaker, start, stop, label = data[:6] text = \" \".join(data[6:]) seg = segment(", "audiofile, channel, speaker, start and stop times, label, and text \"\"\" return \"", "read_file(file_name): \"\"\" Reads an STM file, skipping any gap lines \"\"\" segments =", "'audiofile': audiofile, 'channel': channel, 'speaker': speaker, 'start': start, 'stop': stop, 'label': label, 'text':", "segments = [] with open(file_name, encoding=\"utf-8\") as f: for line in f: seg", "STM file, skipping any gap lines \"\"\" segments = [] with open(file_name, encoding=\"utf-8\")", "for line in f: seg = parse_line(line) if seg is not None: segments.append(seg)", "and stop times, label, and text 
\"\"\" return \" \".join(seg.__dict__[_] for _ in", "\"\"\" from asrtoolkit.data_structures.segment import segment def format_segment(seg): \"\"\" Formats a segment assuming it's", "parse a single line of an stm file\" data = line.strip().split() seg =", "format_segment(seg): \"\"\" Formats a segment assuming it's an instance of class segment with", "speaker, start and stop times, label, and text \"\"\" return \" \".join(seg.__dict__[_] for", "in convert_text \"\"\" from asrtoolkit.data_structures.segment import segment def format_segment(seg): \"\"\" Formats a segment", "STM files Expected file format is derived from http://www1.icsi.berkeley.edu/Speech/docs/sctk-1.2/infmts.htm#stm_fmt_name_0 This expects a segment", "if seg and seg.validate() else None def read_file(file_name): \"\"\" Reads an STM file,", "\"\"\" Module for reading STM files Expected file format is derived from http://www1.icsi.berkeley.edu/Speech/docs/sctk-1.2/infmts.htm#stm_fmt_name_0", "\"\"\" Reads an STM file, skipping any gap lines \"\"\" segments = []", "def read_file(file_name): \"\"\" Reads an STM file, skipping any gap lines \"\"\" segments", "single line of an stm file\" data = line.strip().split() seg = None if", "channel, 'speaker': speaker, 'start': start, 'stop': stop, 'label': label, 'text': text } )", "'text': text } ) return seg if seg and seg.validate() else None def", "for reading STM files Expected file format is derived from http://www1.icsi.berkeley.edu/Speech/docs/sctk-1.2/infmts.htm#stm_fmt_name_0 This expects", "as f: for line in f: seg = parse_line(line) if seg is not", "asrtoolkit.data_structures.segment import segment def format_segment(seg): \"\"\" Formats a segment assuming it's an instance", "from asrtoolkit.data_structures.segment import segment def format_segment(seg): \"\"\" Formats a segment assuming it's an", "and seg.validate() else None def read_file(file_name): \"\"\" Reads an STM file, skipping any", "segment( { 'audiofile': audiofile, 'channel': channel, 
'speaker': speaker, 'start': start, 'stop': stop, 'label':", "from http://www1.icsi.berkeley.edu/Speech/docs/sctk-1.2/infmts.htm#stm_fmt_name_0 This expects a segment from class derived in convert_text \"\"\" from", "seg if seg and seg.validate() else None def read_file(file_name): \"\"\" Reads an STM", "is derived from http://www1.icsi.berkeley.edu/Speech/docs/sctk-1.2/infmts.htm#stm_fmt_name_0 This expects a segment from class derived in convert_text", "channel, speaker, start, stop, label = data[:6] text = \" \".join(data[6:]) seg =", "Formats a segment assuming it's an instance of class segment with elements audiofile,", "text = \" \".join(data[6:]) seg = segment( { 'audiofile': audiofile, 'channel': channel, 'speaker':", "= [] with open(file_name, encoding=\"utf-8\") as f: for line in f: seg =", "This expects a segment from class derived in convert_text \"\"\" from asrtoolkit.data_structures.segment import", "reading STM files Expected file format is derived from http://www1.icsi.berkeley.edu/Speech/docs/sctk-1.2/infmts.htm#stm_fmt_name_0 This expects a", "speaker, 'start': start, 'stop': stop, 'label': label, 'text': text } ) return seg", "'speaker', 'start', 'stop', 'label', 'text')) def parse_line(line): \" parse a single line of", "line.strip().split() seg = None if len(data) > 6: audiofile, channel, speaker, start, stop,", "= \" \".join(data[6:]) seg = segment( { 'audiofile': audiofile, 'channel': channel, 'speaker': speaker,", "convert_text \"\"\" from asrtoolkit.data_structures.segment import segment def format_segment(seg): \"\"\" Formats a segment assuming", "None def read_file(file_name): \"\"\" Reads an STM file, skipping any gap lines \"\"\"", "an stm file\" data = line.strip().split() seg = None if len(data) > 6:", "_ in ('audiofile', 'channel', 'speaker', 'start', 'stop', 'label', 'text')) def parse_line(line): \" parse", "file format is derived from http://www1.icsi.berkeley.edu/Speech/docs/sctk-1.2/infmts.htm#stm_fmt_name_0 This expects a 
segment from class derived", "seg = None if len(data) > 6: audiofile, channel, speaker, start, stop, label", "None if len(data) > 6: audiofile, channel, speaker, start, stop, label = data[:6]", "\".join(seg.__dict__[_] for _ in ('audiofile', 'channel', 'speaker', 'start', 'stop', 'label', 'text')) def parse_line(line):", "if len(data) > 6: audiofile, channel, speaker, start, stop, label = data[:6] text", "stm file\" data = line.strip().split() seg = None if len(data) > 6: audiofile,", "return \" \".join(seg.__dict__[_] for _ in ('audiofile', 'channel', 'speaker', 'start', 'stop', 'label', 'text'))", "'label', 'text')) def parse_line(line): \" parse a single line of an stm file\"", "start, 'stop': stop, 'label': label, 'text': text } ) return seg if seg", "= data[:6] text = \" \".join(data[6:]) seg = segment( { 'audiofile': audiofile, 'channel':", "in ('audiofile', 'channel', 'speaker', 'start', 'stop', 'label', 'text')) def parse_line(line): \" parse a", "it's an instance of class segment with elements audiofile, channel, speaker, start and", "'start': start, 'stop': stop, 'label': label, 'text': text } ) return seg if", "'speaker': speaker, 'start': start, 'stop': stop, 'label': label, 'text': text } ) return", "with elements audiofile, channel, speaker, start and stop times, label, and text \"\"\"", "channel, speaker, start and stop times, label, and text \"\"\" return \" \".join(seg.__dict__[_]", "of an stm file\" data = line.strip().split() seg = None if len(data) >", "for _ in ('audiofile', 'channel', 'speaker', 'start', 'stop', 'label', 'text')) def parse_line(line): \"", "segment def format_segment(seg): \"\"\" Formats a segment assuming it's an instance of class", "a single line of an stm file\" data = line.strip().split() seg = None", "derived from http://www1.icsi.berkeley.edu/Speech/docs/sctk-1.2/infmts.htm#stm_fmt_name_0 This expects a segment from class derived in convert_text \"\"\"", "class segment with elements audiofile, channel, speaker, start and 
stop times, label, and", "data[:6] text = \" \".join(data[6:]) seg = segment( { 'audiofile': audiofile, 'channel': channel,", "assuming it's an instance of class segment with elements audiofile, channel, speaker, start", "line of an stm file\" data = line.strip().split() seg = None if len(data)", "'channel', 'speaker', 'start', 'stop', 'label', 'text')) def parse_line(line): \" parse a single line", "gap lines \"\"\" segments = [] with open(file_name, encoding=\"utf-8\") as f: for line", "\" \".join(seg.__dict__[_] for _ in ('audiofile', 'channel', 'speaker', 'start', 'stop', 'label', 'text')) def", "seg and seg.validate() else None def read_file(file_name): \"\"\" Reads an STM file, skipping", "import segment def format_segment(seg): \"\"\" Formats a segment assuming it's an instance of", ") return seg if seg and seg.validate() else None def read_file(file_name): \"\"\" Reads", "text \"\"\" return \" \".join(seg.__dict__[_] for _ in ('audiofile', 'channel', 'speaker', 'start', 'stop',", "line in f: seg = parse_line(line) if seg is not None: segments.append(seg) return", "derived in convert_text \"\"\" from asrtoolkit.data_structures.segment import segment def format_segment(seg): \"\"\" Formats a", "data = line.strip().split() seg = None if len(data) > 6: audiofile, channel, speaker,", "('audiofile', 'channel', 'speaker', 'start', 'stop', 'label', 'text')) def parse_line(line): \" parse a single", "label, and text \"\"\" return \" \".join(seg.__dict__[_] for _ in ('audiofile', 'channel', 'speaker',", "with open(file_name, encoding=\"utf-8\") as f: for line in f: seg = parse_line(line) if", "text } ) return seg if seg and seg.validate() else None def read_file(file_name):", "segment with elements audiofile, channel, speaker, start and stop times, label, and text", "6: audiofile, channel, speaker, start, stop, label = data[:6] text = \" \".join(data[6:])", "\"\"\" Formats a segment assuming it's an instance of class segment with elements", "from class derived in 
convert_text \"\"\" from asrtoolkit.data_structures.segment import segment def format_segment(seg): \"\"\"", "of class segment with elements audiofile, channel, speaker, start and stop times, label,", "stop times, label, and text \"\"\" return \" \".join(seg.__dict__[_] for _ in ('audiofile',", "skipping any gap lines \"\"\" segments = [] with open(file_name, encoding=\"utf-8\") as f:", "audiofile, 'channel': channel, 'speaker': speaker, 'start': start, 'stop': stop, 'label': label, 'text': text", "'channel': channel, 'speaker': speaker, 'start': start, 'stop': stop, 'label': label, 'text': text }", "} ) return seg if seg and seg.validate() else None def read_file(file_name): \"\"\"", "> 6: audiofile, channel, speaker, start, stop, label = data[:6] text = \"", "file, skipping any gap lines \"\"\" segments = [] with open(file_name, encoding=\"utf-8\") as", "file\" data = line.strip().split() seg = None if len(data) > 6: audiofile, channel,", "'label': label, 'text': text } ) return seg if seg and seg.validate() else", "segment from class derived in convert_text \"\"\" from asrtoolkit.data_structures.segment import segment def format_segment(seg):", "a segment assuming it's an instance of class segment with elements audiofile, channel,", "stop, 'label': label, 'text': text } ) return seg if seg and seg.validate()", "any gap lines \"\"\" segments = [] with open(file_name, encoding=\"utf-8\") as f: for", "times, label, and text \"\"\" return \" \".join(seg.__dict__[_] for _ in ('audiofile', 'channel',", "format is derived from http://www1.icsi.berkeley.edu/Speech/docs/sctk-1.2/infmts.htm#stm_fmt_name_0 This expects a segment from class derived in", "and text \"\"\" return \" \".join(seg.__dict__[_] for _ in ('audiofile', 'channel', 'speaker', 'start',", "audiofile, channel, speaker, start, stop, label = data[:6] text = \" \".join(data[6:]) seg", "expects a segment from class derived in convert_text \"\"\" from asrtoolkit.data_structures.segment import segment", "\"\"\" 
segments = [] with open(file_name, encoding=\"utf-8\") as f: for line in f:", "start, stop, label = data[:6] text = \" \".join(data[6:]) seg = segment( {", "label = data[:6] text = \" \".join(data[6:]) seg = segment( { 'audiofile': audiofile,", "f: for line in f: seg = parse_line(line) if seg is not None:", "def parse_line(line): \" parse a single line of an stm file\" data =", "lines \"\"\" segments = [] with open(file_name, encoding=\"utf-8\") as f: for line in", "open(file_name, encoding=\"utf-8\") as f: for line in f: seg = parse_line(line) if seg", "[] with open(file_name, encoding=\"utf-8\") as f: for line in f: seg = parse_line(line)", "http://www1.icsi.berkeley.edu/Speech/docs/sctk-1.2/infmts.htm#stm_fmt_name_0 This expects a segment from class derived in convert_text \"\"\" from asrtoolkit.data_structures.segment", "= None if len(data) > 6: audiofile, channel, speaker, start, stop, label =", "a segment from class derived in convert_text \"\"\" from asrtoolkit.data_structures.segment import segment def", "else None def read_file(file_name): \"\"\" Reads an STM file, skipping any gap lines", "parse_line(line): \" parse a single line of an stm file\" data = line.strip().split()", "Reads an STM file, skipping any gap lines \"\"\" segments = [] with", "= segment( { 'audiofile': audiofile, 'channel': channel, 'speaker': speaker, 'start': start, 'stop': stop,", "\" parse a single line of an stm file\" data = line.strip().split() seg", "Module for reading STM files Expected file format is derived from http://www1.icsi.berkeley.edu/Speech/docs/sctk-1.2/infmts.htm#stm_fmt_name_0 This", "#!/usr/bin/env python \"\"\" Module for reading STM files Expected file format is derived", "start and stop times, label, and text \"\"\" return \" \".join(seg.__dict__[_] for _", "{ 'audiofile': audiofile, 'channel': channel, 'speaker': speaker, 'start': start, 'stop': stop, 'label': label,", "encoding=\"utf-8\") as f: for line in f: seg = parse_line(line) if seg is", "\"\"\" return 
\" \".join(seg.__dict__[_] for _ in ('audiofile', 'channel', 'speaker', 'start', 'stop', 'label',", "segment assuming it's an instance of class segment with elements audiofile, channel, speaker,", "'start', 'stop', 'label', 'text')) def parse_line(line): \" parse a single line of an", "\".join(data[6:]) seg = segment( { 'audiofile': audiofile, 'channel': channel, 'speaker': speaker, 'start': start,", "instance of class segment with elements audiofile, channel, speaker, start and stop times,", "seg.validate() else None def read_file(file_name): \"\"\" Reads an STM file, skipping any gap", "stop, label = data[:6] text = \" \".join(data[6:]) seg = segment( { 'audiofile':", "\" \".join(data[6:]) seg = segment( { 'audiofile': audiofile, 'channel': channel, 'speaker': speaker, 'start':", "= line.strip().split() seg = None if len(data) > 6: audiofile, channel, speaker, start,", "elements audiofile, channel, speaker, start and stop times, label, and text \"\"\" return", "an STM file, skipping any gap lines \"\"\" segments = [] with open(file_name,", "label, 'text': text } ) return seg if seg and seg.validate() else None" ]
[ "category[category_name]: some_subcategory = Category(name=subcategory_name, parent_category=some_category) print('\\tAdding subcategory %s' % subcategory_name) some_subcategory.save() def handle(self,", "% category_name) some_category.save() for subcategory_name in category[category_name]: some_subcategory = Category(name=subcategory_name, parent_category=some_category) print('\\tAdding subcategory", "django.core.management.base import BaseCommand from core.models import Category import os import json class Command(BaseCommand):", "= Category(name=category_name) print('Adding category %s' % category_name) some_category.save() for subcategory_name in category[category_name]: some_subcategory", "category_name = list(category.keys())[0] some_category = Category(name=category_name) print('Adding category %s' % category_name) some_category.save() for", "category in categories_json: category_name = list(category.keys())[0] some_category = Category(name=category_name) print('Adding category %s' %", "import os import json class Command(BaseCommand): help = 'Populate the deanslist database with", "= 'Populate the deanslist database with some mock data to display in index.html'", "the deanslist database with some mock data to display in index.html' def _create_categories(self):", "some mock data to display in index.html' def _create_categories(self): with open('categories.json', 'r') as", "display in index.html' def _create_categories(self): with open('categories.json', 'r') as categories_file: categories_json = json.load(categories_file)", "to display in index.html' def _create_categories(self): with open('categories.json', 'r') as categories_file: categories_json =", "categories_json = json.load(categories_file) for category in categories_json: category_name = list(category.keys())[0] some_category = Category(name=category_name)", "Category(name=category_name) print('Adding category %s' % category_name) some_category.save() for subcategory_name in 
category[category_name]: some_subcategory =", "some_subcategory = Category(name=subcategory_name, parent_category=some_category) print('\\tAdding subcategory %s' % subcategory_name) some_subcategory.save() def handle(self, *args,", "json.load(categories_file) for category in categories_json: category_name = list(category.keys())[0] some_category = Category(name=category_name) print('Adding category", "core.models import Category import os import json class Command(BaseCommand): help = 'Populate the", "some_category.save() for subcategory_name in category[category_name]: some_subcategory = Category(name=subcategory_name, parent_category=some_category) print('\\tAdding subcategory %s' %", "deanslist database with some mock data to display in index.html' def _create_categories(self): with", "in categories_json: category_name = list(category.keys())[0] some_category = Category(name=category_name) print('Adding category %s' % category_name)", "with open('categories.json', 'r') as categories_file: categories_json = json.load(categories_file) for category in categories_json: category_name", "categories_file: categories_json = json.load(categories_file) for category in categories_json: category_name = list(category.keys())[0] some_category =", "subcategory_name in category[category_name]: some_subcategory = Category(name=subcategory_name, parent_category=some_category) print('\\tAdding subcategory %s' % subcategory_name) some_subcategory.save()", "'r') as categories_file: categories_json = json.load(categories_file) for category in categories_json: category_name = list(category.keys())[0]", "Command(BaseCommand): help = 'Populate the deanslist database with some mock data to display", "database with some mock data to display in index.html' def _create_categories(self): with open('categories.json',", "data to display in index.html' def _create_categories(self): with open('categories.json', 'r') as categories_file: categories_json", "open('categories.json', 'r') as 
categories_file: categories_json = json.load(categories_file) for category in categories_json: category_name =", "'Populate the deanslist database with some mock data to display in index.html' def", "in index.html' def _create_categories(self): with open('categories.json', 'r') as categories_file: categories_json = json.load(categories_file) for", "list(category.keys())[0] some_category = Category(name=category_name) print('Adding category %s' % category_name) some_category.save() for subcategory_name in", "import json class Command(BaseCommand): help = 'Populate the deanslist database with some mock", "Category import os import json class Command(BaseCommand): help = 'Populate the deanslist database", "index.html' def _create_categories(self): with open('categories.json', 'r') as categories_file: categories_json = json.load(categories_file) for category", "= Category(name=subcategory_name, parent_category=some_category) print('\\tAdding subcategory %s' % subcategory_name) some_subcategory.save() def handle(self, *args, **options):", "BaseCommand from core.models import Category import os import json class Command(BaseCommand): help =", "os import json class Command(BaseCommand): help = 'Populate the deanslist database with some", "= list(category.keys())[0] some_category = Category(name=category_name) print('Adding category %s' % category_name) some_category.save() for subcategory_name", "class Command(BaseCommand): help = 'Populate the deanslist database with some mock data to", "category_name) some_category.save() for subcategory_name in category[category_name]: some_subcategory = Category(name=subcategory_name, parent_category=some_category) print('\\tAdding subcategory %s'", "in category[category_name]: some_subcategory = Category(name=subcategory_name, parent_category=some_category) print('\\tAdding subcategory %s' % subcategory_name) some_subcategory.save() def", "def _create_categories(self): with open('categories.json', 'r') as categories_file: 
categories_json = json.load(categories_file) for category in", "import BaseCommand from core.models import Category import os import json class Command(BaseCommand): help", "from django.core.management.base import BaseCommand from core.models import Category import os import json class", "_create_categories(self): with open('categories.json', 'r') as categories_file: categories_json = json.load(categories_file) for category in categories_json:", "mock data to display in index.html' def _create_categories(self): with open('categories.json', 'r') as categories_file:", "%s' % category_name) some_category.save() for subcategory_name in category[category_name]: some_subcategory = Category(name=subcategory_name, parent_category=some_category) print('\\tAdding", "categories_json: category_name = list(category.keys())[0] some_category = Category(name=category_name) print('Adding category %s' % category_name) some_category.save()", "print('Adding category %s' % category_name) some_category.save() for subcategory_name in category[category_name]: some_subcategory = Category(name=subcategory_name,", "<filename>core/management/commands/populate_db.py from django.core.management.base import BaseCommand from core.models import Category import os import json", "for category in categories_json: category_name = list(category.keys())[0] some_category = Category(name=category_name) print('Adding category %s'", "Category(name=subcategory_name, parent_category=some_category) print('\\tAdding subcategory %s' % subcategory_name) some_subcategory.save() def handle(self, *args, **options): self._create_categories()", "some_category = Category(name=category_name) print('Adding category %s' % category_name) some_category.save() for subcategory_name in category[category_name]:", "with some mock data to display in index.html' def _create_categories(self): with open('categories.json', 'r')", "from core.models import Category import os import json class Command(BaseCommand): help = 'Populate", "= 
json.load(categories_file) for category in categories_json: category_name = list(category.keys())[0] some_category = Category(name=category_name) print('Adding", "import Category import os import json class Command(BaseCommand): help = 'Populate the deanslist", "category %s' % category_name) some_category.save() for subcategory_name in category[category_name]: some_subcategory = Category(name=subcategory_name, parent_category=some_category)", "as categories_file: categories_json = json.load(categories_file) for category in categories_json: category_name = list(category.keys())[0] some_category", "for subcategory_name in category[category_name]: some_subcategory = Category(name=subcategory_name, parent_category=some_category) print('\\tAdding subcategory %s' % subcategory_name)", "help = 'Populate the deanslist database with some mock data to display in", "json class Command(BaseCommand): help = 'Populate the deanslist database with some mock data" ]
[ "class Livro(EntityBase): __tablename__ = 'livro' id = sqlalchemy.Column(sqlalchemy.Integer,primary_key=True) isbn = sqlalchemy.Column(sqlalchemy.String(length=20)) titulo =", "sqlalchemy.Column(sqlalchemy.String(length=255)) anoPublicacao = sqlalchemy.Column(sqlalchemy.Integer) quantidadeEstoque = sqlalchemy.Column(sqlalchemy.Integer) preco = sqlalchemy.Column(sqlalchemy.Float) ativo = sqlalchemy.Column(sqlalchemy.Boolean)", "= sqlalchemy.Column(sqlalchemy.String(length=255)) autor = sqlalchemy.Column(sqlalchemy.String(length=255)) anoPublicacao = sqlalchemy.Column(sqlalchemy.Integer) quantidadeEstoque = sqlalchemy.Column(sqlalchemy.Integer) preco =", "entities.entityBase import * class Livro(EntityBase): __tablename__ = 'livro' id = sqlalchemy.Column(sqlalchemy.Integer,primary_key=True) isbn =", "* class Livro(EntityBase): __tablename__ = 'livro' id = sqlalchemy.Column(sqlalchemy.Integer,primary_key=True) isbn = sqlalchemy.Column(sqlalchemy.String(length=20)) titulo", "isbn = sqlalchemy.Column(sqlalchemy.String(length=20)) titulo = sqlalchemy.Column(sqlalchemy.String(length=255)) autor = sqlalchemy.Column(sqlalchemy.String(length=255)) anoPublicacao = sqlalchemy.Column(sqlalchemy.Integer) quantidadeEstoque", "sqlalchemy.Column(sqlalchemy.Integer,primary_key=True) isbn = sqlalchemy.Column(sqlalchemy.String(length=20)) titulo = sqlalchemy.Column(sqlalchemy.String(length=255)) autor = sqlalchemy.Column(sqlalchemy.String(length=255)) anoPublicacao = sqlalchemy.Column(sqlalchemy.Integer)", "import * class Livro(EntityBase): __tablename__ = 'livro' id = sqlalchemy.Column(sqlalchemy.Integer,primary_key=True) isbn = sqlalchemy.Column(sqlalchemy.String(length=20))", "sqlalchemy.Column(sqlalchemy.String(length=20)) titulo = sqlalchemy.Column(sqlalchemy.String(length=255)) autor = sqlalchemy.Column(sqlalchemy.String(length=255)) anoPublicacao = sqlalchemy.Column(sqlalchemy.Integer) quantidadeEstoque = sqlalchemy.Column(sqlalchemy.Integer)", "= 
sqlalchemy.Column(sqlalchemy.String(length=255)) anoPublicacao = sqlalchemy.Column(sqlalchemy.Integer) quantidadeEstoque = sqlalchemy.Column(sqlalchemy.Integer) preco = sqlalchemy.Column(sqlalchemy.Float) ativo =", "titulo = sqlalchemy.Column(sqlalchemy.String(length=255)) autor = sqlalchemy.Column(sqlalchemy.String(length=255)) anoPublicacao = sqlalchemy.Column(sqlalchemy.Integer) quantidadeEstoque = sqlalchemy.Column(sqlalchemy.Integer) preco", "'livro' id = sqlalchemy.Column(sqlalchemy.Integer,primary_key=True) isbn = sqlalchemy.Column(sqlalchemy.String(length=20)) titulo = sqlalchemy.Column(sqlalchemy.String(length=255)) autor = sqlalchemy.Column(sqlalchemy.String(length=255))", "= sqlalchemy.Column(sqlalchemy.Integer,primary_key=True) isbn = sqlalchemy.Column(sqlalchemy.String(length=20)) titulo = sqlalchemy.Column(sqlalchemy.String(length=255)) autor = sqlalchemy.Column(sqlalchemy.String(length=255)) anoPublicacao =", "= sqlalchemy.Column(sqlalchemy.String(length=20)) titulo = sqlalchemy.Column(sqlalchemy.String(length=255)) autor = sqlalchemy.Column(sqlalchemy.String(length=255)) anoPublicacao = sqlalchemy.Column(sqlalchemy.Integer) quantidadeEstoque =", "= 'livro' id = sqlalchemy.Column(sqlalchemy.Integer,primary_key=True) isbn = sqlalchemy.Column(sqlalchemy.String(length=20)) titulo = sqlalchemy.Column(sqlalchemy.String(length=255)) autor =", "__tablename__ = 'livro' id = sqlalchemy.Column(sqlalchemy.Integer,primary_key=True) isbn = sqlalchemy.Column(sqlalchemy.String(length=20)) titulo = sqlalchemy.Column(sqlalchemy.String(length=255)) autor", "sqlalchemy.Column(sqlalchemy.String(length=255)) autor = sqlalchemy.Column(sqlalchemy.String(length=255)) anoPublicacao = sqlalchemy.Column(sqlalchemy.Integer) quantidadeEstoque = sqlalchemy.Column(sqlalchemy.Integer) preco = sqlalchemy.Column(sqlalchemy.Float)", "Livro(EntityBase): __tablename__ = 'livro' id = sqlalchemy.Column(sqlalchemy.Integer,primary_key=True) isbn = 
sqlalchemy.Column(sqlalchemy.String(length=20)) titulo = sqlalchemy.Column(sqlalchemy.String(length=255))", "from entities.entityBase import * class Livro(EntityBase): __tablename__ = 'livro' id = sqlalchemy.Column(sqlalchemy.Integer,primary_key=True) isbn", "autor = sqlalchemy.Column(sqlalchemy.String(length=255)) anoPublicacao = sqlalchemy.Column(sqlalchemy.Integer) quantidadeEstoque = sqlalchemy.Column(sqlalchemy.Integer) preco = sqlalchemy.Column(sqlalchemy.Float) ativo", "id = sqlalchemy.Column(sqlalchemy.Integer,primary_key=True) isbn = sqlalchemy.Column(sqlalchemy.String(length=20)) titulo = sqlalchemy.Column(sqlalchemy.String(length=255)) autor = sqlalchemy.Column(sqlalchemy.String(length=255)) anoPublicacao" ]
[ "@st.cache(allow_output_mutation=True) def load_model(): \"\"\" Xceptionモデルをloadする。 \"\"\" model = Xception(include_top=True, weights='imagenet', input_tensor=None, input_shape=None, pooling=None,", "= Image.open(uploaded_file) st.image( image_pil_array, caption='uploaded image', use_column_width=True ) x = preprocessing_image(image_pil_array) result =", "model def preprocessing_image(image_pil_array: 'PIL.Image'): \"\"\" 予測するためにPIL.Imageで読み込んだarrayを加工する。 299×299にして、pixelを正規化 cf: https://keras.io/ja/applications/#xception \"\"\" image_pil_array = image_pil_array.convert('RGB')", "Image import json import os import streamlit as st import pandas as pd", "image', use_column_width=True ) x = preprocessing_image(image_pil_array) result = model.predict(x) predict_rank = decode_predictions(result, top=5)[0]", "cf: https://keras.io/ja/applications/#xception \"\"\" image_pil_array = image_pil_array.convert('RGB') x = image.img_to_array(image_pil_array) x = np.expand_dims(x, axis=0)", "= preprocessing_image(image_pil_array) result = model.predict(x) predict_rank = decode_predictions(result, top=5)[0] st.write('機械学習モデルは画像を', predict_rank[0][1], 'と予測しました。') st.write('####", "predict_rank[0][1], 'と予測しました。') st.write('#### 予測確率@p5') df = pd.DataFrame(predict_rank, columns=['index', 'name', 'predict_proba']) st.write(df) df_chart =", "\"\"\" image_pil_array = image_pil_array.convert('RGB') x = image.img_to_array(image_pil_array) x = np.expand_dims(x, axis=0) print(x.shape) x", "model.predict(x) predict_rank = decode_predictions(result, top=5)[0] st.write('機械学習モデルは画像を', predict_rank[0][1], 'と予測しました。') st.write('#### 予測確率@p5') df = pd.DataFrame(predict_rank,", "\"\"\" 予測するためにPIL.Imageで読み込んだarrayを加工する。 299×299にして、pixelを正規化 cf: https://keras.io/ja/applications/#xception \"\"\" image_pil_array = image_pil_array.convert('RGB') x = image.img_to_array(image_pil_array) x", "import os import streamlit as st import pandas as pd import numpy as", "import pandas as pd import numpy as np 
from keras.preprocessing import image from", ") x = preprocessing_image(image_pil_array) result = model.predict(x) predict_rank = decode_predictions(result, top=5)[0] st.write('機械学習モデルは画像を', predict_rank[0][1],", "= np.expand_dims(x, axis=0) print(x.shape) x = preprocess_input(x) print(x.shape) return x def main(): model", "= decode_predictions(result, top=5)[0] st.write('機械学習モデルは画像を', predict_rank[0][1], 'と予測しました。') st.write('#### 予測確率@p5') df = pd.DataFrame(predict_rank, columns=['index', 'name',", "uploaded_file = st.file_uploader('Choose a image file to predict') if uploaded_file is not None:", "\"\"\" Xceptionモデルをloadする。 \"\"\" model = Xception(include_top=True, weights='imagenet', input_tensor=None, input_shape=None, pooling=None, classes=1000) return model", "= model.predict(x) predict_rank = decode_predictions(result, top=5)[0] st.write('機械学習モデルは画像を', predict_rank[0][1], 'と予測しました。') st.write('#### 予測確率@p5') df =", "None: image_pil_array = Image.open(uploaded_file) st.image( image_pil_array, caption='uploaded image', use_column_width=True ) x = preprocessing_image(image_pil_array)", "from keras.applications.xception import Xception, preprocess_input, decode_predictions st.set_option('deprecation.showfileUploaderEncoding', False) @st.cache(allow_output_mutation=True) def load_model(): \"\"\" Xceptionモデルをloadする。", "image.img_to_array(image_pil_array) x = np.expand_dims(x, axis=0) print(x.shape) x = preprocess_input(x) print(x.shape) return x def", "main(): model = load_model() st.title('画像分類器') st.write(\"pretrained modelを使って、アップロードした画像を分類します。\") uploaded_file = st.file_uploader('Choose a image file", "Image.open(uploaded_file) st.image( image_pil_array, caption='uploaded image', use_column_width=True ) x = preprocessing_image(image_pil_array) result = model.predict(x)", "load_model() st.title('画像分類器') st.write(\"pretrained modelを使って、アップロードした画像を分類します。\") uploaded_file = st.file_uploader('Choose a image file to predict') if", "PIL import Image import json 
import os import streamlit as st import pandas", "columns=['index', 'name', 'predict_proba']) st.write(df) df_chart = df[['name', 'predict_proba']].set_index('name') st.bar_chart(df_chart) if __name__ == '__main__':", "\"\"\" model = Xception(include_top=True, weights='imagenet', input_tensor=None, input_shape=None, pooling=None, classes=1000) return model def preprocessing_image(image_pil_array:", "np from keras.preprocessing import image from keras.applications.xception import Xception, preprocess_input, decode_predictions st.set_option('deprecation.showfileUploaderEncoding', False)", "is not None: image_pil_array = Image.open(uploaded_file) st.image( image_pil_array, caption='uploaded image', use_column_width=True ) x", "import Image import json import os import streamlit as st import pandas as", "result = model.predict(x) predict_rank = decode_predictions(result, top=5)[0] st.write('機械学習モデルは画像を', predict_rank[0][1], 'と予測しました。') st.write('#### 予測確率@p5') df", "def main(): model = load_model() st.title('画像分類器') st.write(\"pretrained modelを使って、アップロードした画像を分類します。\") uploaded_file = st.file_uploader('Choose a image", "import Xception, preprocess_input, decode_predictions st.set_option('deprecation.showfileUploaderEncoding', False) @st.cache(allow_output_mutation=True) def load_model(): \"\"\" Xceptionモデルをloadする。 \"\"\" model", "print(x.shape) return x def main(): model = load_model() st.title('画像分類器') st.write(\"pretrained modelを使って、アップロードした画像を分類します。\") uploaded_file =", "predict_rank = decode_predictions(result, top=5)[0] st.write('機械学習モデルは画像を', predict_rank[0][1], 'と予測しました。') st.write('#### 予測確率@p5') df = pd.DataFrame(predict_rank, columns=['index',", "= image_pil_array.convert('RGB') x = image.img_to_array(image_pil_array) x = np.expand_dims(x, axis=0) print(x.shape) x = preprocess_input(x)", "model = Xception(include_top=True, weights='imagenet', input_tensor=None, input_shape=None, pooling=None, classes=1000) return model def preprocessing_image(image_pil_array: 
'PIL.Image'):", "axis=0) print(x.shape) x = preprocess_input(x) print(x.shape) return x def main(): model = load_model()", "x = preprocess_input(x) print(x.shape) return x def main(): model = load_model() st.title('画像分類器') st.write(\"pretrained", "'と予測しました。') st.write('#### 予測確率@p5') df = pd.DataFrame(predict_rank, columns=['index', 'name', 'predict_proba']) st.write(df) df_chart = df[['name',", "os import streamlit as st import pandas as pd import numpy as np", "if uploaded_file is not None: image_pil_array = Image.open(uploaded_file) st.image( image_pil_array, caption='uploaded image', use_column_width=True", "= preprocess_input(x) print(x.shape) return x def main(): model = load_model() st.title('画像分類器') st.write(\"pretrained modelを使って、アップロードした画像を分類します。\")", "= Xception(include_top=True, weights='imagenet', input_tensor=None, input_shape=None, pooling=None, classes=1000) return model def preprocessing_image(image_pil_array: 'PIL.Image'): \"\"\"", "import json import os import streamlit as st import pandas as pd import", "input_tensor=None, input_shape=None, pooling=None, classes=1000) return model def preprocessing_image(image_pil_array: 'PIL.Image'): \"\"\" 予測するためにPIL.Imageで読み込んだarrayを加工する。 299×299にして、pixelを正規化 cf:", "https://keras.io/ja/applications/#xception \"\"\" image_pil_array = image_pil_array.convert('RGB') x = image.img_to_array(image_pil_array) x = np.expand_dims(x, axis=0) print(x.shape)", "as pd import numpy as np from keras.preprocessing import image from keras.applications.xception import", "return x def main(): model = load_model() st.title('画像分類器') st.write(\"pretrained modelを使って、アップロードした画像を分類します。\") uploaded_file = st.file_uploader('Choose", "pooling=None, classes=1000) return model def preprocessing_image(image_pil_array: 'PIL.Image'): \"\"\" 予測するためにPIL.Imageで読み込んだarrayを加工する。 299×299にして、pixelを正規化 cf: https://keras.io/ja/applications/#xception \"\"\"", "pandas as pd import numpy as np from keras.preprocessing import image from 
keras.applications.xception", "print(x.shape) x = preprocess_input(x) print(x.shape) return x def main(): model = load_model() st.title('画像分類器')", "np.expand_dims(x, axis=0) print(x.shape) x = preprocess_input(x) print(x.shape) return x def main(): model =", "= image.img_to_array(image_pil_array) x = np.expand_dims(x, axis=0) print(x.shape) x = preprocess_input(x) print(x.shape) return x", "preprocessing_image(image_pil_array: 'PIL.Image'): \"\"\" 予測するためにPIL.Imageで読み込んだarrayを加工する。 299×299にして、pixelを正規化 cf: https://keras.io/ja/applications/#xception \"\"\" image_pil_array = image_pil_array.convert('RGB') x =", "= pd.DataFrame(predict_rank, columns=['index', 'name', 'predict_proba']) st.write(df) df_chart = df[['name', 'predict_proba']].set_index('name') st.bar_chart(df_chart) if __name__", "load_model(): \"\"\" Xceptionモデルをloadする。 \"\"\" model = Xception(include_top=True, weights='imagenet', input_tensor=None, input_shape=None, pooling=None, classes=1000) return", "pd import numpy as np from keras.preprocessing import image from keras.applications.xception import Xception,", "keras.preprocessing import image from keras.applications.xception import Xception, preprocess_input, decode_predictions st.set_option('deprecation.showfileUploaderEncoding', False) @st.cache(allow_output_mutation=True) def", "予測確率@p5') df = pd.DataFrame(predict_rank, columns=['index', 'name', 'predict_proba']) st.write(df) df_chart = df[['name', 'predict_proba']].set_index('name') st.bar_chart(df_chart)", "df = pd.DataFrame(predict_rank, columns=['index', 'name', 'predict_proba']) st.write(df) df_chart = df[['name', 'predict_proba']].set_index('name') st.bar_chart(df_chart) if", "import numpy as np from keras.preprocessing import image from keras.applications.xception import Xception, preprocess_input,", "from PIL import Image import json import os import streamlit as st import", "= st.file_uploader('Choose a image file to predict') if uploaded_file is not None: image_pil_array", "predict') if 
uploaded_file is not None: image_pil_array = Image.open(uploaded_file) st.image( image_pil_array, caption='uploaded image',", "as st import pandas as pd import numpy as np from keras.preprocessing import", "pd.DataFrame(predict_rank, columns=['index', 'name', 'predict_proba']) st.write(df) df_chart = df[['name', 'predict_proba']].set_index('name') st.bar_chart(df_chart) if __name__ ==", "preprocess_input, decode_predictions st.set_option('deprecation.showfileUploaderEncoding', False) @st.cache(allow_output_mutation=True) def load_model(): \"\"\" Xceptionモデルをloadする。 \"\"\" model = Xception(include_top=True,", "from keras.preprocessing import image from keras.applications.xception import Xception, preprocess_input, decode_predictions st.set_option('deprecation.showfileUploaderEncoding', False) @st.cache(allow_output_mutation=True)", "<filename>PySimpleGUI_PyWebIO_Streamlit/test/image_clf.py from PIL import Image import json import os import streamlit as st", "file to predict') if uploaded_file is not None: image_pil_array = Image.open(uploaded_file) st.image( image_pil_array,", "top=5)[0] st.write('機械学習モデルは画像を', predict_rank[0][1], 'と予測しました。') st.write('#### 予測確率@p5') df = pd.DataFrame(predict_rank, columns=['index', 'name', 'predict_proba']) st.write(df)", "import streamlit as st import pandas as pd import numpy as np from", "uploaded_file is not None: image_pil_array = Image.open(uploaded_file) st.image( image_pil_array, caption='uploaded image', use_column_width=True )", "x = preprocessing_image(image_pil_array) result = model.predict(x) predict_rank = decode_predictions(result, top=5)[0] st.write('機械学習モデルは画像を', predict_rank[0][1], 'と予測しました。')", "x def main(): model = load_model() st.title('画像分類器') st.write(\"pretrained modelを使って、アップロードした画像を分類します。\") uploaded_file = st.file_uploader('Choose a", "a image file to predict') if uploaded_file is not None: image_pil_array = Image.open(uploaded_file)", "decode_predictions(result, top=5)[0] st.write('機械学習モデルは画像を', 
predict_rank[0][1], 'と予測しました。') st.write('#### 予測確率@p5') df = pd.DataFrame(predict_rank, columns=['index', 'name', 'predict_proba'])", "st import pandas as pd import numpy as np from keras.preprocessing import image", "Xception(include_top=True, weights='imagenet', input_tensor=None, input_shape=None, pooling=None, classes=1000) return model def preprocessing_image(image_pil_array: 'PIL.Image'): \"\"\" 予測するためにPIL.Imageで読み込んだarrayを加工する。", "classes=1000) return model def preprocessing_image(image_pil_array: 'PIL.Image'): \"\"\" 予測するためにPIL.Imageで読み込んだarrayを加工する。 299×299にして、pixelを正規化 cf: https://keras.io/ja/applications/#xception \"\"\" image_pil_array", "x = image.img_to_array(image_pil_array) x = np.expand_dims(x, axis=0) print(x.shape) x = preprocess_input(x) print(x.shape) return", "= load_model() st.title('画像分類器') st.write(\"pretrained modelを使って、アップロードした画像を分類します。\") uploaded_file = st.file_uploader('Choose a image file to predict')", "model = load_model() st.title('画像分類器') st.write(\"pretrained modelを使って、アップロードした画像を分類します。\") uploaded_file = st.file_uploader('Choose a image file to", "modelを使って、アップロードした画像を分類します。\") uploaded_file = st.file_uploader('Choose a image file to predict') if uploaded_file is not", "image_pil_array = image_pil_array.convert('RGB') x = image.img_to_array(image_pil_array) x = np.expand_dims(x, axis=0) print(x.shape) x =", "preprocessing_image(image_pil_array) result = model.predict(x) predict_rank = decode_predictions(result, top=5)[0] st.write('機械学習モデルは画像を', predict_rank[0][1], 'と予測しました。') st.write('#### 予測確率@p5')", "st.image( image_pil_array, caption='uploaded image', use_column_width=True ) x = preprocessing_image(image_pil_array) result = model.predict(x) predict_rank", "image from keras.applications.xception import Xception, preprocess_input, decode_predictions st.set_option('deprecation.showfileUploaderEncoding', False) @st.cache(allow_output_mutation=True) def load_model(): \"\"\"", "return model def 
preprocessing_image(image_pil_array: 'PIL.Image'): \"\"\" 予測するためにPIL.Imageで読み込んだarrayを加工する。 299×299にして、pixelを正規化 cf: https://keras.io/ja/applications/#xception \"\"\" image_pil_array =", "'name', 'predict_proba']) st.write(df) df_chart = df[['name', 'predict_proba']].set_index('name') st.bar_chart(df_chart) if __name__ == '__main__': main()", "st.write('機械学習モデルは画像を', predict_rank[0][1], 'と予測しました。') st.write('#### 予測確率@p5') df = pd.DataFrame(predict_rank, columns=['index', 'name', 'predict_proba']) st.write(df) df_chart", "def preprocessing_image(image_pil_array: 'PIL.Image'): \"\"\" 予測するためにPIL.Imageで読み込んだarrayを加工する。 299×299にして、pixelを正規化 cf: https://keras.io/ja/applications/#xception \"\"\" image_pil_array = image_pil_array.convert('RGB') x", "input_shape=None, pooling=None, classes=1000) return model def preprocessing_image(image_pil_array: 'PIL.Image'): \"\"\" 予測するためにPIL.Imageで読み込んだarrayを加工する。 299×299にして、pixelを正規化 cf: https://keras.io/ja/applications/#xception", "keras.applications.xception import Xception, preprocess_input, decode_predictions st.set_option('deprecation.showfileUploaderEncoding', False) @st.cache(allow_output_mutation=True) def load_model(): \"\"\" Xceptionモデルをloadする。 \"\"\"", "st.set_option('deprecation.showfileUploaderEncoding', False) @st.cache(allow_output_mutation=True) def load_model(): \"\"\" Xceptionモデルをloadする。 \"\"\" model = Xception(include_top=True, weights='imagenet', input_tensor=None,", "x = np.expand_dims(x, axis=0) print(x.shape) x = preprocess_input(x) print(x.shape) return x def main():", "st.title('画像分類器') st.write(\"pretrained modelを使って、アップロードした画像を分類します。\") uploaded_file = st.file_uploader('Choose a image file to predict') if uploaded_file", "image_pil_array = Image.open(uploaded_file) st.image( image_pil_array, caption='uploaded image', use_column_width=True ) x = preprocessing_image(image_pil_array) result", "st.write('#### 予測確率@p5') df = pd.DataFrame(predict_rank, columns=['index', 'name', 'predict_proba']) st.write(df) 
df_chart = df[['name', 'predict_proba']].set_index('name')", "image file to predict') if uploaded_file is not None: image_pil_array = Image.open(uploaded_file) st.image(", "False) @st.cache(allow_output_mutation=True) def load_model(): \"\"\" Xceptionモデルをloadする。 \"\"\" model = Xception(include_top=True, weights='imagenet', input_tensor=None, input_shape=None,", "Xception, preprocess_input, decode_predictions st.set_option('deprecation.showfileUploaderEncoding', False) @st.cache(allow_output_mutation=True) def load_model(): \"\"\" Xceptionモデルをloadする。 \"\"\" model =", "weights='imagenet', input_tensor=None, input_shape=None, pooling=None, classes=1000) return model def preprocessing_image(image_pil_array: 'PIL.Image'): \"\"\" 予測するためにPIL.Imageで読み込んだarrayを加工する。 299×299にして、pixelを正規化", "caption='uploaded image', use_column_width=True ) x = preprocessing_image(image_pil_array) result = model.predict(x) predict_rank = decode_predictions(result,", "json import os import streamlit as st import pandas as pd import numpy", "import image from keras.applications.xception import Xception, preprocess_input, decode_predictions st.set_option('deprecation.showfileUploaderEncoding', False) @st.cache(allow_output_mutation=True) def load_model():", "st.file_uploader('Choose a image file to predict') if uploaded_file is not None: image_pil_array =", "image_pil_array.convert('RGB') x = image.img_to_array(image_pil_array) x = np.expand_dims(x, axis=0) print(x.shape) x = preprocess_input(x) print(x.shape)", "not None: image_pil_array = Image.open(uploaded_file) st.image( image_pil_array, caption='uploaded image', use_column_width=True ) x =", "to predict') if uploaded_file is not None: image_pil_array = Image.open(uploaded_file) st.image( image_pil_array, caption='uploaded", "preprocess_input(x) print(x.shape) return x def main(): model = load_model() st.title('画像分類器') st.write(\"pretrained modelを使って、アップロードした画像を分類します。\") uploaded_file", "Xceptionモデルをloadする。 \"\"\" model = 
Xception(include_top=True, weights='imagenet', input_tensor=None, input_shape=None, pooling=None, classes=1000) return model def", "as np from keras.preprocessing import image from keras.applications.xception import Xception, preprocess_input, decode_predictions st.set_option('deprecation.showfileUploaderEncoding',", "def load_model(): \"\"\" Xceptionモデルをloadする。 \"\"\" model = Xception(include_top=True, weights='imagenet', input_tensor=None, input_shape=None, pooling=None, classes=1000)", "'PIL.Image'): \"\"\" 予測するためにPIL.Imageで読み込んだarrayを加工する。 299×299にして、pixelを正規化 cf: https://keras.io/ja/applications/#xception \"\"\" image_pil_array = image_pil_array.convert('RGB') x = image.img_to_array(image_pil_array)", "予測するためにPIL.Imageで読み込んだarrayを加工する。 299×299にして、pixelを正規化 cf: https://keras.io/ja/applications/#xception \"\"\" image_pil_array = image_pil_array.convert('RGB') x = image.img_to_array(image_pil_array) x =", "use_column_width=True ) x = preprocessing_image(image_pil_array) result = model.predict(x) predict_rank = decode_predictions(result, top=5)[0] st.write('機械学習モデルは画像を',", "299×299にして、pixelを正規化 cf: https://keras.io/ja/applications/#xception \"\"\" image_pil_array = image_pil_array.convert('RGB') x = image.img_to_array(image_pil_array) x = np.expand_dims(x,", "decode_predictions st.set_option('deprecation.showfileUploaderEncoding', False) @st.cache(allow_output_mutation=True) def load_model(): \"\"\" Xceptionモデルをloadする。 \"\"\" model = Xception(include_top=True, weights='imagenet',", "image_pil_array, caption='uploaded image', use_column_width=True ) x = preprocessing_image(image_pil_array) result = model.predict(x) predict_rank =", "st.write(\"pretrained modelを使って、アップロードした画像を分類します。\") uploaded_file = st.file_uploader('Choose a image file to predict') if uploaded_file is", "streamlit as st import pandas as pd import numpy as np from keras.preprocessing", "numpy as np from keras.preprocessing import image from keras.applications.xception import Xception, 
preprocess_input, decode_predictions" ]
[ "== FD.TYPE_INT64 or fd.type == FD.TYPE_UINT64 or fd.type == FD.TYPE_SINT64 or fd.type ==", "or fd.type == FD.TYPE_SFIXED32 or fd.type == FD.TYPE_SFIXED64: func = long else: raise", "encode_func): if fd.label == FD.LABEL_REPEATED: encoded_value = [] for v in value: encoded_value.append(encode_func(v))", "value, encode_func(fd)) return result def encode_func(fd): if fd.type == FD.TYPE_MESSAGE: func = proto_to_dict", "== FD.TYPE_FIXED32 or fd.type == FD.TYPE_FIXED64 or fd.type == FD.TYPE_SFIXED32 or fd.type ==", "if fd.label == FD.LABEL_REPEATED: encoded_value = [] for v in value: encoded_value.append(encode_func(v)) else:", "msg) return msg def decode_msg(dictionary, msg): msg.SetInParent() for key, value in dictionary.iteritems(): if", "isinstance(values[0], dict): for v in values: dict_to_proto(v, field.add()) else: for v in values:", "return result def encode_func(fd): if fd.type == FD.TYPE_MESSAGE: func = proto_to_dict elif fd.type", "proto_to_dict(msg): result = {} for fd, value in msg.ListFields(): result[fd.name] = encode_value(fd, value,", "or fd.type == FD.TYPE_SINT32 or fd.type == FD.TYPE_ENUM: return int(value) if fd.type ==", "elif fd.type == FD.TYPE_INT32 or fd.type == FD.TYPE_UINT32 or fd.type == FD.TYPE_SINT32 or", "FD.TYPE_STRING: func = unicode elif fd.type == FD.TYPE_BYTES: func = b64encode elif fd.type", "fd.type == FD.TYPE_ENUM: func = int elif fd.type == FD.TYPE_INT64 or fd.type ==", "return func def encode_value(fd, value, encode_func): if fd.label == FD.LABEL_REPEATED: encoded_value = []", "decode_msg(value, getattr(msg, field)) elif isinstance(value, list): decode_list(value, getattr(msg, field), msg.DESCRIPTOR.fields_by_name[field]) else: setattr(msg, field,", "elif fd.type == FD.TYPE_INT64 or fd.type == FD.TYPE_UINT64 or fd.type == FD.TYPE_SINT64 or", "FD.TYPE_SINT32 or fd.type == FD.TYPE_ENUM: return int(value) if fd.type == FD.TYPE_INT64 or fd.type", "def decode_msg(dictionary, msg): msg.SetInParent() for key, value in 
dictionary.iteritems(): if value is None:", "else: encoded_value = encode_func(value) return encoded_value def dict_to_proto(dictionary, msg): decode_msg(dictionary, msg) return msg", "fd.type == FD.TYPE_SFIXED64: func = long else: raise Error(\"Unknown field type %s\", fd.type)", "encoded_value = [] for v in value: encoded_value.append(encode_func(v)) else: encoded_value = encode_func(value) return", "-*- coding: utf-8 -*- from base64 import b64encode, b64decode from google.protobuf.descriptor import FieldDescriptor", "fd): if isinstance(values[0], dict): for v in values: dict_to_proto(v, field.add()) else: for v", "or fd.type == FD.TYPE_FLOAT: func = float elif fd.type == FD.TYPE_INT32 or fd.type", "field.add()) else: for v in values: field.append(decode_value(v, fd)) def decode_value(value, fd): if fd.type", "FD.TYPE_ENUM: return int(value) if fd.type == FD.TYPE_INT64 or fd.type == FD.TYPE_UINT64 or fd.type", "field.append(decode_value(v, fd)) def decode_value(value, fd): if fd.type == FD.TYPE_BYTES: return b64decode(value) if fd.type", "or fd.type == FD.TYPE_ENUM: func = int elif fd.type == FD.TYPE_INT64 or fd.type", "FD.TYPE_FLOAT: func = float elif fd.type == FD.TYPE_INT32 or fd.type == FD.TYPE_UINT32 or", "fd.type == FD.TYPE_INT32 or fd.type == FD.TYPE_UINT32 or fd.type == FD.TYPE_SINT32 or fd.type", "decode_msg(dictionary, msg) return msg def decode_msg(dictionary, msg): msg.SetInParent() for key, value in dictionary.iteritems():", "or fd.type == FD.TYPE_FIXED32 or fd.type == FD.TYPE_FIXED64 or fd.type == FD.TYPE_SFIXED32 or", "FD.TYPE_FIXED64 or fd.type == FD.TYPE_SFIXED32 or fd.type == FD.TYPE_SFIXED64: func = long else:", "v in values: field.append(decode_value(v, fd)) def decode_value(value, fd): if fd.type == FD.TYPE_BYTES: return", "== FD.LABEL_REPEATED: encoded_value = [] for v in value: encoded_value.append(encode_func(v)) else: encoded_value =", "== FD.TYPE_BOOL: return bool(value) if fd.type == FD.TYPE_INT32 or fd.type == FD.TYPE_UINT32 or", 
"fd.type == FD.TYPE_SINT32 or fd.type == FD.TYPE_ENUM: return int(value) if fd.type == FD.TYPE_INT64", "field, fd): if isinstance(values[0], dict): for v in values: dict_to_proto(v, field.add()) else: for", "bool elif fd.type == FD.TYPE_STRING: func = unicode elif fd.type == FD.TYPE_BYTES: func", "elif fd.type == FD.TYPE_STRING: func = unicode elif fd.type == FD.TYPE_BYTES: func =", "or fd.type == FD.TYPE_SINT32 or fd.type == FD.TYPE_ENUM: func = int elif fd.type", "return encoded_value def dict_to_proto(dictionary, msg): decode_msg(dictionary, msg) return msg def decode_msg(dictionary, msg): msg.SetInParent()", "fd)) def decode_value(value, fd): if fd.type == FD.TYPE_BYTES: return b64decode(value) if fd.type ==", "msg): msg.SetInParent() for key, value in dictionary.iteritems(): if value is None: continue field", "msg.DESCRIPTOR.fields_by_name[field])) def decode_list(values, field, fd): if isinstance(values[0], dict): for v in values: dict_to_proto(v,", "msg.DESCRIPTOR.fields_by_name[field]) else: setattr(msg, field, decode_value(value, msg.DESCRIPTOR.fields_by_name[field])) def decode_list(values, field, fd): if isinstance(values[0], dict):", "type %s\", fd.type) return func def encode_value(fd, value, encode_func): if fd.label == FD.LABEL_REPEATED:", "encode_value(fd, value, encode_func(fd)) return result def encode_func(fd): if fd.type == FD.TYPE_MESSAGE: func =", "= str(key) if isinstance(value, dict): decode_msg(value, getattr(msg, field)) elif isinstance(value, list): decode_list(value, getattr(msg,", "result[fd.name] = encode_value(fd, value, encode_func(fd)) return result def encode_func(fd): if fd.type == FD.TYPE_MESSAGE:", "func = float elif fd.type == FD.TYPE_INT32 or fd.type == FD.TYPE_UINT32 or fd.type", "fd.type == FD.TYPE_BYTES: func = b64encode elif fd.type == FD.TYPE_DOUBLE or fd.type ==", "base64 import b64encode, b64decode from google.protobuf.descriptor import FieldDescriptor as FD def proto_to_dict(msg): result", "if value is None: continue 
field = str(key) if isinstance(value, dict): decode_msg(value, getattr(msg,", "int elif fd.type == FD.TYPE_INT64 or fd.type == FD.TYPE_UINT64 or fd.type == FD.TYPE_SINT64", "= {} for fd, value in msg.ListFields(): result[fd.name] = encode_value(fd, value, encode_func(fd)) return", "fd.type == FD.TYPE_BYTES: return b64decode(value) if fd.type == FD.TYPE_BOOL: return bool(value) if fd.type", "in values: dict_to_proto(v, field.add()) else: for v in values: field.append(decode_value(v, fd)) def decode_value(value,", "== FD.TYPE_UINT32 or fd.type == FD.TYPE_SINT32 or fd.type == FD.TYPE_ENUM: return int(value) if", "== FD.TYPE_ENUM: func = int elif fd.type == FD.TYPE_INT64 or fd.type == FD.TYPE_UINT64", "== FD.TYPE_STRING: func = unicode elif fd.type == FD.TYPE_BYTES: func = b64encode elif", "from base64 import b64encode, b64decode from google.protobuf.descriptor import FieldDescriptor as FD def proto_to_dict(msg):", "def encode_value(fd, value, encode_func): if fd.label == FD.LABEL_REPEATED: encoded_value = [] for v", "fd.type == FD.TYPE_DOUBLE or fd.type == FD.TYPE_FLOAT: func = float elif fd.type ==", "or fd.type == FD.TYPE_UINT32 or fd.type == FD.TYPE_SINT32 or fd.type == FD.TYPE_ENUM: func", "func = proto_to_dict elif fd.type == FD.TYPE_BOOL: func = bool elif fd.type ==", "== FD.TYPE_SINT32 or fd.type == FD.TYPE_ENUM: func = int elif fd.type == FD.TYPE_INT64", "result def encode_func(fd): if fd.type == FD.TYPE_MESSAGE: func = proto_to_dict elif fd.type ==", "fd.type == FD.TYPE_UINT32 or fd.type == FD.TYPE_SINT32 or fd.type == FD.TYPE_ENUM: return int(value)", "= bool elif fd.type == FD.TYPE_STRING: func = unicode elif fd.type == FD.TYPE_BYTES:", "== FD.TYPE_SINT64 or fd.type == FD.TYPE_FIXED32 or fd.type == FD.TYPE_FIXED64 or fd.type ==", "for fd, value in msg.ListFields(): result[fd.name] = encode_value(fd, value, encode_func(fd)) return result def", "func = bool elif fd.type == FD.TYPE_STRING: func = unicode elif fd.type ==", "if fd.type == FD.TYPE_INT64 or 
fd.type == FD.TYPE_UINT64 or fd.type == FD.TYPE_SINT64 or", "FD.TYPE_INT32 or fd.type == FD.TYPE_UINT32 or fd.type == FD.TYPE_SINT32 or fd.type == FD.TYPE_ENUM:", "dict): decode_msg(value, getattr(msg, field)) elif isinstance(value, list): decode_list(value, getattr(msg, field), msg.DESCRIPTOR.fields_by_name[field]) else: setattr(msg,", "msg): decode_msg(dictionary, msg) return msg def decode_msg(dictionary, msg): msg.SetInParent() for key, value in", "msg def decode_msg(dictionary, msg): msg.SetInParent() for key, value in dictionary.iteritems(): if value is", "== FD.TYPE_DOUBLE or fd.type == FD.TYPE_FLOAT: func = float elif fd.type == FD.TYPE_INT32", "encode_value(fd, value, encode_func): if fd.label == FD.LABEL_REPEATED: encoded_value = [] for v in", "== FD.TYPE_SFIXED64: func = long else: raise Error(\"Unknown field type %s\", fd.type) return", "isinstance(value, list): decode_list(value, getattr(msg, field), msg.DESCRIPTOR.fields_by_name[field]) else: setattr(msg, field, decode_value(value, msg.DESCRIPTOR.fields_by_name[field])) def decode_list(values,", "FD.TYPE_UINT32 or fd.type == FD.TYPE_SINT32 or fd.type == FD.TYPE_ENUM: func = int elif", "bool(value) if fd.type == FD.TYPE_INT32 or fd.type == FD.TYPE_UINT32 or fd.type == FD.TYPE_SINT32", "in msg.ListFields(): result[fd.name] = encode_value(fd, value, encode_func(fd)) return result def encode_func(fd): if fd.type", "dict_to_proto(dictionary, msg): decode_msg(dictionary, msg) return msg def decode_msg(dictionary, msg): msg.SetInParent() for key, value", "value is None: continue field = str(key) if isinstance(value, dict): decode_msg(value, getattr(msg, field))", "def decode_list(values, field, fd): if isinstance(values[0], dict): for v in values: dict_to_proto(v, field.add())", "or fd.type == FD.TYPE_FIXED64 or fd.type == FD.TYPE_SFIXED32 or fd.type == FD.TYPE_SFIXED64: return", "= proto_to_dict elif fd.type == FD.TYPE_BOOL: func = bool elif fd.type == FD.TYPE_STRING:", "unicode elif fd.type == 
FD.TYPE_BYTES: func = b64encode elif fd.type == FD.TYPE_DOUBLE or", "fd.type == FD.TYPE_UINT64 or fd.type == FD.TYPE_SINT64 or fd.type == FD.TYPE_FIXED32 or fd.type", "encoded_value = encode_func(value) return encoded_value def dict_to_proto(dictionary, msg): decode_msg(dictionary, msg) return msg def", "== FD.TYPE_MESSAGE: func = proto_to_dict elif fd.type == FD.TYPE_BOOL: func = bool elif", "# -*- coding: utf-8 -*- from base64 import b64encode, b64decode from google.protobuf.descriptor import", "== FD.TYPE_BOOL: func = bool elif fd.type == FD.TYPE_STRING: func = unicode elif", "def proto_to_dict(msg): result = {} for fd, value in msg.ListFields(): result[fd.name] = encode_value(fd,", "isinstance(value, dict): decode_msg(value, getattr(msg, field)) elif isinstance(value, list): decode_list(value, getattr(msg, field), msg.DESCRIPTOR.fields_by_name[field]) else:", "as FD def proto_to_dict(msg): result = {} for fd, value in msg.ListFields(): result[fd.name]", "field), msg.DESCRIPTOR.fields_by_name[field]) else: setattr(msg, field, decode_value(value, msg.DESCRIPTOR.fields_by_name[field])) def decode_list(values, field, fd): if isinstance(values[0],", "if fd.type == FD.TYPE_BYTES: return b64decode(value) if fd.type == FD.TYPE_BOOL: return bool(value) if", "proto_to_dict elif fd.type == FD.TYPE_BOOL: func = bool elif fd.type == FD.TYPE_STRING: func", "field, decode_value(value, msg.DESCRIPTOR.fields_by_name[field])) def decode_list(values, field, fd): if isinstance(values[0], dict): for v in", "in dictionary.iteritems(): if value is None: continue field = str(key) if isinstance(value, dict):", "or fd.type == FD.TYPE_UINT64 or fd.type == FD.TYPE_SINT64 or fd.type == FD.TYPE_FIXED32 or", "fd.type == FD.TYPE_FIXED64 or fd.type == FD.TYPE_SFIXED32 or fd.type == FD.TYPE_SFIXED64: return long(value)", "func def encode_value(fd, value, encode_func): if fd.label == FD.LABEL_REPEATED: encoded_value = [] for", "result = {} for fd, value in msg.ListFields(): result[fd.name] = 
encode_value(fd, value, encode_func(fd))", "field)) elif isinstance(value, list): decode_list(value, getattr(msg, field), msg.DESCRIPTOR.fields_by_name[field]) else: setattr(msg, field, decode_value(value, msg.DESCRIPTOR.fields_by_name[field]))", "float elif fd.type == FD.TYPE_INT32 or fd.type == FD.TYPE_UINT32 or fd.type == FD.TYPE_SINT32", "else: for v in values: field.append(decode_value(v, fd)) def decode_value(value, fd): if fd.type ==", "[] for v in value: encoded_value.append(encode_func(v)) else: encoded_value = encode_func(value) return encoded_value def", "msg.SetInParent() for key, value in dictionary.iteritems(): if value is None: continue field =", "fd.type == FD.TYPE_FIXED64 or fd.type == FD.TYPE_SFIXED32 or fd.type == FD.TYPE_SFIXED64: func =", "FD.TYPE_UINT32 or fd.type == FD.TYPE_SINT32 or fd.type == FD.TYPE_ENUM: return int(value) if fd.type", "fd, value in msg.ListFields(): result[fd.name] = encode_value(fd, value, encode_func(fd)) return result def encode_func(fd):", "FieldDescriptor as FD def proto_to_dict(msg): result = {} for fd, value in msg.ListFields():", "== FD.TYPE_BYTES: return b64decode(value) if fd.type == FD.TYPE_BOOL: return bool(value) if fd.type ==", "int(value) if fd.type == FD.TYPE_INT64 or fd.type == FD.TYPE_UINT64 or fd.type == FD.TYPE_SINT64", "FD.TYPE_BOOL: return bool(value) if fd.type == FD.TYPE_INT32 or fd.type == FD.TYPE_UINT32 or fd.type", "return int(value) if fd.type == FD.TYPE_INT64 or fd.type == FD.TYPE_UINT64 or fd.type ==", "v in value: encoded_value.append(encode_func(v)) else: encoded_value = encode_func(value) return encoded_value def dict_to_proto(dictionary, msg):", "fd.type == FD.TYPE_FIXED32 or fd.type == FD.TYPE_FIXED64 or fd.type == FD.TYPE_SFIXED32 or fd.type", "fd.type == FD.TYPE_SINT32 or fd.type == FD.TYPE_ENUM: func = int elif fd.type ==", "else: raise Error(\"Unknown field type %s\", fd.type) return func def encode_value(fd, value, encode_func):", "FD.TYPE_INT64 or fd.type == FD.TYPE_UINT64 or 
fd.type == FD.TYPE_SINT64 or fd.type == FD.TYPE_FIXED32", "values: field.append(decode_value(v, fd)) def decode_value(value, fd): if fd.type == FD.TYPE_BYTES: return b64decode(value) if", "decode_list(value, getattr(msg, field), msg.DESCRIPTOR.fields_by_name[field]) else: setattr(msg, field, decode_value(value, msg.DESCRIPTOR.fields_by_name[field])) def decode_list(values, field, fd):", "== FD.TYPE_INT32 or fd.type == FD.TYPE_UINT32 or fd.type == FD.TYPE_SINT32 or fd.type ==", "if fd.type == FD.TYPE_MESSAGE: func = proto_to_dict elif fd.type == FD.TYPE_BOOL: func =", "utf-8 -*- from base64 import b64encode, b64decode from google.protobuf.descriptor import FieldDescriptor as FD", "def encode_func(fd): if fd.type == FD.TYPE_MESSAGE: func = proto_to_dict elif fd.type == FD.TYPE_BOOL:", "fd.type) return func def encode_value(fd, value, encode_func): if fd.label == FD.LABEL_REPEATED: encoded_value =", "in values: field.append(decode_value(v, fd)) def decode_value(value, fd): if fd.type == FD.TYPE_BYTES: return b64decode(value)", "str(key) if isinstance(value, dict): decode_msg(value, getattr(msg, field)) elif isinstance(value, list): decode_list(value, getattr(msg, field),", "getattr(msg, field)) elif isinstance(value, list): decode_list(value, getattr(msg, field), msg.DESCRIPTOR.fields_by_name[field]) else: setattr(msg, field, decode_value(value,", "value in msg.ListFields(): result[fd.name] = encode_value(fd, value, encode_func(fd)) return result def encode_func(fd): if", "continue field = str(key) if isinstance(value, dict): decode_msg(value, getattr(msg, field)) elif isinstance(value, list):", "FD.TYPE_BYTES: return b64decode(value) if fd.type == FD.TYPE_BOOL: return bool(value) if fd.type == FD.TYPE_INT32", "%s\", fd.type) return func def encode_value(fd, value, encode_func): if fd.label == FD.LABEL_REPEATED: encoded_value", "from google.protobuf.descriptor import FieldDescriptor as FD def proto_to_dict(msg): result = {} for fd,", "value in dictionary.iteritems(): 
if value is None: continue field = str(key) if isinstance(value,", "dict_to_proto(v, field.add()) else: for v in values: field.append(decode_value(v, fd)) def decode_value(value, fd): if", "== FD.TYPE_ENUM: return int(value) if fd.type == FD.TYPE_INT64 or fd.type == FD.TYPE_UINT64 or", "or fd.type == FD.TYPE_SFIXED64: func = long else: raise Error(\"Unknown field type %s\",", "value: encoded_value.append(encode_func(v)) else: encoded_value = encode_func(value) return encoded_value def dict_to_proto(dictionary, msg): decode_msg(dictionary, msg)", "FD.TYPE_MESSAGE: func = proto_to_dict elif fd.type == FD.TYPE_BOOL: func = bool elif fd.type", "fd.type == FD.TYPE_STRING: func = unicode elif fd.type == FD.TYPE_BYTES: func = b64encode", "FD.TYPE_SINT64 or fd.type == FD.TYPE_FIXED32 or fd.type == FD.TYPE_FIXED64 or fd.type == FD.TYPE_SFIXED32", "is None: continue field = str(key) if isinstance(value, dict): decode_msg(value, getattr(msg, field)) elif", "decode_msg(dictionary, msg): msg.SetInParent() for key, value in dictionary.iteritems(): if value is None: continue", "fd.label == FD.LABEL_REPEATED: encoded_value = [] for v in value: encoded_value.append(encode_func(v)) else: encoded_value", "{} for fd, value in msg.ListFields(): result[fd.name] = encode_value(fd, value, encode_func(fd)) return result", "FD.TYPE_BYTES: func = b64encode elif fd.type == FD.TYPE_DOUBLE or fd.type == FD.TYPE_FLOAT: func", "== FD.TYPE_UINT32 or fd.type == FD.TYPE_SINT32 or fd.type == FD.TYPE_ENUM: func = int", "elif fd.type == FD.TYPE_BYTES: func = b64encode elif fd.type == FD.TYPE_DOUBLE or fd.type", "FD.TYPE_SFIXED64: func = long else: raise Error(\"Unknown field type %s\", fd.type) return func", "else: setattr(msg, field, decode_value(value, msg.DESCRIPTOR.fields_by_name[field])) def decode_list(values, field, fd): if isinstance(values[0], dict): for", "dict): for v in values: dict_to_proto(v, field.add()) else: for v in values: field.append(decode_value(v,", "FD.TYPE_FIXED32 or fd.type == 
FD.TYPE_FIXED64 or fd.type == FD.TYPE_SFIXED32 or fd.type == FD.TYPE_SFIXED64:", "key, value in dictionary.iteritems(): if value is None: continue field = str(key) if", "= float elif fd.type == FD.TYPE_INT32 or fd.type == FD.TYPE_UINT32 or fd.type ==", "encode_func(value) return encoded_value def dict_to_proto(dictionary, msg): decode_msg(dictionary, msg) return msg def decode_msg(dictionary, msg):", "elif fd.type == FD.TYPE_BOOL: func = bool elif fd.type == FD.TYPE_STRING: func =", "FD.TYPE_SINT32 or fd.type == FD.TYPE_ENUM: func = int elif fd.type == FD.TYPE_INT64 or", "def decode_value(value, fd): if fd.type == FD.TYPE_BYTES: return b64decode(value) if fd.type == FD.TYPE_BOOL:", "if isinstance(value, dict): decode_msg(value, getattr(msg, field)) elif isinstance(value, list): decode_list(value, getattr(msg, field), msg.DESCRIPTOR.fields_by_name[field])", "== FD.TYPE_SFIXED32 or fd.type == FD.TYPE_SFIXED64: func = long else: raise Error(\"Unknown field", "encoded_value def dict_to_proto(dictionary, msg): decode_msg(dictionary, msg) return msg def decode_msg(dictionary, msg): msg.SetInParent() for", "coding: utf-8 -*- from base64 import b64encode, b64decode from google.protobuf.descriptor import FieldDescriptor as", "fd.type == FD.TYPE_BOOL: func = bool elif fd.type == FD.TYPE_STRING: func = unicode", "b64encode elif fd.type == FD.TYPE_DOUBLE or fd.type == FD.TYPE_FLOAT: func = float elif", "-*- from base64 import b64encode, b64decode from google.protobuf.descriptor import FieldDescriptor as FD def", "elif isinstance(value, list): decode_list(value, getattr(msg, field), msg.DESCRIPTOR.fields_by_name[field]) else: setattr(msg, field, decode_value(value, msg.DESCRIPTOR.fields_by_name[field])) def", "for v in values: field.append(decode_value(v, fd)) def decode_value(value, fd): if fd.type == FD.TYPE_BYTES:", "return msg def decode_msg(dictionary, msg): msg.SetInParent() for key, value in dictionary.iteritems(): if value", "FD def proto_to_dict(msg): result = {} for 
fd, value in msg.ListFields(): result[fd.name] =", "func = int elif fd.type == FD.TYPE_INT64 or fd.type == FD.TYPE_UINT64 or fd.type", "fd.type == FD.TYPE_BOOL: return bool(value) if fd.type == FD.TYPE_INT32 or fd.type == FD.TYPE_UINT32", "FD.TYPE_ENUM: func = int elif fd.type == FD.TYPE_INT64 or fd.type == FD.TYPE_UINT64 or", "encoded_value.append(encode_func(v)) else: encoded_value = encode_func(value) return encoded_value def dict_to_proto(dictionary, msg): decode_msg(dictionary, msg) return", "fd.type == FD.TYPE_ENUM: return int(value) if fd.type == FD.TYPE_INT64 or fd.type == FD.TYPE_UINT64", "field = str(key) if isinstance(value, dict): decode_msg(value, getattr(msg, field)) elif isinstance(value, list): decode_list(value,", "func = long else: raise Error(\"Unknown field type %s\", fd.type) return func def", "== FD.TYPE_UINT64 or fd.type == FD.TYPE_SINT64 or fd.type == FD.TYPE_FIXED32 or fd.type ==", "in value: encoded_value.append(encode_func(v)) else: encoded_value = encode_func(value) return encoded_value def dict_to_proto(dictionary, msg): decode_msg(dictionary,", "import FieldDescriptor as FD def proto_to_dict(msg): result = {} for fd, value in", "long else: raise Error(\"Unknown field type %s\", fd.type) return func def encode_value(fd, value,", "= long else: raise Error(\"Unknown field type %s\", fd.type) return func def encode_value(fd,", "values: dict_to_proto(v, field.add()) else: for v in values: field.append(decode_value(v, fd)) def decode_value(value, fd):", "= unicode elif fd.type == FD.TYPE_BYTES: func = b64encode elif fd.type == FD.TYPE_DOUBLE", "<filename>test/python/proto2dict.py # -*- coding: utf-8 -*- from base64 import b64encode, b64decode from google.protobuf.descriptor", "func = unicode elif fd.type == FD.TYPE_BYTES: func = b64encode elif fd.type ==", "decode_list(values, field, fd): if isinstance(values[0], dict): for v in values: dict_to_proto(v, field.add()) else:", "encode_func(fd): if fd.type == FD.TYPE_MESSAGE: func = 
proto_to_dict elif fd.type == FD.TYPE_BOOL: func", "google.protobuf.descriptor import FieldDescriptor as FD def proto_to_dict(msg): result = {} for fd, value", "return bool(value) if fd.type == FD.TYPE_INT32 or fd.type == FD.TYPE_UINT32 or fd.type ==", "or fd.type == FD.TYPE_SINT64 or fd.type == FD.TYPE_FIXED32 or fd.type == FD.TYPE_FIXED64 or", "msg.ListFields(): result[fd.name] = encode_value(fd, value, encode_func(fd)) return result def encode_func(fd): if fd.type ==", "FD.TYPE_DOUBLE or fd.type == FD.TYPE_FLOAT: func = float elif fd.type == FD.TYPE_INT32 or", "== FD.TYPE_FLOAT: func = float elif fd.type == FD.TYPE_INT32 or fd.type == FD.TYPE_UINT32", "if isinstance(values[0], dict): for v in values: dict_to_proto(v, field.add()) else: for v in", "if fd.type == FD.TYPE_BOOL: return bool(value) if fd.type == FD.TYPE_INT32 or fd.type ==", "FD.LABEL_REPEATED: encoded_value = [] for v in value: encoded_value.append(encode_func(v)) else: encoded_value = encode_func(value)", "or fd.type == FD.TYPE_ENUM: return int(value) if fd.type == FD.TYPE_INT64 or fd.type ==", "def dict_to_proto(dictionary, msg): decode_msg(dictionary, msg) return msg def decode_msg(dictionary, msg): msg.SetInParent() for key,", "fd.type == FD.TYPE_SFIXED32 or fd.type == FD.TYPE_SFIXED64: func = long else: raise Error(\"Unknown", "elif fd.type == FD.TYPE_DOUBLE or fd.type == FD.TYPE_FLOAT: func = float elif fd.type", "if fd.type == FD.TYPE_INT32 or fd.type == FD.TYPE_UINT32 or fd.type == FD.TYPE_SINT32 or", "FD.TYPE_BOOL: func = bool elif fd.type == FD.TYPE_STRING: func = unicode elif fd.type", "for v in value: encoded_value.append(encode_func(v)) else: encoded_value = encode_func(value) return encoded_value def dict_to_proto(dictionary,", "= encode_value(fd, value, encode_func(fd)) return result def encode_func(fd): if fd.type == FD.TYPE_MESSAGE: func", "fd.type == FD.TYPE_SINT64 or fd.type == FD.TYPE_FIXED32 or fd.type == FD.TYPE_FIXED64 or fd.type", "= [] for v in value: 
encoded_value.append(encode_func(v)) else: encoded_value = encode_func(value) return encoded_value", "or fd.type == FD.TYPE_FIXED64 or fd.type == FD.TYPE_SFIXED32 or fd.type == FD.TYPE_SFIXED64: func", "b64decode(value) if fd.type == FD.TYPE_BOOL: return bool(value) if fd.type == FD.TYPE_INT32 or fd.type", "return b64decode(value) if fd.type == FD.TYPE_BOOL: return bool(value) if fd.type == FD.TYPE_INT32 or", "raise Error(\"Unknown field type %s\", fd.type) return func def encode_value(fd, value, encode_func): if", "fd.type == FD.TYPE_FLOAT: func = float elif fd.type == FD.TYPE_INT32 or fd.type ==", "fd.type == FD.TYPE_INT64 or fd.type == FD.TYPE_UINT64 or fd.type == FD.TYPE_SINT64 or fd.type", "== FD.TYPE_FIXED64 or fd.type == FD.TYPE_SFIXED32 or fd.type == FD.TYPE_SFIXED64: func = long", "setattr(msg, field, decode_value(value, msg.DESCRIPTOR.fields_by_name[field])) def decode_list(values, field, fd): if isinstance(values[0], dict): for v", "FD.TYPE_UINT64 or fd.type == FD.TYPE_SINT64 or fd.type == FD.TYPE_FIXED32 or fd.type == FD.TYPE_FIXED64", "value, encode_func): if fd.label == FD.LABEL_REPEATED: encoded_value = [] for v in value:", "v in values: dict_to_proto(v, field.add()) else: for v in values: field.append(decode_value(v, fd)) def", "== FD.TYPE_FIXED64 or fd.type == FD.TYPE_SFIXED32 or fd.type == FD.TYPE_SFIXED64: return long(value) return", "fd.type == FD.TYPE_UINT32 or fd.type == FD.TYPE_SINT32 or fd.type == FD.TYPE_ENUM: func =", "== FD.TYPE_BYTES: func = b64encode elif fd.type == FD.TYPE_DOUBLE or fd.type == FD.TYPE_FLOAT:", "import b64encode, b64decode from google.protobuf.descriptor import FieldDescriptor as FD def proto_to_dict(msg): result =", "list): decode_list(value, getattr(msg, field), msg.DESCRIPTOR.fields_by_name[field]) else: setattr(msg, field, decode_value(value, msg.DESCRIPTOR.fields_by_name[field])) def decode_list(values, field,", "= encode_func(value) return encoded_value def dict_to_proto(dictionary, msg): decode_msg(dictionary, 
msg) return msg def decode_msg(dictionary,", "= int elif fd.type == FD.TYPE_INT64 or fd.type == FD.TYPE_UINT64 or fd.type ==", "func = b64encode elif fd.type == FD.TYPE_DOUBLE or fd.type == FD.TYPE_FLOAT: func =", "for key, value in dictionary.iteritems(): if value is None: continue field = str(key)", "b64encode, b64decode from google.protobuf.descriptor import FieldDescriptor as FD def proto_to_dict(msg): result = {}", "= b64encode elif fd.type == FD.TYPE_DOUBLE or fd.type == FD.TYPE_FLOAT: func = float", "or fd.type == FD.TYPE_UINT32 or fd.type == FD.TYPE_SINT32 or fd.type == FD.TYPE_ENUM: return", "FD.TYPE_FIXED64 or fd.type == FD.TYPE_SFIXED32 or fd.type == FD.TYPE_SFIXED64: return long(value) return value", "field type %s\", fd.type) return func def encode_value(fd, value, encode_func): if fd.label ==", "fd): if fd.type == FD.TYPE_BYTES: return b64decode(value) if fd.type == FD.TYPE_BOOL: return bool(value)", "== FD.TYPE_SINT32 or fd.type == FD.TYPE_ENUM: return int(value) if fd.type == FD.TYPE_INT64 or", "decode_value(value, fd): if fd.type == FD.TYPE_BYTES: return b64decode(value) if fd.type == FD.TYPE_BOOL: return", "decode_value(value, msg.DESCRIPTOR.fields_by_name[field])) def decode_list(values, field, fd): if isinstance(values[0], dict): for v in values:", "for v in values: dict_to_proto(v, field.add()) else: for v in values: field.append(decode_value(v, fd))", "fd.type == FD.TYPE_MESSAGE: func = proto_to_dict elif fd.type == FD.TYPE_BOOL: func = bool", "getattr(msg, field), msg.DESCRIPTOR.fields_by_name[field]) else: setattr(msg, field, decode_value(value, msg.DESCRIPTOR.fields_by_name[field])) def decode_list(values, field, fd): if", "encode_func(fd)) return result def encode_func(fd): if fd.type == FD.TYPE_MESSAGE: func = proto_to_dict elif", "Error(\"Unknown field type %s\", fd.type) return func def encode_value(fd, value, encode_func): if fd.label", "FD.TYPE_SFIXED32 or fd.type == FD.TYPE_SFIXED64: func = long else: raise Error(\"Unknown field 
type", "dictionary.iteritems(): if value is None: continue field = str(key) if isinstance(value, dict): decode_msg(value,", "b64decode from google.protobuf.descriptor import FieldDescriptor as FD def proto_to_dict(msg): result = {} for", "None: continue field = str(key) if isinstance(value, dict): decode_msg(value, getattr(msg, field)) elif isinstance(value," ]
[ "# The directory containing this file HERE = pathlib.Path(__file__).parent # The text of", "url=\"https://github.com/maximlt/project\", author=\"<NAME>\", author_email=\"<EMAIL>\", license=\"MIT\", classifiers=[ \"License :: OSI Approved :: MIT License\", \"Programming", "import setup, find_packages import codecs import re # The directory containing this file", "+ TEST_REQUIRE + TEST_NOTEBOOKS_REQUIRE + DOCS_REQUIRE + PUBLISH_REQUIRE ) ) def read(*parts): #", "[\"black>=19.3b0\"] NOTEBOOK_REQUIRE = [\"jupyterlab>=1.0.4\"] TEST_REQUIRE = [\"pytest>=5.0.1\", \"pytest-cov>=2.7.1\"] TEST_NOTEBOOKS_REQUIRE = [\"pytest>=5.0.1\", \"nbval>=0.1\"] PUBLISH_REQUIRE", "DEV_REQUIRE, \"test\": TEST_REQUIRE, \"test_notebooks\": TEST_NOTEBOOKS_REQUIRE, \"formatter\": FORMATTER_REQUIRE, \"linters\": LINTERS_REQUIRE, \"docs\": DOCS_REQUIRE, }, project_urls={\"Documentation\":", "= [\"sphinx>=2.1.2\", \"sphinx-rtd-theme>=0.4.3\"] DEV_REQUIRE = list( set( AUTOMATION_REQUIRE + LINTERS_REQUIRE + FORMATTER_REQUIRE +", "find version string.\") # This call to setup() does all the work setup(", "Approved :: MIT License\", \"Programming Language :: Python :: 3\", \"Programming Language ::", "3.7\", ], python_requires=\">=3.6\", packages=find_packages(), include_package_data=True, install_requires=[\"Click>=7.0\"], entry_points={\"console_scripts\": [\"calc=project:cli\"]}, extras_require={ \"dev\": DEV_REQUIRE, \"test\": TEST_REQUIRE,", "PUBLISH_REQUIRE = [\"twine>=1.13.0\"] DOCS_REQUIRE = [\"sphinx>=2.1.2\", \"sphinx-rtd-theme>=0.4.3\"] DEV_REQUIRE = list( set( AUTOMATION_REQUIRE +", "MIT License\", \"Programming Language :: Python :: 3\", \"Programming Language :: Python ::", "license=\"MIT\", classifiers=[ \"License :: OSI Approved :: MIT License\", \"Programming Language :: Python", ":: OSI Approved :: MIT License\", \"Programming Language :: Python :: 3\", \"Programming", "as fp: with codecs.open(HERE.joinpath(*parts), \"r\") as fp: return fp.read() def find_version(*file_paths): 
version_file =", "method from https://packaging.python.org/guides/single-sourcing-package-version/ version=find_version(\"project\", \"__init__.py\"), description=\"Project description\", long_description=README, long_description_content_type=\"text/markdown\", url=\"https://github.com/maximlt/project\", author=\"<NAME>\", author_email=\"<EMAIL>\", license=\"MIT\",", "description=\"Project description\", long_description=README, long_description_content_type=\"text/markdown\", url=\"https://github.com/maximlt/project\", author=\"<NAME>\", author_email=\"<EMAIL>\", license=\"MIT\", classifiers=[ \"License :: OSI Approved", "OSI Approved :: MIT License\", \"Programming Language :: Python :: 3\", \"Programming Language", "\"License :: OSI Approved :: MIT License\", \"Programming Language :: Python :: 3\",", "to find version string.\") # This call to setup() does all the work", "fp: return fp.read() def find_version(*file_paths): version_file = read(*file_paths) version_match = re.search(r\"^__version__ = ['\\\"]([^'\\\"]*)['\\\"]\",", "TEST_REQUIRE, \"test_notebooks\": TEST_NOTEBOOKS_REQUIRE, \"formatter\": FORMATTER_REQUIRE, \"linters\": LINTERS_REQUIRE, \"docs\": DOCS_REQUIRE, }, project_urls={\"Documentation\": \"https://xyxyxy.readthedocs.io/en/latest/\"}, )", "if version_match: return version_match.group(1) raise RuntimeError(\"Unable to find version string.\") # This call", "\"r\") as fp: return fp.read() def find_version(*file_paths): version_file = read(*file_paths) version_match = re.search(r\"^__version__", "version_file, re.M) if version_match: return version_match.group(1) raise RuntimeError(\"Unable to find version string.\") #", ":: MIT License\", \"Programming Language :: Python :: 3\", \"Programming Language :: Python", "codecs.open(os.path.join(HERE, *parts), \"r\") as fp: with codecs.open(HERE.joinpath(*parts), \"r\") as fp: return fp.read() def", "+ TEST_NOTEBOOKS_REQUIRE + DOCS_REQUIRE + PUBLISH_REQUIRE ) ) def read(*parts): # with 
codecs.open(os.path.join(HERE,", "\"README.md\").read_text() AUTOMATION_REQUIRE = [\"tox>=3.12.1\"] LINTERS_REQUIRE = [\"flake8>=3.7.8\"] FORMATTER_REQUIRE = [\"black>=19.3b0\"] NOTEBOOK_REQUIRE = [\"jupyterlab>=1.0.4\"]", "work setup( name=\"projectxyxyxy\", # First method from https://packaging.python.org/guides/single-sourcing-package-version/ version=find_version(\"project\", \"__init__.py\"), description=\"Project description\", long_description=README,", "\"sphinx-rtd-theme>=0.4.3\"] DEV_REQUIRE = list( set( AUTOMATION_REQUIRE + LINTERS_REQUIRE + FORMATTER_REQUIRE + NOTEBOOK_REQUIRE +", "setuptools import setup, find_packages import codecs import re # The directory containing this", "extras_require={ \"dev\": DEV_REQUIRE, \"test\": TEST_REQUIRE, \"test_notebooks\": TEST_NOTEBOOKS_REQUIRE, \"formatter\": FORMATTER_REQUIRE, \"linters\": LINTERS_REQUIRE, \"docs\": DOCS_REQUIRE,", "file HERE = pathlib.Path(__file__).parent # The text of the README file README =", "fp.read() def find_version(*file_paths): version_file = read(*file_paths) version_match = re.search(r\"^__version__ = ['\\\"]([^'\\\"]*)['\\\"]\", version_file, re.M)", "\"dev\": DEV_REQUIRE, \"test\": TEST_REQUIRE, \"test_notebooks\": TEST_NOTEBOOKS_REQUIRE, \"formatter\": FORMATTER_REQUIRE, \"linters\": LINTERS_REQUIRE, \"docs\": DOCS_REQUIRE, },", "README = (HERE / \"README.md\").read_text() AUTOMATION_REQUIRE = [\"tox>=3.12.1\"] LINTERS_REQUIRE = [\"flake8>=3.7.8\"] FORMATTER_REQUIRE =", "Language :: Python :: 3.7\", ], python_requires=\">=3.6\", packages=find_packages(), include_package_data=True, install_requires=[\"Click>=7.0\"], entry_points={\"console_scripts\": [\"calc=project:cli\"]}, extras_require={", "[\"calc=project:cli\"]}, extras_require={ \"dev\": DEV_REQUIRE, \"test\": TEST_REQUIRE, \"test_notebooks\": TEST_NOTEBOOKS_REQUIRE, \"formatter\": FORMATTER_REQUIRE, \"linters\": LINTERS_REQUIRE, \"docs\":", "[\"pytest>=5.0.1\", \"pytest-cov>=2.7.1\"] TEST_NOTEBOOKS_REQUIRE = 
[\"pytest>=5.0.1\", \"nbval>=0.1\"] PUBLISH_REQUIRE = [\"twine>=1.13.0\"] DOCS_REQUIRE = [\"sphinx>=2.1.2\", \"sphinx-rtd-theme>=0.4.3\"]", "from setuptools import setup, find_packages import codecs import re # The directory containing", "Python :: 3\", \"Programming Language :: Python :: 3.7\", ], python_requires=\">=3.6\", packages=find_packages(), include_package_data=True,", "pathlib from setuptools import setup, find_packages import codecs import re # The directory", "version_file = read(*file_paths) version_match = re.search(r\"^__version__ = ['\\\"]([^'\\\"]*)['\\\"]\", version_file, re.M) if version_match: return", "read(*file_paths) version_match = re.search(r\"^__version__ = ['\\\"]([^'\\\"]*)['\\\"]\", version_file, re.M) if version_match: return version_match.group(1) raise", "https://packaging.python.org/guides/single-sourcing-package-version/ version=find_version(\"project\", \"__init__.py\"), description=\"Project description\", long_description=README, long_description_content_type=\"text/markdown\", url=\"https://github.com/maximlt/project\", author=\"<NAME>\", author_email=\"<EMAIL>\", license=\"MIT\", classifiers=[ \"License", "\"test\": TEST_REQUIRE, \"test_notebooks\": TEST_NOTEBOOKS_REQUIRE, \"formatter\": FORMATTER_REQUIRE, \"linters\": LINTERS_REQUIRE, \"docs\": DOCS_REQUIRE, }, project_urls={\"Documentation\": \"https://xyxyxy.readthedocs.io/en/latest/\"},", ") def read(*parts): # with codecs.open(os.path.join(HERE, *parts), \"r\") as fp: with codecs.open(HERE.joinpath(*parts), \"r\")", "= [\"tox>=3.12.1\"] LINTERS_REQUIRE = [\"flake8>=3.7.8\"] FORMATTER_REQUIRE = [\"black>=19.3b0\"] NOTEBOOK_REQUIRE = [\"jupyterlab>=1.0.4\"] TEST_REQUIRE =", "TEST_NOTEBOOKS_REQUIRE = [\"pytest>=5.0.1\", \"nbval>=0.1\"] PUBLISH_REQUIRE = [\"twine>=1.13.0\"] DOCS_REQUIRE = [\"sphinx>=2.1.2\", \"sphinx-rtd-theme>=0.4.3\"] DEV_REQUIRE =", "setup() does all the work setup( name=\"projectxyxyxy\", # First method from 
https://packaging.python.org/guides/single-sourcing-package-version/ version=find_version(\"project\",", "classifiers=[ \"License :: OSI Approved :: MIT License\", \"Programming Language :: Python ::", "fp: with codecs.open(HERE.joinpath(*parts), \"r\") as fp: return fp.read() def find_version(*file_paths): version_file = read(*file_paths)", ":: 3\", \"Programming Language :: Python :: 3.7\", ], python_requires=\">=3.6\", packages=find_packages(), include_package_data=True, install_requires=[\"Click>=7.0\"],", "install_requires=[\"Click>=7.0\"], entry_points={\"console_scripts\": [\"calc=project:cli\"]}, extras_require={ \"dev\": DEV_REQUIRE, \"test\": TEST_REQUIRE, \"test_notebooks\": TEST_NOTEBOOKS_REQUIRE, \"formatter\": FORMATTER_REQUIRE, \"linters\":", ":: Python :: 3.7\", ], python_requires=\">=3.6\", packages=find_packages(), include_package_data=True, install_requires=[\"Click>=7.0\"], entry_points={\"console_scripts\": [\"calc=project:cli\"]}, extras_require={ \"dev\":", "/ \"README.md\").read_text() AUTOMATION_REQUIRE = [\"tox>=3.12.1\"] LINTERS_REQUIRE = [\"flake8>=3.7.8\"] FORMATTER_REQUIRE = [\"black>=19.3b0\"] NOTEBOOK_REQUIRE =", "call to setup() does all the work setup( name=\"projectxyxyxy\", # First method from", "return version_match.group(1) raise RuntimeError(\"Unable to find version string.\") # This call to setup()", "list( set( AUTOMATION_REQUIRE + LINTERS_REQUIRE + FORMATTER_REQUIRE + NOTEBOOK_REQUIRE + TEST_REQUIRE + TEST_NOTEBOOKS_REQUIRE", "def read(*parts): # with codecs.open(os.path.join(HERE, *parts), \"r\") as fp: with codecs.open(HERE.joinpath(*parts), \"r\") as", "include_package_data=True, install_requires=[\"Click>=7.0\"], entry_points={\"console_scripts\": [\"calc=project:cli\"]}, extras_require={ \"dev\": DEV_REQUIRE, \"test\": TEST_REQUIRE, \"test_notebooks\": TEST_NOTEBOOKS_REQUIRE, \"formatter\": FORMATTER_REQUIRE,", "= list( set( AUTOMATION_REQUIRE + LINTERS_REQUIRE + FORMATTER_REQUIRE + NOTEBOOK_REQUIRE + TEST_REQUIRE +", 
"all the work setup( name=\"projectxyxyxy\", # First method from https://packaging.python.org/guides/single-sourcing-package-version/ version=find_version(\"project\", \"__init__.py\"), description=\"Project", "License\", \"Programming Language :: Python :: 3\", \"Programming Language :: Python :: 3.7\",", "[\"jupyterlab>=1.0.4\"] TEST_REQUIRE = [\"pytest>=5.0.1\", \"pytest-cov>=2.7.1\"] TEST_NOTEBOOKS_REQUIRE = [\"pytest>=5.0.1\", \"nbval>=0.1\"] PUBLISH_REQUIRE = [\"twine>=1.13.0\"] DOCS_REQUIRE", "+ LINTERS_REQUIRE + FORMATTER_REQUIRE + NOTEBOOK_REQUIRE + TEST_REQUIRE + TEST_NOTEBOOKS_REQUIRE + DOCS_REQUIRE +", "directory containing this file HERE = pathlib.Path(__file__).parent # The text of the README", "This call to setup() does all the work setup( name=\"projectxyxyxy\", # First method", "author_email=\"<EMAIL>\", license=\"MIT\", classifiers=[ \"License :: OSI Approved :: MIT License\", \"Programming Language ::", "= re.search(r\"^__version__ = ['\\\"]([^'\\\"]*)['\\\"]\", version_file, re.M) if version_match: return version_match.group(1) raise RuntimeError(\"Unable to", "re # The directory containing this file HERE = pathlib.Path(__file__).parent # The text", "python_requires=\">=3.6\", packages=find_packages(), include_package_data=True, install_requires=[\"Click>=7.0\"], entry_points={\"console_scripts\": [\"calc=project:cli\"]}, extras_require={ \"dev\": DEV_REQUIRE, \"test\": TEST_REQUIRE, \"test_notebooks\": TEST_NOTEBOOKS_REQUIRE,", ":: 3.7\", ], python_requires=\">=3.6\", packages=find_packages(), include_package_data=True, install_requires=[\"Click>=7.0\"], entry_points={\"console_scripts\": [\"calc=project:cli\"]}, extras_require={ \"dev\": DEV_REQUIRE, \"test\":", "\"nbval>=0.1\"] PUBLISH_REQUIRE = [\"twine>=1.13.0\"] DOCS_REQUIRE = [\"sphinx>=2.1.2\", \"sphinx-rtd-theme>=0.4.3\"] DEV_REQUIRE = list( set( AUTOMATION_REQUIRE", "FORMATTER_REQUIRE + NOTEBOOK_REQUIRE + TEST_REQUIRE + TEST_NOTEBOOKS_REQUIRE + DOCS_REQUIRE + PUBLISH_REQUIRE ) )", 
"LINTERS_REQUIRE = [\"flake8>=3.7.8\"] FORMATTER_REQUIRE = [\"black>=19.3b0\"] NOTEBOOK_REQUIRE = [\"jupyterlab>=1.0.4\"] TEST_REQUIRE = [\"pytest>=5.0.1\", \"pytest-cov>=2.7.1\"]", "text of the README file README = (HERE / \"README.md\").read_text() AUTOMATION_REQUIRE = [\"tox>=3.12.1\"]", "# The text of the README file README = (HERE / \"README.md\").read_text() AUTOMATION_REQUIRE", "version string.\") # This call to setup() does all the work setup( name=\"projectxyxyxy\",", ":: Python :: 3\", \"Programming Language :: Python :: 3.7\", ], python_requires=\">=3.6\", packages=find_packages(),", "raise RuntimeError(\"Unable to find version string.\") # This call to setup() does all", "TEST_NOTEBOOKS_REQUIRE + DOCS_REQUIRE + PUBLISH_REQUIRE ) ) def read(*parts): # with codecs.open(os.path.join(HERE, *parts),", "to setup() does all the work setup( name=\"projectxyxyxy\", # First method from https://packaging.python.org/guides/single-sourcing-package-version/", "= [\"pytest>=5.0.1\", \"nbval>=0.1\"] PUBLISH_REQUIRE = [\"twine>=1.13.0\"] DOCS_REQUIRE = [\"sphinx>=2.1.2\", \"sphinx-rtd-theme>=0.4.3\"] DEV_REQUIRE = list(", "string.\") # This call to setup() does all the work setup( name=\"projectxyxyxy\", #", "# This call to setup() does all the work setup( name=\"projectxyxyxy\", # First", "import re # The directory containing this file HERE = pathlib.Path(__file__).parent # The", "Language :: Python :: 3\", \"Programming Language :: Python :: 3.7\", ], python_requires=\">=3.6\",", "README file README = (HERE / \"README.md\").read_text() AUTOMATION_REQUIRE = [\"tox>=3.12.1\"] LINTERS_REQUIRE = [\"flake8>=3.7.8\"]", "+ FORMATTER_REQUIRE + NOTEBOOK_REQUIRE + TEST_REQUIRE + TEST_NOTEBOOKS_REQUIRE + DOCS_REQUIRE + PUBLISH_REQUIRE )", "AUTOMATION_REQUIRE + LINTERS_REQUIRE + FORMATTER_REQUIRE + NOTEBOOK_REQUIRE + TEST_REQUIRE + TEST_NOTEBOOKS_REQUIRE + DOCS_REQUIRE", "\"r\") as fp: with codecs.open(HERE.joinpath(*parts), \"r\") as fp: return fp.read() def 
find_version(*file_paths): version_file", "Python :: 3.7\", ], python_requires=\">=3.6\", packages=find_packages(), include_package_data=True, install_requires=[\"Click>=7.0\"], entry_points={\"console_scripts\": [\"calc=project:cli\"]}, extras_require={ \"dev\": DEV_REQUIRE,", "of the README file README = (HERE / \"README.md\").read_text() AUTOMATION_REQUIRE = [\"tox>=3.12.1\"] LINTERS_REQUIRE", "pathlib.Path(__file__).parent # The text of the README file README = (HERE / \"README.md\").read_text()", "DOCS_REQUIRE = [\"sphinx>=2.1.2\", \"sphinx-rtd-theme>=0.4.3\"] DEV_REQUIRE = list( set( AUTOMATION_REQUIRE + LINTERS_REQUIRE + FORMATTER_REQUIRE", "long_description_content_type=\"text/markdown\", url=\"https://github.com/maximlt/project\", author=\"<NAME>\", author_email=\"<EMAIL>\", license=\"MIT\", classifiers=[ \"License :: OSI Approved :: MIT License\",", "[\"tox>=3.12.1\"] LINTERS_REQUIRE = [\"flake8>=3.7.8\"] FORMATTER_REQUIRE = [\"black>=19.3b0\"] NOTEBOOK_REQUIRE = [\"jupyterlab>=1.0.4\"] TEST_REQUIRE = [\"pytest>=5.0.1\",", "setup( name=\"projectxyxyxy\", # First method from https://packaging.python.org/guides/single-sourcing-package-version/ version=find_version(\"project\", \"__init__.py\"), description=\"Project description\", long_description=README, long_description_content_type=\"text/markdown\",", "version_match: return version_match.group(1) raise RuntimeError(\"Unable to find version string.\") # This call to", "re.M) if version_match: return version_match.group(1) raise RuntimeError(\"Unable to find version string.\") # This", "description\", long_description=README, long_description_content_type=\"text/markdown\", url=\"https://github.com/maximlt/project\", author=\"<NAME>\", author_email=\"<EMAIL>\", license=\"MIT\", classifiers=[ \"License :: OSI Approved ::", "+ NOTEBOOK_REQUIRE + TEST_REQUIRE + TEST_NOTEBOOKS_REQUIRE + DOCS_REQUIRE + PUBLISH_REQUIRE ) ) def", "+ DOCS_REQUIRE + PUBLISH_REQUIRE ) ) def read(*parts): # with 
codecs.open(os.path.join(HERE, *parts), \"r\")", "import codecs import re # The directory containing this file HERE = pathlib.Path(__file__).parent", "= [\"pytest>=5.0.1\", \"pytest-cov>=2.7.1\"] TEST_NOTEBOOKS_REQUIRE = [\"pytest>=5.0.1\", \"nbval>=0.1\"] PUBLISH_REQUIRE = [\"twine>=1.13.0\"] DOCS_REQUIRE = [\"sphinx>=2.1.2\",", "re.search(r\"^__version__ = ['\\\"]([^'\\\"]*)['\\\"]\", version_file, re.M) if version_match: return version_match.group(1) raise RuntimeError(\"Unable to find", "PUBLISH_REQUIRE ) ) def read(*parts): # with codecs.open(os.path.join(HERE, *parts), \"r\") as fp: with", "First method from https://packaging.python.org/guides/single-sourcing-package-version/ version=find_version(\"project\", \"__init__.py\"), description=\"Project description\", long_description=README, long_description_content_type=\"text/markdown\", url=\"https://github.com/maximlt/project\", author=\"<NAME>\", author_email=\"<EMAIL>\",", "<reponame>maximlt/project<gh_stars>0 import pathlib from setuptools import setup, find_packages import codecs import re #", "def find_version(*file_paths): version_file = read(*file_paths) version_match = re.search(r\"^__version__ = ['\\\"]([^'\\\"]*)['\\\"]\", version_file, re.M) if", "FORMATTER_REQUIRE = [\"black>=19.3b0\"] NOTEBOOK_REQUIRE = [\"jupyterlab>=1.0.4\"] TEST_REQUIRE = [\"pytest>=5.0.1\", \"pytest-cov>=2.7.1\"] TEST_NOTEBOOKS_REQUIRE = [\"pytest>=5.0.1\",", "TEST_REQUIRE = [\"pytest>=5.0.1\", \"pytest-cov>=2.7.1\"] TEST_NOTEBOOKS_REQUIRE = [\"pytest>=5.0.1\", \"nbval>=0.1\"] PUBLISH_REQUIRE = [\"twine>=1.13.0\"] DOCS_REQUIRE =", "name=\"projectxyxyxy\", # First method from https://packaging.python.org/guides/single-sourcing-package-version/ version=find_version(\"project\", \"__init__.py\"), description=\"Project description\", long_description=README, long_description_content_type=\"text/markdown\", url=\"https://github.com/maximlt/project\",", "entry_points={\"console_scripts\": [\"calc=project:cli\"]}, extras_require={ 
\"dev\": DEV_REQUIRE, \"test\": TEST_REQUIRE, \"test_notebooks\": TEST_NOTEBOOKS_REQUIRE, \"formatter\": FORMATTER_REQUIRE, \"linters\": LINTERS_REQUIRE,", "3\", \"Programming Language :: Python :: 3.7\", ], python_requires=\">=3.6\", packages=find_packages(), include_package_data=True, install_requires=[\"Click>=7.0\"], entry_points={\"console_scripts\":", "The directory containing this file HERE = pathlib.Path(__file__).parent # The text of the", "from https://packaging.python.org/guides/single-sourcing-package-version/ version=find_version(\"project\", \"__init__.py\"), description=\"Project description\", long_description=README, long_description_content_type=\"text/markdown\", url=\"https://github.com/maximlt/project\", author=\"<NAME>\", author_email=\"<EMAIL>\", license=\"MIT\", classifiers=[", "= read(*file_paths) version_match = re.search(r\"^__version__ = ['\\\"]([^'\\\"]*)['\\\"]\", version_file, re.M) if version_match: return version_match.group(1)", "# with codecs.open(os.path.join(HERE, *parts), \"r\") as fp: with codecs.open(HERE.joinpath(*parts), \"r\") as fp: return", "the work setup( name=\"projectxyxyxy\", # First method from https://packaging.python.org/guides/single-sourcing-package-version/ version=find_version(\"project\", \"__init__.py\"), description=\"Project description\",", ") ) def read(*parts): # with codecs.open(os.path.join(HERE, *parts), \"r\") as fp: with codecs.open(HERE.joinpath(*parts),", "= (HERE / \"README.md\").read_text() AUTOMATION_REQUIRE = [\"tox>=3.12.1\"] LINTERS_REQUIRE = [\"flake8>=3.7.8\"] FORMATTER_REQUIRE = [\"black>=19.3b0\"]", "= [\"twine>=1.13.0\"] DOCS_REQUIRE = [\"sphinx>=2.1.2\", \"sphinx-rtd-theme>=0.4.3\"] DEV_REQUIRE = list( set( AUTOMATION_REQUIRE + LINTERS_REQUIRE", "LINTERS_REQUIRE + FORMATTER_REQUIRE + NOTEBOOK_REQUIRE + TEST_REQUIRE + TEST_NOTEBOOKS_REQUIRE + DOCS_REQUIRE + PUBLISH_REQUIRE", "find_packages import codecs import re # The directory containing this file HERE =", "with 
codecs.open(HERE.joinpath(*parts), \"r\") as fp: return fp.read() def find_version(*file_paths): version_file = read(*file_paths) version_match", "return fp.read() def find_version(*file_paths): version_file = read(*file_paths) version_match = re.search(r\"^__version__ = ['\\\"]([^'\\\"]*)['\\\"]\", version_file,", "import pathlib from setuptools import setup, find_packages import codecs import re # The", "this file HERE = pathlib.Path(__file__).parent # The text of the README file README", "[\"twine>=1.13.0\"] DOCS_REQUIRE = [\"sphinx>=2.1.2\", \"sphinx-rtd-theme>=0.4.3\"] DEV_REQUIRE = list( set( AUTOMATION_REQUIRE + LINTERS_REQUIRE +", "TEST_REQUIRE + TEST_NOTEBOOKS_REQUIRE + DOCS_REQUIRE + PUBLISH_REQUIRE ) ) def read(*parts): # with", "# First method from https://packaging.python.org/guides/single-sourcing-package-version/ version=find_version(\"project\", \"__init__.py\"), description=\"Project description\", long_description=README, long_description_content_type=\"text/markdown\", url=\"https://github.com/maximlt/project\", author=\"<NAME>\",", "+ PUBLISH_REQUIRE ) ) def read(*parts): # with codecs.open(os.path.join(HERE, *parts), \"r\") as fp:", "version_match.group(1) raise RuntimeError(\"Unable to find version string.\") # This call to setup() does", "long_description=README, long_description_content_type=\"text/markdown\", url=\"https://github.com/maximlt/project\", author=\"<NAME>\", author_email=\"<EMAIL>\", license=\"MIT\", classifiers=[ \"License :: OSI Approved :: MIT", "codecs import re # The directory containing this file HERE = pathlib.Path(__file__).parent #", "version_match = re.search(r\"^__version__ = ['\\\"]([^'\\\"]*)['\\\"]\", version_file, re.M) if version_match: return version_match.group(1) raise RuntimeError(\"Unable", "DEV_REQUIRE = list( set( AUTOMATION_REQUIRE + LINTERS_REQUIRE + FORMATTER_REQUIRE + NOTEBOOK_REQUIRE + TEST_REQUIRE", "does all the work setup( name=\"projectxyxyxy\", # First method from 
https://packaging.python.org/guides/single-sourcing-package-version/ version=find_version(\"project\", \"__init__.py\"),", "HERE = pathlib.Path(__file__).parent # The text of the README file README = (HERE", "NOTEBOOK_REQUIRE = [\"jupyterlab>=1.0.4\"] TEST_REQUIRE = [\"pytest>=5.0.1\", \"pytest-cov>=2.7.1\"] TEST_NOTEBOOKS_REQUIRE = [\"pytest>=5.0.1\", \"nbval>=0.1\"] PUBLISH_REQUIRE =", "setup, find_packages import codecs import re # The directory containing this file HERE", "version=find_version(\"project\", \"__init__.py\"), description=\"Project description\", long_description=README, long_description_content_type=\"text/markdown\", url=\"https://github.com/maximlt/project\", author=\"<NAME>\", author_email=\"<EMAIL>\", license=\"MIT\", classifiers=[ \"License ::", "\"__init__.py\"), description=\"Project description\", long_description=README, long_description_content_type=\"text/markdown\", url=\"https://github.com/maximlt/project\", author=\"<NAME>\", author_email=\"<EMAIL>\", license=\"MIT\", classifiers=[ \"License :: OSI", "NOTEBOOK_REQUIRE + TEST_REQUIRE + TEST_NOTEBOOKS_REQUIRE + DOCS_REQUIRE + PUBLISH_REQUIRE ) ) def read(*parts):", "= [\"black>=19.3b0\"] NOTEBOOK_REQUIRE = [\"jupyterlab>=1.0.4\"] TEST_REQUIRE = [\"pytest>=5.0.1\", \"pytest-cov>=2.7.1\"] TEST_NOTEBOOKS_REQUIRE = [\"pytest>=5.0.1\", \"nbval>=0.1\"]", "author=\"<NAME>\", author_email=\"<EMAIL>\", license=\"MIT\", classifiers=[ \"License :: OSI Approved :: MIT License\", \"Programming Language", "\"pytest-cov>=2.7.1\"] TEST_NOTEBOOKS_REQUIRE = [\"pytest>=5.0.1\", \"nbval>=0.1\"] PUBLISH_REQUIRE = [\"twine>=1.13.0\"] DOCS_REQUIRE = [\"sphinx>=2.1.2\", \"sphinx-rtd-theme>=0.4.3\"] DEV_REQUIRE", "RuntimeError(\"Unable to find version string.\") # This call to setup() does all the", "set( AUTOMATION_REQUIRE + LINTERS_REQUIRE + FORMATTER_REQUIRE + NOTEBOOK_REQUIRE + TEST_REQUIRE + TEST_NOTEBOOKS_REQUIRE +", "\"Programming Language :: Python :: 3.7\", ], python_requires=\">=3.6\", 
packages=find_packages(), include_package_data=True, install_requires=[\"Click>=7.0\"], entry_points={\"console_scripts\": [\"calc=project:cli\"]},", "= pathlib.Path(__file__).parent # The text of the README file README = (HERE /", "containing this file HERE = pathlib.Path(__file__).parent # The text of the README file", "codecs.open(HERE.joinpath(*parts), \"r\") as fp: return fp.read() def find_version(*file_paths): version_file = read(*file_paths) version_match =", "The text of the README file README = (HERE / \"README.md\").read_text() AUTOMATION_REQUIRE =", "with codecs.open(os.path.join(HERE, *parts), \"r\") as fp: with codecs.open(HERE.joinpath(*parts), \"r\") as fp: return fp.read()", "= ['\\\"]([^'\\\"]*)['\\\"]\", version_file, re.M) if version_match: return version_match.group(1) raise RuntimeError(\"Unable to find version", "['\\\"]([^'\\\"]*)['\\\"]\", version_file, re.M) if version_match: return version_match.group(1) raise RuntimeError(\"Unable to find version string.\")", "the README file README = (HERE / \"README.md\").read_text() AUTOMATION_REQUIRE = [\"tox>=3.12.1\"] LINTERS_REQUIRE =", "= [\"jupyterlab>=1.0.4\"] TEST_REQUIRE = [\"pytest>=5.0.1\", \"pytest-cov>=2.7.1\"] TEST_NOTEBOOKS_REQUIRE = [\"pytest>=5.0.1\", \"nbval>=0.1\"] PUBLISH_REQUIRE = [\"twine>=1.13.0\"]", "read(*parts): # with codecs.open(os.path.join(HERE, *parts), \"r\") as fp: with codecs.open(HERE.joinpath(*parts), \"r\") as fp:", "DOCS_REQUIRE + PUBLISH_REQUIRE ) ) def read(*parts): # with codecs.open(os.path.join(HERE, *parts), \"r\") as", "file README = (HERE / \"README.md\").read_text() AUTOMATION_REQUIRE = [\"tox>=3.12.1\"] LINTERS_REQUIRE = [\"flake8>=3.7.8\"] FORMATTER_REQUIRE", "[\"sphinx>=2.1.2\", \"sphinx-rtd-theme>=0.4.3\"] DEV_REQUIRE = list( set( AUTOMATION_REQUIRE + LINTERS_REQUIRE + FORMATTER_REQUIRE + NOTEBOOK_REQUIRE", "[\"flake8>=3.7.8\"] FORMATTER_REQUIRE = [\"black>=19.3b0\"] NOTEBOOK_REQUIRE = [\"jupyterlab>=1.0.4\"] TEST_REQUIRE = [\"pytest>=5.0.1\", 
\"pytest-cov>=2.7.1\"] TEST_NOTEBOOKS_REQUIRE =", "AUTOMATION_REQUIRE = [\"tox>=3.12.1\"] LINTERS_REQUIRE = [\"flake8>=3.7.8\"] FORMATTER_REQUIRE = [\"black>=19.3b0\"] NOTEBOOK_REQUIRE = [\"jupyterlab>=1.0.4\"] TEST_REQUIRE", "\"Programming Language :: Python :: 3\", \"Programming Language :: Python :: 3.7\", ],", "], python_requires=\">=3.6\", packages=find_packages(), include_package_data=True, install_requires=[\"Click>=7.0\"], entry_points={\"console_scripts\": [\"calc=project:cli\"]}, extras_require={ \"dev\": DEV_REQUIRE, \"test\": TEST_REQUIRE, \"test_notebooks\":", "= [\"flake8>=3.7.8\"] FORMATTER_REQUIRE = [\"black>=19.3b0\"] NOTEBOOK_REQUIRE = [\"jupyterlab>=1.0.4\"] TEST_REQUIRE = [\"pytest>=5.0.1\", \"pytest-cov>=2.7.1\"] TEST_NOTEBOOKS_REQUIRE", "find_version(*file_paths): version_file = read(*file_paths) version_match = re.search(r\"^__version__ = ['\\\"]([^'\\\"]*)['\\\"]\", version_file, re.M) if version_match:", "[\"pytest>=5.0.1\", \"nbval>=0.1\"] PUBLISH_REQUIRE = [\"twine>=1.13.0\"] DOCS_REQUIRE = [\"sphinx>=2.1.2\", \"sphinx-rtd-theme>=0.4.3\"] DEV_REQUIRE = list( set(", "(HERE / \"README.md\").read_text() AUTOMATION_REQUIRE = [\"tox>=3.12.1\"] LINTERS_REQUIRE = [\"flake8>=3.7.8\"] FORMATTER_REQUIRE = [\"black>=19.3b0\"] NOTEBOOK_REQUIRE", "*parts), \"r\") as fp: with codecs.open(HERE.joinpath(*parts), \"r\") as fp: return fp.read() def find_version(*file_paths):", "packages=find_packages(), include_package_data=True, install_requires=[\"Click>=7.0\"], entry_points={\"console_scripts\": [\"calc=project:cli\"]}, extras_require={ \"dev\": DEV_REQUIRE, \"test\": TEST_REQUIRE, \"test_notebooks\": TEST_NOTEBOOKS_REQUIRE, \"formatter\":", "as fp: return fp.read() def find_version(*file_paths): version_file = read(*file_paths) version_match = re.search(r\"^__version__ =" ]
[ "def up_on(): print(\"[debug] turning UP engines on\") _OUT.left_down.off() _OUT.right_down.off() _OUT.main.on() _OUT.left_up.on() _OUT.right_up.on() def", "RPi.GPIO as gpio def up_off(): print(\"[debug] turning UP engines off\") _OUT.left_up.off() _OUT.RIGHT_UP.off() def", "off\") _OUT.left_up.off() _OUT.RIGHT_UP.off() def up_on(): print(\"[debug] turning UP engines on\") _OUT.left_down.off() _OUT.right_down.off() _OUT.main.on()", "gpio def up_off(): print(\"[debug] turning UP engines off\") _OUT.left_up.off() _OUT.RIGHT_UP.off() def up_on(): print(\"[debug]", "_OUT.right_down.off() def down_on(): print(\"[debug] turning DOWN engines on\") _OUT.left_up.off() _OUT.right_up.off() _OUT.main.on() _OUT.left_down.on() _OUT.right_down.on()", "_OUT.left_down.off() _OUT.right_down.off() def down_on(): print(\"[debug] turning DOWN engines on\") _OUT.left_up.off() _OUT.right_up.off() _OUT.main.on() _OUT.left_down.on()", "def down_on(): print(\"[debug] turning DOWN engines on\") _OUT.left_up.off() _OUT.right_up.off() _OUT.main.on() _OUT.left_down.on() _OUT.right_down.on() def", "print(\"[debug] turning DOWN engines off\") _OUT.left_down.off() _OUT.right_down.off() def down_on(): print(\"[debug] turning DOWN engines", "print(\"[debug] turning UP engines off\") _OUT.left_up.off() _OUT.RIGHT_UP.off() def up_on(): print(\"[debug] turning UP engines", "def all_off(): print(\"[debug] turning all engines off\") _OUT.main.off() _OUT.left_up.off() _OUT.right_up.off() _OUT.left_down.off() _OUT.right_down.off() def", "turning DOWN engines on\") _OUT.left_up.off() _OUT.right_up.off() _OUT.main.on() _OUT.left_down.on() _OUT.right_down.on() def all_off(): print(\"[debug] turning", "DOWN engines off\") _OUT.left_down.off() _OUT.right_down.off() def down_on(): print(\"[debug] turning DOWN engines on\") _OUT.left_up.off()", "engines on\") _OUT.left_up.off() _OUT.right_up.off() _OUT.main.on() _OUT.left_down.on() _OUT.right_down.on() def all_off(): print(\"[debug] turning all engines", "def 
down_off(): print(\"[debug] turning DOWN engines off\") _OUT.left_down.off() _OUT.right_down.off() def down_on(): print(\"[debug] turning", "UP engines on\") _OUT.left_down.off() _OUT.right_down.off() _OUT.main.on() _OUT.left_up.on() _OUT.right_up.on() def down_off(): print(\"[debug] turning DOWN", "_OUT.left_up.off() _OUT.RIGHT_UP.off() def up_on(): print(\"[debug] turning UP engines on\") _OUT.left_down.off() _OUT.right_down.off() _OUT.main.on() _OUT.left_up.on()", "turning UP engines off\") _OUT.left_up.off() _OUT.RIGHT_UP.off() def up_on(): print(\"[debug] turning UP engines on\")", "turning DOWN engines off\") _OUT.left_down.off() _OUT.right_down.off() def down_on(): print(\"[debug] turning DOWN engines on\")", "DOWN engines on\") _OUT.left_up.off() _OUT.right_up.off() _OUT.main.on() _OUT.left_down.on() _OUT.right_down.on() def all_off(): print(\"[debug] turning all", "on\") _OUT.left_down.off() _OUT.right_down.off() _OUT.main.on() _OUT.left_up.on() _OUT.right_up.on() def down_off(): print(\"[debug] turning DOWN engines off\")", "as gpio def up_off(): print(\"[debug] turning UP engines off\") _OUT.left_up.off() _OUT.RIGHT_UP.off() def up_on():", "_OUT.right_down.off() _OUT.main.on() _OUT.left_up.on() _OUT.right_up.on() def down_off(): print(\"[debug] turning DOWN engines off\") _OUT.left_down.off() _OUT.right_down.off()", "down_off(): print(\"[debug] turning DOWN engines off\") _OUT.left_down.off() _OUT.right_down.off() def down_on(): print(\"[debug] turning DOWN", "_OUT.RIGHT_UP.off() def up_on(): print(\"[debug] turning UP engines on\") _OUT.left_down.off() _OUT.right_down.off() _OUT.main.on() _OUT.left_up.on() _OUT.right_up.on()", "_OUT.main.on() _OUT.left_up.on() _OUT.right_up.on() def down_off(): print(\"[debug] turning DOWN engines off\") _OUT.left_down.off() _OUT.right_down.off() def", "_OUT.right_down.on() def all_off(): print(\"[debug] turning all engines off\") _OUT.main.off() _OUT.left_up.off() _OUT.right_up.off() _OUT.left_down.off() 
_OUT.right_down.off()", "on\") _OUT.left_up.off() _OUT.right_up.off() _OUT.main.on() _OUT.left_down.on() _OUT.right_down.on() def all_off(): print(\"[debug] turning all engines off\")", "up_on(): print(\"[debug] turning UP engines on\") _OUT.left_down.off() _OUT.right_down.off() _OUT.main.on() _OUT.left_up.on() _OUT.right_up.on() def down_off():", "_OUT.right_up.off() _OUT.main.on() _OUT.left_down.on() _OUT.right_down.on() def all_off(): print(\"[debug] turning all engines off\") _OUT.main.off() _OUT.left_up.off()", "engines off\") _OUT.left_up.off() _OUT.RIGHT_UP.off() def up_on(): print(\"[debug] turning UP engines on\") _OUT.left_down.off() _OUT.right_down.off()", "print(\"[debug] turning UP engines on\") _OUT.left_down.off() _OUT.right_down.off() _OUT.main.on() _OUT.left_up.on() _OUT.right_up.on() def down_off(): print(\"[debug]", "print(\"[debug] turning all engines off\") _OUT.main.off() _OUT.left_up.off() _OUT.right_up.off() _OUT.left_down.off() _OUT.right_down.off() def initialize(outputs): global", "UP engines off\") _OUT.left_up.off() _OUT.RIGHT_UP.off() def up_on(): print(\"[debug] turning UP engines on\") _OUT.left_down.off()", "down_on(): print(\"[debug] turning DOWN engines on\") _OUT.left_up.off() _OUT.right_up.off() _OUT.main.on() _OUT.left_down.on() _OUT.right_down.on() def all_off():", "off\") _OUT.main.off() _OUT.left_up.off() _OUT.right_up.off() _OUT.left_down.off() _OUT.right_down.off() def initialize(outputs): global _OUT _OUT = outputs", "engines off\") _OUT.left_down.off() _OUT.right_down.off() def down_on(): print(\"[debug] turning DOWN engines on\") _OUT.left_up.off() _OUT.right_up.off()", "_OUT.left_down.on() _OUT.right_down.on() def all_off(): print(\"[debug] turning all engines off\") _OUT.main.off() _OUT.left_up.off() _OUT.right_up.off() _OUT.left_down.off()", "_OUT.right_up.on() def down_off(): print(\"[debug] turning DOWN engines off\") _OUT.left_down.off() _OUT.right_down.off() def down_on(): print(\"[debug]", "all engines off\") 
_OUT.main.off() _OUT.left_up.off() _OUT.right_up.off() _OUT.left_down.off() _OUT.right_down.off() def initialize(outputs): global _OUT _OUT", "_OUT.left_up.on() _OUT.right_up.on() def down_off(): print(\"[debug] turning DOWN engines off\") _OUT.left_down.off() _OUT.right_down.off() def down_on():", "turning UP engines on\") _OUT.left_down.off() _OUT.right_down.off() _OUT.main.on() _OUT.left_up.on() _OUT.right_up.on() def down_off(): print(\"[debug] turning", "_OUT.main.off() _OUT.left_up.off() _OUT.right_up.off() _OUT.left_down.off() _OUT.right_down.off() def initialize(outputs): global _OUT _OUT = outputs all_off()", "all_off(): print(\"[debug] turning all engines off\") _OUT.main.off() _OUT.left_up.off() _OUT.right_up.off() _OUT.left_down.off() _OUT.right_down.off() def initialize(outputs):", "off\") _OUT.left_down.off() _OUT.right_down.off() def down_on(): print(\"[debug] turning DOWN engines on\") _OUT.left_up.off() _OUT.right_up.off() _OUT.main.on()", "engines off\") _OUT.main.off() _OUT.left_up.off() _OUT.right_up.off() _OUT.left_down.off() _OUT.right_down.off() def initialize(outputs): global _OUT _OUT =", "_OUT.main.on() _OUT.left_down.on() _OUT.right_down.on() def all_off(): print(\"[debug] turning all engines off\") _OUT.main.off() _OUT.left_up.off() _OUT.right_up.off()", "_OUT.left_down.off() _OUT.right_down.off() _OUT.main.on() _OUT.left_up.on() _OUT.right_up.on() def down_off(): print(\"[debug] turning DOWN engines off\") _OUT.left_down.off()", "def up_off(): print(\"[debug] turning UP engines off\") _OUT.left_up.off() _OUT.RIGHT_UP.off() def up_on(): print(\"[debug] turning", "turning all engines off\") _OUT.main.off() _OUT.left_up.off() _OUT.right_up.off() _OUT.left_down.off() _OUT.right_down.off() def initialize(outputs): global _OUT", "print(\"[debug] turning DOWN engines on\") _OUT.left_up.off() _OUT.right_up.off() _OUT.main.on() _OUT.left_down.on() _OUT.right_down.on() def all_off(): print(\"[debug]", "import RPi.GPIO as gpio def up_off(): 
print(\"[debug] turning UP engines off\") _OUT.left_up.off() _OUT.RIGHT_UP.off()", "_OUT.left_up.off() _OUT.right_up.off() _OUT.main.on() _OUT.left_down.on() _OUT.right_down.on() def all_off(): print(\"[debug] turning all engines off\") _OUT.main.off()", "up_off(): print(\"[debug] turning UP engines off\") _OUT.left_up.off() _OUT.RIGHT_UP.off() def up_on(): print(\"[debug] turning UP", "engines on\") _OUT.left_down.off() _OUT.right_down.off() _OUT.main.on() _OUT.left_up.on() _OUT.right_up.on() def down_off(): print(\"[debug] turning DOWN engines" ]
[ "res = \"unique\" self.assertEqual(us23(df), res) def test5(self): individuals = Project02.createIndividualsDataFrame('seed.ged') df = individuals[0:11]", "= Project02.createIndividualsDataFrame('seed.ged') df = individuals[0:3] df = df.append(individuals.iloc[0]).reset_index(drop = True) res = \"not", "df = individuals[0:5] res = \"unique\" self.assertEqual(us23(df), res) def test3(self): individuals = Project02.createIndividualsDataFrame('seed.ged')", "sys.path.append(os.path.abspath('../Team-4-Code/src')) cwd = os.getcwd() os.chdir(os.path.join(cwd, 'seeds')) from us23 import us23 import Project02 import", "= True) res = \"not unique\" self.assertEqual(us23(df), res) def test2(self): individuals = Project02.createIndividualsDataFrame('seed.ged')", "df.append(individuals.iloc[3]).reset_index(drop = True) res = \"not unique\" self.assertEqual(us23(df), res) def test4(self): individuals =", "True) res = \"not unique\" self.assertEqual(us23(df), res) def test2(self): individuals = Project02.createIndividualsDataFrame('seed.ged') df", "def test5(self): individuals = Project02.createIndividualsDataFrame('seed.ged') df = individuals[0:11] df = df.append(individuals.iloc[4]).reset_index(drop = True)", "= individuals[0:9] df = df.append(individuals.iloc[11]).reset_index(drop = True) res = \"unique\" self.assertEqual(us23(df), res) def", "def test1(self): individuals = Project02.createIndividualsDataFrame('seed.ged') df = individuals[0:3] df = df.append(individuals.iloc[0]).reset_index(drop = True)", "\"not unique\" self.assertEqual(us23(df), res) def test4(self): individuals = Project02.createIndividualsDataFrame('seed.ged') df = individuals[0:9] df", "unique\" self.assertEqual(us23(df), res) def test4(self): individuals = Project02.createIndividualsDataFrame('seed.ged') df = individuals[0:9] df =", "\"unique\" self.assertEqual(us23(df), res) def test3(self): individuals = Project02.createIndividualsDataFrame('seed.ged') df = individuals[0:7] df =", 
"Project02.createIndividualsDataFrame('seed.ged') df = individuals[0:3] df = df.append(individuals.iloc[0]).reset_index(drop = True) res = \"not unique\"", "= \"unique\" self.assertEqual(us23(df), res) def test5(self): individuals = Project02.createIndividualsDataFrame('seed.ged') df = individuals[0:11] df", "self.assertEqual(us23(df), res) def test4(self): individuals = Project02.createIndividualsDataFrame('seed.ged') df = individuals[0:9] df = df.append(individuals.iloc[11]).reset_index(drop", "us23 import Project02 import unittest class us23_test(unittest.TestCase): def test1(self): individuals = Project02.createIndividualsDataFrame('seed.ged') df", "= True) res = \"unique\" self.assertEqual(us23(df), res) def test5(self): individuals = Project02.createIndividualsDataFrame('seed.ged') df", "df = df.append(individuals.iloc[0]).reset_index(drop = True) res = \"not unique\" self.assertEqual(us23(df), res) def test2(self):", "= df.append(individuals.iloc[0]).reset_index(drop = True) res = \"not unique\" self.assertEqual(us23(df), res) def test2(self): individuals", "df = individuals[0:3] df = df.append(individuals.iloc[0]).reset_index(drop = True) res = \"not unique\" self.assertEqual(us23(df),", "= individuals[0:7] df = df.append(individuals.iloc[3]).reset_index(drop = True) res = \"not unique\" self.assertEqual(us23(df), res)", "test3(self): individuals = Project02.createIndividualsDataFrame('seed.ged') df = individuals[0:7] df = df.append(individuals.iloc[3]).reset_index(drop = True) res", "14:29:04 2020 @author: ptrda \"\"\" import os os.chdir(os.path.dirname(os.path.abspath('../tests'))) import sys sys.path.append(os.path.abspath('../Team-4-Code/src/UserStories')) sys.path.append(os.path.abspath('../Team-4-Code/src')) cwd", "Created on Mon Oct 5 14:29:04 2020 @author: ptrda \"\"\" import os os.chdir(os.path.dirname(os.path.abspath('../tests')))", "class us23_test(unittest.TestCase): def test1(self): individuals = Project02.createIndividualsDataFrame('seed.ged') 
df = individuals[0:3] df = df.append(individuals.iloc[0]).reset_index(drop", "individuals[0:3] df = df.append(individuals.iloc[0]).reset_index(drop = True) res = \"not unique\" self.assertEqual(us23(df), res) def", "individuals = Project02.createIndividualsDataFrame('seed.ged') df = individuals[0:5] res = \"unique\" self.assertEqual(us23(df), res) def test3(self):", "= \"not unique\" self.assertEqual(us23(df), res) def test2(self): individuals = Project02.createIndividualsDataFrame('seed.ged') df = individuals[0:5]", "sys.path.append(os.path.abspath('../Team-4-Code/src/UserStories')) sys.path.append(os.path.abspath('../Team-4-Code/src')) cwd = os.getcwd() os.chdir(os.path.join(cwd, 'seeds')) from us23 import us23 import Project02", "= \"unique\" self.assertEqual(us23(df), res) def test3(self): individuals = Project02.createIndividualsDataFrame('seed.ged') df = individuals[0:7] df", "import Project02 import unittest class us23_test(unittest.TestCase): def test1(self): individuals = Project02.createIndividualsDataFrame('seed.ged') df =", "self.assertEqual(us23(df), res) def test2(self): individuals = Project02.createIndividualsDataFrame('seed.ged') df = individuals[0:5] res = \"unique\"", "res) def test4(self): individuals = Project02.createIndividualsDataFrame('seed.ged') df = individuals[0:9] df = df.append(individuals.iloc[11]).reset_index(drop =", "individuals[0:11] df = df.append(individuals.iloc[4]).reset_index(drop = True) res = \"not unique\" self.assertEqual(us23(df), res) unittest.main(argv=['first-arg-is-ignored'],", "\"unique\" self.assertEqual(us23(df), res) def test5(self): individuals = Project02.createIndividualsDataFrame('seed.ged') df = individuals[0:11] df =", "individuals = Project02.createIndividualsDataFrame('seed.ged') df = individuals[0:11] df = df.append(individuals.iloc[4]).reset_index(drop = True) res =", "individuals[0:7] df = df.append(individuals.iloc[3]).reset_index(drop = True) res = \"not unique\" self.assertEqual(us23(df), res) def", 
"import unittest class us23_test(unittest.TestCase): def test1(self): individuals = Project02.createIndividualsDataFrame('seed.ged') df = individuals[0:3] df", "df = df.append(individuals.iloc[4]).reset_index(drop = True) res = \"not unique\" self.assertEqual(us23(df), res) unittest.main(argv=['first-arg-is-ignored'], exit=False)", "us23 import us23 import Project02 import unittest class us23_test(unittest.TestCase): def test1(self): individuals =", "Project02.createIndividualsDataFrame('seed.ged') df = individuals[0:5] res = \"unique\" self.assertEqual(us23(df), res) def test3(self): individuals =", "= individuals[0:5] res = \"unique\" self.assertEqual(us23(df), res) def test3(self): individuals = Project02.createIndividualsDataFrame('seed.ged') df", "df = individuals[0:9] df = df.append(individuals.iloc[11]).reset_index(drop = True) res = \"unique\" self.assertEqual(us23(df), res)", "def test2(self): individuals = Project02.createIndividualsDataFrame('seed.ged') df = individuals[0:5] res = \"unique\" self.assertEqual(us23(df), res)", "= os.getcwd() os.chdir(os.path.join(cwd, 'seeds')) from us23 import us23 import Project02 import unittest class", "df = df.append(individuals.iloc[3]).reset_index(drop = True) res = \"not unique\" self.assertEqual(us23(df), res) def test4(self):", "= Project02.createIndividualsDataFrame('seed.ged') df = individuals[0:11] df = df.append(individuals.iloc[4]).reset_index(drop = True) res = \"not", "@author: ptrda \"\"\" import os os.chdir(os.path.dirname(os.path.abspath('../tests'))) import sys sys.path.append(os.path.abspath('../Team-4-Code/src/UserStories')) sys.path.append(os.path.abspath('../Team-4-Code/src')) cwd = os.getcwd()", "on Mon Oct 5 14:29:04 2020 @author: ptrda \"\"\" import os os.chdir(os.path.dirname(os.path.abspath('../tests'))) import", "unique\" self.assertEqual(us23(df), res) def test2(self): individuals = Project02.createIndividualsDataFrame('seed.ged') df = individuals[0:5] res =", "-*- coding: utf-8 -*- \"\"\" 
Created on Mon Oct 5 14:29:04 2020 @author:", "= Project02.createIndividualsDataFrame('seed.ged') df = individuals[0:5] res = \"unique\" self.assertEqual(us23(df), res) def test3(self): individuals", "Project02 import unittest class us23_test(unittest.TestCase): def test1(self): individuals = Project02.createIndividualsDataFrame('seed.ged') df = individuals[0:3]", "individuals = Project02.createIndividualsDataFrame('seed.ged') df = individuals[0:9] df = df.append(individuals.iloc[11]).reset_index(drop = True) res =", "res = \"not unique\" self.assertEqual(us23(df), res) def test2(self): individuals = Project02.createIndividualsDataFrame('seed.ged') df =", "df.append(individuals.iloc[0]).reset_index(drop = True) res = \"not unique\" self.assertEqual(us23(df), res) def test2(self): individuals =", "res = \"unique\" self.assertEqual(us23(df), res) def test3(self): individuals = Project02.createIndividualsDataFrame('seed.ged') df = individuals[0:7]", "individuals = Project02.createIndividualsDataFrame('seed.ged') df = individuals[0:7] df = df.append(individuals.iloc[3]).reset_index(drop = True) res =", "Mon Oct 5 14:29:04 2020 @author: ptrda \"\"\" import os os.chdir(os.path.dirname(os.path.abspath('../tests'))) import sys", "test4(self): individuals = Project02.createIndividualsDataFrame('seed.ged') df = individuals[0:9] df = df.append(individuals.iloc[11]).reset_index(drop = True) res", "ptrda \"\"\" import os os.chdir(os.path.dirname(os.path.abspath('../tests'))) import sys sys.path.append(os.path.abspath('../Team-4-Code/src/UserStories')) sys.path.append(os.path.abspath('../Team-4-Code/src')) cwd = os.getcwd() os.chdir(os.path.join(cwd,", "individuals = Project02.createIndividualsDataFrame('seed.ged') df = individuals[0:3] df = df.append(individuals.iloc[0]).reset_index(drop = True) res =", "Project02.createIndividualsDataFrame('seed.ged') df = individuals[0:9] df = df.append(individuals.iloc[11]).reset_index(drop = True) res = \"unique\" 
self.assertEqual(us23(df),", "import os os.chdir(os.path.dirname(os.path.abspath('../tests'))) import sys sys.path.append(os.path.abspath('../Team-4-Code/src/UserStories')) sys.path.append(os.path.abspath('../Team-4-Code/src')) cwd = os.getcwd() os.chdir(os.path.join(cwd, 'seeds')) from", "utf-8 -*- \"\"\" Created on Mon Oct 5 14:29:04 2020 @author: ptrda \"\"\"", "\"not unique\" self.assertEqual(us23(df), res) def test2(self): individuals = Project02.createIndividualsDataFrame('seed.ged') df = individuals[0:5] res", "from us23 import us23 import Project02 import unittest class us23_test(unittest.TestCase): def test1(self): individuals", "= Project02.createIndividualsDataFrame('seed.ged') df = individuals[0:9] df = df.append(individuals.iloc[11]).reset_index(drop = True) res = \"unique\"", "= individuals[0:11] df = df.append(individuals.iloc[4]).reset_index(drop = True) res = \"not unique\" self.assertEqual(us23(df), res)", "res) def test5(self): individuals = Project02.createIndividualsDataFrame('seed.ged') df = individuals[0:11] df = df.append(individuals.iloc[4]).reset_index(drop =", "= True) res = \"not unique\" self.assertEqual(us23(df), res) def test4(self): individuals = Project02.createIndividualsDataFrame('seed.ged')", "res) def test2(self): individuals = Project02.createIndividualsDataFrame('seed.ged') df = individuals[0:5] res = \"unique\" self.assertEqual(us23(df),", "individuals[0:9] df = df.append(individuals.iloc[11]).reset_index(drop = True) res = \"unique\" self.assertEqual(us23(df), res) def test5(self):", "self.assertEqual(us23(df), res) def test3(self): individuals = Project02.createIndividualsDataFrame('seed.ged') df = individuals[0:7] df = df.append(individuals.iloc[3]).reset_index(drop", "Oct 5 14:29:04 2020 @author: ptrda \"\"\" import os os.chdir(os.path.dirname(os.path.abspath('../tests'))) import sys sys.path.append(os.path.abspath('../Team-4-Code/src/UserStories'))", "-*- \"\"\" Created on Mon Oct 5 14:29:04 2020 @author: ptrda \"\"\" 
import", "2020 @author: ptrda \"\"\" import os os.chdir(os.path.dirname(os.path.abspath('../tests'))) import sys sys.path.append(os.path.abspath('../Team-4-Code/src/UserStories')) sys.path.append(os.path.abspath('../Team-4-Code/src')) cwd =", "= \"not unique\" self.assertEqual(us23(df), res) def test4(self): individuals = Project02.createIndividualsDataFrame('seed.ged') df = individuals[0:9]", "True) res = \"not unique\" self.assertEqual(us23(df), res) def test4(self): individuals = Project02.createIndividualsDataFrame('seed.ged') df", "# -*- coding: utf-8 -*- \"\"\" Created on Mon Oct 5 14:29:04 2020", "os.chdir(os.path.join(cwd, 'seeds')) from us23 import us23 import Project02 import unittest class us23_test(unittest.TestCase): def", "= individuals[0:3] df = df.append(individuals.iloc[0]).reset_index(drop = True) res = \"not unique\" self.assertEqual(us23(df), res)", "os os.chdir(os.path.dirname(os.path.abspath('../tests'))) import sys sys.path.append(os.path.abspath('../Team-4-Code/src/UserStories')) sys.path.append(os.path.abspath('../Team-4-Code/src')) cwd = os.getcwd() os.chdir(os.path.join(cwd, 'seeds')) from us23", "cwd = os.getcwd() os.chdir(os.path.join(cwd, 'seeds')) from us23 import us23 import Project02 import unittest", "test5(self): individuals = Project02.createIndividualsDataFrame('seed.ged') df = individuals[0:11] df = df.append(individuals.iloc[4]).reset_index(drop = True) res", "def test4(self): individuals = Project02.createIndividualsDataFrame('seed.ged') df = individuals[0:9] df = df.append(individuals.iloc[11]).reset_index(drop = True)", "\"\"\" Created on Mon Oct 5 14:29:04 2020 @author: ptrda \"\"\" import os", "test1(self): individuals = Project02.createIndividualsDataFrame('seed.ged') df = individuals[0:3] df = df.append(individuals.iloc[0]).reset_index(drop = True) res", "df.append(individuals.iloc[11]).reset_index(drop = True) res = \"unique\" self.assertEqual(us23(df), res) def test5(self): individuals = 
Project02.createIndividualsDataFrame('seed.ged')", "us23_test(unittest.TestCase): def test1(self): individuals = Project02.createIndividualsDataFrame('seed.ged') df = individuals[0:3] df = df.append(individuals.iloc[0]).reset_index(drop =", "res) def test3(self): individuals = Project02.createIndividualsDataFrame('seed.ged') df = individuals[0:7] df = df.append(individuals.iloc[3]).reset_index(drop =", "'seeds')) from us23 import us23 import Project02 import unittest class us23_test(unittest.TestCase): def test1(self):", "os.chdir(os.path.dirname(os.path.abspath('../tests'))) import sys sys.path.append(os.path.abspath('../Team-4-Code/src/UserStories')) sys.path.append(os.path.abspath('../Team-4-Code/src')) cwd = os.getcwd() os.chdir(os.path.join(cwd, 'seeds')) from us23 import", "individuals[0:5] res = \"unique\" self.assertEqual(us23(df), res) def test3(self): individuals = Project02.createIndividualsDataFrame('seed.ged') df =", "self.assertEqual(us23(df), res) def test5(self): individuals = Project02.createIndividualsDataFrame('seed.ged') df = individuals[0:11] df = df.append(individuals.iloc[4]).reset_index(drop", "Project02.createIndividualsDataFrame('seed.ged') df = individuals[0:7] df = df.append(individuals.iloc[3]).reset_index(drop = True) res = \"not unique\"", "res = \"not unique\" self.assertEqual(us23(df), res) def test4(self): individuals = Project02.createIndividualsDataFrame('seed.ged') df =", "unittest class us23_test(unittest.TestCase): def test1(self): individuals = Project02.createIndividualsDataFrame('seed.ged') df = individuals[0:3] df =", "= df.append(individuals.iloc[11]).reset_index(drop = True) res = \"unique\" self.assertEqual(us23(df), res) def test5(self): individuals =", "os.getcwd() os.chdir(os.path.join(cwd, 'seeds')) from us23 import us23 import Project02 import unittest class us23_test(unittest.TestCase):", "test2(self): individuals = Project02.createIndividualsDataFrame('seed.ged') df = individuals[0:5] res = \"unique\" 
self.assertEqual(us23(df), res) def", "sys sys.path.append(os.path.abspath('../Team-4-Code/src/UserStories')) sys.path.append(os.path.abspath('../Team-4-Code/src')) cwd = os.getcwd() os.chdir(os.path.join(cwd, 'seeds')) from us23 import us23 import", "def test3(self): individuals = Project02.createIndividualsDataFrame('seed.ged') df = individuals[0:7] df = df.append(individuals.iloc[3]).reset_index(drop = True)", "import us23 import Project02 import unittest class us23_test(unittest.TestCase): def test1(self): individuals = Project02.createIndividualsDataFrame('seed.ged')", "Project02.createIndividualsDataFrame('seed.ged') df = individuals[0:11] df = df.append(individuals.iloc[4]).reset_index(drop = True) res = \"not unique\"", "= Project02.createIndividualsDataFrame('seed.ged') df = individuals[0:7] df = df.append(individuals.iloc[3]).reset_index(drop = True) res = \"not", "df = individuals[0:11] df = df.append(individuals.iloc[4]).reset_index(drop = True) res = \"not unique\" self.assertEqual(us23(df),", "df = df.append(individuals.iloc[11]).reset_index(drop = True) res = \"unique\" self.assertEqual(us23(df), res) def test5(self): individuals", "df = individuals[0:7] df = df.append(individuals.iloc[3]).reset_index(drop = True) res = \"not unique\" self.assertEqual(us23(df),", "import sys sys.path.append(os.path.abspath('../Team-4-Code/src/UserStories')) sys.path.append(os.path.abspath('../Team-4-Code/src')) cwd = os.getcwd() os.chdir(os.path.join(cwd, 'seeds')) from us23 import us23", "True) res = \"unique\" self.assertEqual(us23(df), res) def test5(self): individuals = Project02.createIndividualsDataFrame('seed.ged') df =", "coding: utf-8 -*- \"\"\" Created on Mon Oct 5 14:29:04 2020 @author: ptrda", "\"\"\" import os os.chdir(os.path.dirname(os.path.abspath('../tests'))) import sys sys.path.append(os.path.abspath('../Team-4-Code/src/UserStories')) sys.path.append(os.path.abspath('../Team-4-Code/src')) cwd = os.getcwd() os.chdir(os.path.join(cwd, 'seeds'))", "= 
df.append(individuals.iloc[3]).reset_index(drop = True) res = \"not unique\" self.assertEqual(us23(df), res) def test4(self): individuals", "5 14:29:04 2020 @author: ptrda \"\"\" import os os.chdir(os.path.dirname(os.path.abspath('../tests'))) import sys sys.path.append(os.path.abspath('../Team-4-Code/src/UserStories')) sys.path.append(os.path.abspath('../Team-4-Code/src'))" ]
[ "import (PasswordResetView, PasswordResetValidationView) urlpatterns = [ path('password-reset/', PasswordResetView.as_view(), name='password_reset'), path('password-reset/validate/', PasswordResetValidationView.as_view(), name='password_reset-validate'), ]", "import path from .views import (PasswordResetView, PasswordResetValidationView) urlpatterns = [ path('password-reset/', PasswordResetView.as_view(), name='password_reset'),", "from django.urls import path from .views import (PasswordResetView, PasswordResetValidationView) urlpatterns = [ path('password-reset/',", ".views import (PasswordResetView, PasswordResetValidationView) urlpatterns = [ path('password-reset/', PasswordResetView.as_view(), name='password_reset'), path('password-reset/validate/', PasswordResetValidationView.as_view(), name='password_reset-validate'),", "from .views import (PasswordResetView, PasswordResetValidationView) urlpatterns = [ path('password-reset/', PasswordResetView.as_view(), name='password_reset'), path('password-reset/validate/', PasswordResetValidationView.as_view(),", "django.urls import path from .views import (PasswordResetView, PasswordResetValidationView) urlpatterns = [ path('password-reset/', PasswordResetView.as_view(),", "path from .views import (PasswordResetView, PasswordResetValidationView) urlpatterns = [ path('password-reset/', PasswordResetView.as_view(), name='password_reset'), path('password-reset/validate/'," ]
[ "\"b\": 2, \"c\": 3} # Without finally try: value = my_dict[\"a\"] except KeyError:", "KeyError: print(\"A KeyError occurred!\") else: print(\"No error occurred!\") finally: print(\"The finally statement ran!\")", "<reponame>turovod/Otus my_dict = {\"a\": 1, \"b\": 2, \"c\": 3} # Without finally try:", "my_dict = {\"a\": 1, \"b\": 2, \"c\": 3} # Without finally try: value", "else: print(\"No error occurred!\") # With finally try: value = my_dict[\"a\"] except KeyError:", "2, \"c\": 3} # Without finally try: value = my_dict[\"a\"] except KeyError: print(\"A", "except KeyError: print(\"A KeyError occurred!\") else: print(\"No error occurred!\") # With finally try:", "try: value = my_dict[\"a\"] except KeyError: print(\"A KeyError occurred!\") else: print(\"No error occurred!\")", "value = my_dict[\"a\"] except KeyError: print(\"A KeyError occurred!\") else: print(\"No error occurred!\") #", "finally try: value = my_dict[\"a\"] except KeyError: print(\"A KeyError occurred!\") else: print(\"No error", "1, \"b\": 2, \"c\": 3} # Without finally try: value = my_dict[\"a\"] except", "print(\"No error occurred!\") # With finally try: value = my_dict[\"a\"] except KeyError: print(\"A", "# With finally try: value = my_dict[\"a\"] except KeyError: print(\"A KeyError occurred!\") else:", "\"c\": 3} # Without finally try: value = my_dict[\"a\"] except KeyError: print(\"A KeyError", "= my_dict[\"a\"] except KeyError: print(\"A KeyError occurred!\") else: print(\"No error occurred!\") # With", "except KeyError: print(\"A KeyError occurred!\") else: print(\"No error occurred!\") finally: print(\"The finally statement", "my_dict[\"a\"] except KeyError: print(\"A KeyError occurred!\") else: print(\"No error occurred!\") # With finally", "occurred!\") else: print(\"No error occurred!\") # With finally try: value = my_dict[\"a\"] except", "occurred!\") # With finally try: value = my_dict[\"a\"] except KeyError: print(\"A KeyError occurred!\")", "error occurred!\") # With finally try: 
value = my_dict[\"a\"] except KeyError: print(\"A KeyError", "Without finally try: value = my_dict[\"a\"] except KeyError: print(\"A KeyError occurred!\") else: print(\"No", "value = my_dict[\"a\"] except KeyError: print(\"A KeyError occurred!\") else: print(\"No error occurred!\") finally:", "KeyError occurred!\") else: print(\"No error occurred!\") # With finally try: value = my_dict[\"a\"]", "= {\"a\": 1, \"b\": 2, \"c\": 3} # Without finally try: value =", "With finally try: value = my_dict[\"a\"] except KeyError: print(\"A KeyError occurred!\") else: print(\"No", "KeyError: print(\"A KeyError occurred!\") else: print(\"No error occurred!\") # With finally try: value", "= my_dict[\"a\"] except KeyError: print(\"A KeyError occurred!\") else: print(\"No error occurred!\") finally: print(\"The", "print(\"A KeyError occurred!\") else: print(\"No error occurred!\") # With finally try: value =", "my_dict[\"a\"] except KeyError: print(\"A KeyError occurred!\") else: print(\"No error occurred!\") finally: print(\"The finally", "{\"a\": 1, \"b\": 2, \"c\": 3} # Without finally try: value = my_dict[\"a\"]", "# Without finally try: value = my_dict[\"a\"] except KeyError: print(\"A KeyError occurred!\") else:", "3} # Without finally try: value = my_dict[\"a\"] except KeyError: print(\"A KeyError occurred!\")" ]
[ "of the Nazi Party, made him a millionaire. \", \"Hitler had dreams of", "print(reddit.user.me()) return reddit def get_posts_replied_to(): # Have we run this code before? If", "chocolate and sugar. He sometimes took as many as five teaspoons of sugar", "unsuccessful lessons in piano and violin and also dabbled in the flute and", "be put into place.\"] ANALYTICS_JSON = \"posts_analytics.json\" def get_posts_analytics(): if not os.path.isfile(ANALYTICS_JSON): posts_analytics", "def log_into_reddit(): reddit = praw.Reddit('bot1') print(reddit.user.me()) return reddit def get_posts_replied_to(): # Have we", "Have we run this code before? If not, create an empty list if", "this code before? If not, create an empty list if not os.path.isfile(\"posts_replied_to.txt\"): posts_replied_to", "was a voracious ‘sweet tooth’, consuming large amounts of cake, pastries, chocolate and", "the troops super tough. It was found that they could march 55 miles", "print(f\"https://www.reddit.com/r/fakehistoryporn/comments/{post.id}\") URL_keywords = quote(' '.join(unique_keywords)) print(f\"https://en.wikipedia.org/w/index.php?search={URL_keywords}\") return post.id, post.title, unique_keywords def filter_analytics(posts, posts_analytics):", "a comprehensive set of laws for animal protection. When all of these were", "something about animal cruelty. 
With the new Reich, there will be no grounds", "that they could march 55 miles without any tiredness which is pretty amazing.", "post.id in posts_replied_to: continue if not re.search(\"(nazi|hitler|hilter|german)\", post.title, re.IGNORECASE): continue filtered_posts.append(post) return filtered_posts", "we have replied to else: # Read the file into a list and", "frequently.\", \"Though he shunned meat, Hitler was a voracious ‘sweet tooth’, consuming large", "you know that: {LOCATIONS[randomnumber]}\") print(f\"Bot replying to: {post.title} https://www.reddit.com/r/fakehistoryporn/comments/{post.id}\") def store_line(f, line): f.write(line", "Aryans”.\", \"In a pre-cursor to modern stances and laws in this area, the", "# fetch reddit posts posts = fetch_reddit_posts(\"fakehistoryporn\", 10) analytics_filtered = filter_analytics(posts, posts_analytics) #", "there will be no grounds for any form of animal abuse and cruelty.\",", "his tea.\", \"When the regime came into power in 1933, they passed a", "# If we have run the code before, load the list of posts", "had short but unsuccessful lessons in piano and violin and also dabbled in", "limit: int) -> list: subreddit = reddit.subreddit(selected_subreddit) return subreddit.hot(limit=limit) def process_post(post, nlp_pipe: Pipeline):", "\"In a pre-cursor to modern stances and laws in this area, the Nazi", "their main objective was to free the world of Jews. However, Hilter unknowingly", "consuming large amounts of cake, pastries, chocolate and sugar. He sometimes took as", "trials when tested and made the troops super tough. 
It was found that", "filtered_posts def reply_to_post(post): # Reply to the post randomnumber = randint(0, len(LOCATIONS)) post.reply(f\"Did", "file into a list and remove any empty values with open(\"posts_replied_to.txt\", \"r\") as", "initiate_nlp() # create posts_analytics posts_analytics = get_posts_analytics() # fetch reddit posts posts =", "many, <NAME> was ready to target Maurice for expulsion. Hitler came to the", "sentence in sentences: for word in sentence.words: if word.upos not in ['NOUN', 'VERB',", "respond to filtered posts with open(\"posts_replied_to.txt\", \"a\") as f: for post in filtered_posts:", "posts: if post.id in posts_replied_to: continue if not re.search(\"(nazi|hitler|hilter|german)\", post.title, re.IGNORECASE): continue filtered_posts.append(post)", "Nazi Party, made him a millionaire. \", \"Hitler had dreams of playing a", "= quote(' '.join(unique_keywords)) print(f\"https://en.wikipedia.org/w/index.php?search={URL_keywords}\") return post.id, post.title, unique_keywords def filter_analytics(posts, posts_analytics): post_ids =", "variable LOCATIONS = [ \" Sales of Hitler's political autobiography \\\"<NAME>\\\"sometimes referred to", "# check posts replied to posts_replied_to = get_posts_replied_to() # initiate nlp nlp_pipe =", "posts_replied_to) # respond to filtered posts with open(\"posts_replied_to.txt\", \"a\") as f: for post", "before, load the list of posts we have replied to else: # Read", "large amounts of cake, pastries, chocolate and sugar. He sometimes took as many", "smoking and lung cancer which meant that a fierce anti-smoking campaign began under", "os.path.isfile(ANALYTICS_JSON): posts_analytics = [] # If we have run the code before, load", "musical instrument. 
He had short but unsuccessful lessons in piano and violin and", "all duplicates and keep original order unique_keywords = list(dict.fromkeys(keywords)) print(\" \".join(unique_keywords)) print(f\"{post.title}\") print(f\"https://www.reddit.com/r/fakehistoryporn/comments/{post.id}\")", "values with open(ANALYTICS_JSON, \"r\") as f: posts_analytics = loads(f.read()) return posts_analytics def initiate_nlp()", "in keywords] # remove all duplicates and keep original order unique_keywords = list(dict.fromkeys(keywords))", "set of laws for animal protection. When all of these were in place,", "under Hitler. The Nazi leadership strongly condemned smoking and advised the general population", "in place, Hitler said something about animal cruelty. With the new Reich, there", "into power in 1933, they passed a comprehensive set of laws for animal", "# initiate nlp nlp_pipe = initiate_nlp() # create posts_analytics posts_analytics = get_posts_analytics() #", "When all of these were in place, Hitler said something about animal cruelty.", "str, limit: int) -> list: subreddit = reddit.subreddit(selected_subreddit) return subreddit.hot(limit=limit) def process_post(post, nlp_pipe:", "all keywords in lower case keywords = [keyword.lower() for keyword in keywords] #", "successful in trials when tested and made the troops super tough. It was", "in 1933, they passed a comprehensive set of laws for animal protection. When", "of sugar in his tea.\", \"When the regime came into power in 1933,", "any empty values with open(\"posts_replied_to.txt\", \"r\") as f: posts_replied_to = f.read() posts_replied_to =", "to give it up.\", \"During the Second World War, German doctors came up", "= [ \" Sales of Hitler's political autobiography \\\"<NAME>\\\"sometimes referred to as the", "55 miles without any tiredness which is pretty amazing. 
The plan was to", "for keyword in keywords] # remove all duplicates and keep original order unique_keywords", "posts_analytics with open(ANALYTICS_JSON, \"w\") as f: f.write(dumps(posts_analytics)) # filter for keywords filtered_posts =", "shunned meat, Hitler was a voracious ‘sweet tooth’, consuming large amounts of cake,", "cancer which meant that a fierce anti-smoking campaign began under Hitler. The Nazi", "millionaire. \", \"Hitler had dreams of playing a musical instrument. He had short", "in posts_analytics with open(ANALYTICS_JSON, \"w\") as f: f.write(dumps(posts_analytics)) # filter for keywords filtered_posts", "plan was to roll it out to all soldiers serving in the war", "Hitler came to the rescue and made an exception for him and his", "<NAME> was ready to target Maurice for expulsion. Hitler came to the rescue", "with a methamphetamine based experimental drug to increase soldier’s performance. This was very", "post.id in post_ids: continue filtered_posts.append(post) return filtered_posts def get_keywords_from_post(sentences: list): keywords = []", "'.join(unique_keywords)) print(f\"https://en.wikipedia.org/w/index.php?search={URL_keywords}\") return post.id, post.title, unique_keywords def filter_analytics(posts, posts_analytics): post_ids = [post_id for", "unknowingly had a Jewish chauffeur. <NAME> was also his friend and personal chauffeur.", "load the list of posts we have replied to else: # Read the", "a methamphetamine based experimental drug to increase soldier’s performance. This was very successful", "pretty amazing. The plan was to roll it out to all soldiers serving", "any empty values with open(ANALYTICS_JSON, \"r\") as f: posts_analytics = loads(f.read()) return posts_analytics", "got known to many, <NAME> was ready to target Maurice for expulsion. 
Hitler", "values with open(\"posts_replied_to.txt\", \"r\") as f: posts_replied_to = f.read() posts_replied_to = posts_replied_to.split(\"\\n\") posts_replied_to", "found that they could march 55 miles without any tiredness which is pretty", "titles for post in analytics_filtered: nlp_data = process_post(post, nlp_pipe) posts_analytics.append(nlp_data) # store nlp", "have replied to else: # Read the file into a list and remove", "to target Maurice for expulsion. Hitler came to the rescue and made an", "word in sentence.words: if word.upos not in ['NOUN', 'VERB', 'NUM', 'PROPN']: continue keywords.append(word.text)", "int) -> list: subreddit = reddit.subreddit(selected_subreddit) return subreddit.hot(limit=limit) def process_post(post, nlp_pipe: Pipeline): doc", "out to all soldiers serving in the war but the German’s lost before", "was found that they could march 55 miles without any tiredness which is", "[] for post in posts: if post.id in posts_replied_to: continue if not re.search(\"(nazi|hitler|hilter|german)\",", "was also his friend and personal chauffeur. When it got known to many,", "# variable LOCATIONS = [ \" Sales of Hitler's political autobiography \\\"<NAME>\\\"sometimes referred", "posts_analytics.append(nlp_data) # store nlp doc in posts_analytics with open(ANALYTICS_JSON, \"w\") as f: f.write(dumps(posts_analytics))", "to ban smoking. Nazi doctors were the first to establish a link between", "keywords = [] for sentence in sentences: for word in sentence.words: if word.upos", "sugar. He sometimes took as many as five teaspoons of sugar in his", "a voracious ‘sweet tooth’, consuming large amounts of cake, pastries, chocolate and sugar.", "posts_analytics] filtered_posts = [] for post in posts: if post.id in post_ids: continue", "known to many, <NAME> was ready to target Maurice for expulsion. 
Hitler came", "duplicates and keep original order unique_keywords = list(dict.fromkeys(keywords)) print(\" \".join(unique_keywords)) print(f\"{post.title}\") print(f\"https://www.reddit.com/r/fakehistoryporn/comments/{post.id}\") URL_keywords", "and made an exception for him and his brothers. He called them “honorary", "a Jewish chauffeur. <NAME> was also his friend and personal chauffeur. When it", "campaign began under Hitler. The Nazi leadership strongly condemned smoking and advised the", "\\\"<NAME>\\\"sometimes referred to as the bible of the Nazi Party, made him a", "laws in this area, the Nazi party were the first people to ban", "any tiredness which is pretty amazing. The plan was to roll it out", "post.title, re.IGNORECASE): continue filtered_posts.append(post) return filtered_posts def reply_to_post(post): # Reply to the post", "before it could be put into place.\"] ANALYTICS_JSON = \"posts_analytics.json\" def get_posts_analytics(): if", "animal protection. When all of these were in place, Hitler said something about", "with open(ANALYTICS_JSON, \"r\") as f: posts_analytics = loads(f.read()) return posts_analytics def initiate_nlp() ->", "called them “honorary Aryans”.\", \"In a pre-cursor to modern stances and laws in", "# store nlp doc in posts_analytics with open(ANALYTICS_JSON, \"w\") as f: f.write(dumps(posts_analytics)) #", "to increase soldier’s performance. 
This was very successful in trials when tested and", "filter_analytics(posts, posts_analytics) # read submission titles for post in analytics_filtered: nlp_data = process_post(post,", "'NUM', 'PROPN']: continue keywords.append(word.text) return keywords def filter_posts(posts, posts_replied_to): filtered_posts = [] for", "form of animal abuse and cruelty.\", \"It’s already a known fact that during", "keywords filtered_posts = filter_posts(posts, posts_replied_to) # respond to filtered posts with open(\"posts_replied_to.txt\", \"a\")", "the general population to give it up.\", \"During the Second World War, German", "filter_posts(posts, posts_replied_to) # respond to filtered posts with open(\"posts_replied_to.txt\", \"a\") as f: for", "Jewish chauffeur. <NAME> was also his friend and personal chauffeur. When it got", "made him a millionaire. \", \"Hitler had dreams of playing a musical instrument.", "The plan was to roll it out to all soldiers serving in the", "the world of Jews. However, Hilter unknowingly had a Jewish chauffeur. <NAME> was", "Read the file into a list and remove any empty values with open(\"posts_replied_to.txt\",", "performance. This was very successful in trials when tested and made the troops", "for animal protection. When all of these were in place, Hitler said something", "will be no grounds for any form of animal abuse and cruelty.\", \"It’s", "expulsion. Hitler came to the rescue and made an exception for him and", "pre-cursor to modern stances and laws in this area, the Nazi party were", "objective was to free the world of Jews. 
However, Hilter unknowingly had a", "war but the German’s lost before it could be put into place.\"] ANALYTICS_JSON", "initiate_nlp() -> Pipeline: stanza.download('en') nlp_pipe = stanza.Pipeline('en', processors=\"tokenize,pos\") return nlp_pipe def fetch_reddit_posts(selected_subreddit: str,", "the file into a list and remove any empty values with open(\"posts_replied_to.txt\", \"r\")", "put into place.\"] ANALYTICS_JSON = \"posts_analytics.json\" def get_posts_analytics(): if not os.path.isfile(ANALYTICS_JSON): posts_analytics =", "= [] for post in posts: if post.id in posts_replied_to: continue if not", "soldiers serving in the war but the German’s lost before it could be", "miles without any tiredness which is pretty amazing. The plan was to roll", "and his brothers. He called them “honorary Aryans”.\", \"In a pre-cursor to modern", "line): f.write(line + \"\\n\") if __name__ == '__main__': # log into reddit reddit", "but the German’s lost before it could be put into place.\"] ANALYTICS_JSON =", "get_posts_replied_to(): # Have we run this code before? If not, create an empty", "read submission titles for post in analytics_filtered: nlp_data = process_post(post, nlp_pipe) posts_analytics.append(nlp_data) #", "list): keywords = [] for sentence in sentences: for word in sentence.words: if", "Jews. However, Hilter unknowingly had a Jewish chauffeur. <NAME> was also his friend", "before? If not, create an empty list if not os.path.isfile(\"posts_replied_to.txt\"): posts_replied_to = []", "\"During the Second World War, German doctors came up with a methamphetamine based", "filtered_posts def get_keywords_from_post(sentences: list): keywords = [] for sentence in sentences: for word", "made the troops super tough. 
It was found that they could march 55", "dumps from random import randint import stanza import praw import re import os", "{LOCATIONS[randomnumber]}\") print(f\"Bot replying to: {post.title} https://www.reddit.com/r/fakehistoryporn/comments/{post.id}\") def store_line(f, line): f.write(line + \"\\n\") if", "= initiate_nlp() # create posts_analytics posts_analytics = get_posts_analytics() # fetch reddit posts posts", "run the code before, load the list of posts we have replied to", "posts_analytics posts_analytics = get_posts_analytics() # fetch reddit posts posts = fetch_reddit_posts(\"fakehistoryporn\", 10) analytics_filtered", "these were in place, Hitler said something about animal cruelty. With the new", "and harmonica. In the end, he settled for whistling, which he did frequently.\",", "and also dabbled in the flute and harmonica. In the end, he settled", "this area, the Nazi party were the first people to ban smoking. Nazi", "world of Jews. However, Hilter unknowingly had a Jewish chauffeur. <NAME> was also", "open(ANALYTICS_JSON, \"r\") as f: posts_analytics = loads(f.read()) return posts_analytics def initiate_nlp() -> Pipeline:", "URL_keywords = quote(' '.join(unique_keywords)) print(f\"https://en.wikipedia.org/w/index.php?search={URL_keywords}\") return post.id, post.title, unique_keywords def filter_analytics(posts, posts_analytics): post_ids", "and made the troops super tough. 
It was found that they could march", "it up.\", \"During the Second World War, German doctors came up with a", "unique_keywords = list(dict.fromkeys(keywords)) print(\" \".join(unique_keywords)) print(f\"{post.title}\") print(f\"https://www.reddit.com/r/fakehistoryporn/comments/{post.id}\") URL_keywords = quote(' '.join(unique_keywords)) print(f\"https://en.wikipedia.org/w/index.php?search={URL_keywords}\") return", "store_line(f, line): f.write(line + \"\\n\") if __name__ == '__main__': # log into reddit", "reddit posts posts = fetch_reddit_posts(\"fakehistoryporn\", 10) analytics_filtered = filter_analytics(posts, posts_analytics) # read submission", "already a known fact that during Hitler’s reign, their main objective was to", "and violin and also dabbled in the flute and harmonica. In the end,", "= f.read() posts_replied_to = posts_replied_to.split(\"\\n\") posts_replied_to = list(filter(None, posts_replied_to)) return posts_replied_to # variable", "posts_replied_to = get_posts_replied_to() # initiate nlp nlp_pipe = initiate_nlp() # create posts_analytics posts_analytics", "'VERB', 'NUM', 'PROPN']: continue keywords.append(word.text) return keywords def filter_posts(posts, posts_replied_to): filtered_posts = []", "known fact that during Hitler’s reign, their main objective was to free the", "# read submission titles for post in analytics_filtered: nlp_data = process_post(post, nlp_pipe) posts_analytics.append(nlp_data)", "did frequently.\", \"Though he shunned meat, Hitler was a voracious ‘sweet tooth’, consuming", "from random import randint import stanza import praw import re import os from", "def store_line(f, line): f.write(line + \"\\n\") if __name__ == '__main__': # log into", "sentences: for word in sentence.words: if word.upos not in ['NOUN', 'VERB', 'NUM', 'PROPN']:", "“honorary Aryans”.\", \"In a pre-cursor to modern stances and laws in this area,", "could be put into place.\"] ANALYTICS_JSON = \"posts_analytics.json\" def get_posts_analytics(): if 
not os.path.isfile(ANALYTICS_JSON):", "praw.Reddit('bot1') print(reddit.user.me()) return reddit def get_posts_replied_to(): # Have we run this code before?", "not, create an empty list if not os.path.isfile(\"posts_replied_to.txt\"): posts_replied_to = [] # If", "f.read() posts_replied_to = posts_replied_to.split(\"\\n\") posts_replied_to = list(filter(None, posts_replied_to)) return posts_replied_to # variable LOCATIONS", "came up with a methamphetamine based experimental drug to increase soldier’s performance. This", "grounds for any form of animal abuse and cruelty.\", \"It’s already a known", "len(LOCATIONS)) post.reply(f\"Did you know that: {LOCATIONS[randomnumber]}\") print(f\"Bot replying to: {post.title} https://www.reddit.com/r/fakehistoryporn/comments/{post.id}\") def store_line(f,", "settled for whistling, which he did frequently.\", \"Though he shunned meat, Hitler was", "free the world of Jews. However, Hilter unknowingly had a Jewish chauffeur. <NAME>", "os from urllib.parse import quote from stanza import Pipeline def log_into_reddit(): reddit =", "get_posts_replied_to() # initiate nlp nlp_pipe = initiate_nlp() # create posts_analytics posts_analytics = get_posts_analytics()", "fetch_reddit_posts(\"fakehistoryporn\", 10) analytics_filtered = filter_analytics(posts, posts_analytics) # read submission titles for post in", "10) analytics_filtered = filter_analytics(posts, posts_analytics) # read submission titles for post in analytics_filtered:", "\"r\") as f: posts_replied_to = f.read() posts_replied_to = posts_replied_to.split(\"\\n\") posts_replied_to = list(filter(None, posts_replied_to))", "= [] for sentence in sentences: for word in sentence.words: if word.upos not", "keywords = [keyword.lower() for keyword in keywords] # remove all duplicates and keep", "ready to target Maurice for expulsion. 
Hitler came to the rescue and made", "praw import re import os from urllib.parse import quote from stanza import Pipeline", "be no grounds for any form of animal abuse and cruelty.\", \"It’s already", "Maurice for expulsion. Hitler came to the rescue and made an exception for", "if __name__ == '__main__': # log into reddit reddit = log_into_reddit() # check", "in posts_replied_to: continue if not re.search(\"(nazi|hitler|hilter|german)\", post.title, re.IGNORECASE): continue filtered_posts.append(post) return filtered_posts def", "place, Hitler said something about animal cruelty. With the new Reich, there will", "\"Hitler had dreams of playing a musical instrument. He had short but unsuccessful", "it out to all soldiers serving in the war but the German’s lost", "as five teaspoons of sugar in his tea.\", \"When the regime came into", "the flute and harmonica. In the end, he settled for whistling, which he", "keywords def filter_posts(posts, posts_replied_to): filtered_posts = [] for post in posts: if post.id", "in analytics_filtered: nlp_data = process_post(post, nlp_pipe) posts_analytics.append(nlp_data) # store nlp doc in posts_analytics", "were the first people to ban smoking. Nazi doctors were the first to", "doc = nlp_pipe(post.title) keywords = get_keywords_from_post(doc.sentences) # write all keywords in lower case", "brothers. 
He called them “honorary Aryans”.\", \"In a pre-cursor to modern stances and", "He sometimes took as many as five teaspoons of sugar in his tea.\",", "= list(dict.fromkeys(keywords)) print(\" \".join(unique_keywords)) print(f\"{post.title}\") print(f\"https://www.reddit.com/r/fakehistoryporn/comments/{post.id}\") URL_keywords = quote(' '.join(unique_keywords)) print(f\"https://en.wikipedia.org/w/index.php?search={URL_keywords}\") return post.id,", "into reddit reddit = log_into_reddit() # check posts replied to posts_replied_to = get_posts_replied_to()", "posts_replied_to # variable LOCATIONS = [ \" Sales of Hitler's political autobiography \\\"<NAME>\\\"sometimes", "first people to ban smoking. Nazi doctors were the first to establish a", "for post in posts: if post.id in post_ids: continue filtered_posts.append(post) return filtered_posts def", "and cruelty.\", \"It’s already a known fact that during Hitler’s reign, their main", "= randint(0, len(LOCATIONS)) post.reply(f\"Did you know that: {LOCATIONS[randomnumber]}\") print(f\"Bot replying to: {post.title} https://www.reddit.com/r/fakehistoryporn/comments/{post.id}\")", "for him and his brothers. 
He called them “honorary Aryans”.\", \"In a pre-cursor", "process_post(post, nlp_pipe) posts_analytics.append(nlp_data) # store nlp doc in posts_analytics with open(ANALYTICS_JSON, \"w\") as", "subreddit.hot(limit=limit) def process_post(post, nlp_pipe: Pipeline): doc = nlp_pipe(post.title) keywords = get_keywords_from_post(doc.sentences) # write", "no grounds for any form of animal abuse and cruelty.\", \"It’s already a", "in posts_analytics] filtered_posts = [] for post in posts: if post.id in post_ids:", "posts posts = fetch_reddit_posts(\"fakehistoryporn\", 10) analytics_filtered = filter_analytics(posts, posts_analytics) # read submission titles", "empty list if not os.path.isfile(\"posts_replied_to.txt\"): posts_replied_to = [] # If we have run", "for any form of animal abuse and cruelty.\", \"It’s already a known fact", "cruelty. With the new Reich, there will be no grounds for any form", "party were the first people to ban smoking. Nazi doctors were the first", "code before, load the list of posts we have replied to else: #", "was to free the world of Jews. However, Hilter unknowingly had a Jewish", "personal chauffeur. When it got known to many, <NAME> was ready to target", "chauffeur. <NAME> was also his friend and personal chauffeur. When it got known", "his friend and personal chauffeur. When it got known to many, <NAME> was", "\"w\") as f: f.write(dumps(posts_analytics)) # filter for keywords filtered_posts = filter_posts(posts, posts_replied_to) #", "‘sweet tooth’, consuming large amounts of cake, pastries, chocolate and sugar. He sometimes", "= [post_id for post_id, _, _ in posts_analytics] filtered_posts = [] for post", "any form of animal abuse and cruelty.\", \"It’s already a known fact that", "give it up.\", \"During the Second World War, German doctors came up with", "dreams of playing a musical instrument. 
He had short but unsuccessful lessons in", "= nlp_pipe(post.title) keywords = get_keywords_from_post(doc.sentences) # write all keywords in lower case keywords", "is pretty amazing. The plan was to roll it out to all soldiers", "import stanza import praw import re import os from urllib.parse import quote from", "abuse and cruelty.\", \"It’s already a known fact that during Hitler’s reign, their", "remove any empty values with open(ANALYTICS_JSON, \"r\") as f: posts_analytics = loads(f.read()) return", "f.write(line + \"\\n\") if __name__ == '__main__': # log into reddit reddit =", "to many, <NAME> was ready to target Maurice for expulsion. Hitler came to", "in post_ids: continue filtered_posts.append(post) return filtered_posts def get_keywords_from_post(sentences: list): keywords = [] for", "of cake, pastries, chocolate and sugar. He sometimes took as many as five", "if not os.path.isfile(\"posts_replied_to.txt\"): posts_replied_to = [] # If we have run the code", "post_ids: continue filtered_posts.append(post) return filtered_posts def get_keywords_from_post(sentences: list): keywords = [] for sentence", "voracious ‘sweet tooth’, consuming large amounts of cake, pastries, chocolate and sugar. He", "comprehensive set of laws for animal protection. When all of these were in", "continue filtered_posts.append(post) return filtered_posts def reply_to_post(post): # Reply to the post randomnumber =", "German’s lost before it could be put into place.\"] ANALYTICS_JSON = \"posts_analytics.json\" def", "# Have we run this code before? If not, create an empty list", "rescue and made an exception for him and his brothers. 
He called them", "= fetch_reddit_posts(\"fakehistoryporn\", 10) analytics_filtered = filter_analytics(posts, posts_analytics) # read submission titles for post", "post in posts: if post.id in posts_replied_to: continue if not re.search(\"(nazi|hitler|hilter|german)\", post.title, re.IGNORECASE):", "== '__main__': # log into reddit reddit = log_into_reddit() # check posts replied", "they could march 55 miles without any tiredness which is pretty amazing. The", "first to establish a link between smoking and lung cancer which meant that", "smoking. Nazi doctors were the first to establish a link between smoking and", "quote(' '.join(unique_keywords)) print(f\"https://en.wikipedia.org/w/index.php?search={URL_keywords}\") return post.id, post.title, unique_keywords def filter_analytics(posts, posts_analytics): post_ids = [post_id", "as f: posts_replied_to = f.read() posts_replied_to = posts_replied_to.split(\"\\n\") posts_replied_to = list(filter(None, posts_replied_to)) return", "an exception for him and his brothers. He called them “honorary Aryans”.\", \"In", "keywords.append(word.text) return keywords def filter_posts(posts, posts_replied_to): filtered_posts = [] for post in posts:", "filtered_posts.append(post) return filtered_posts def reply_to_post(post): # Reply to the post randomnumber = randint(0,", "teaspoons of sugar in his tea.\", \"When the regime came into power in", "keyword in keywords] # remove all duplicates and keep original order unique_keywords =", "code before? 
If not, create an empty list if not os.path.isfile(\"posts_replied_to.txt\"): posts_replied_to =", "doctors came up with a methamphetamine based experimental drug to increase soldier’s performance.", "Pipeline: stanza.download('en') nlp_pipe = stanza.Pipeline('en', processors=\"tokenize,pos\") return nlp_pipe def fetch_reddit_posts(selected_subreddit: str, limit: int)", "log_into_reddit() # check posts replied to posts_replied_to = get_posts_replied_to() # initiate nlp nlp_pipe", "animal cruelty. With the new Reich, there will be no grounds for any", "a list and remove any empty values with open(\"posts_replied_to.txt\", \"r\") as f: posts_replied_to", "nlp_pipe(post.title) keywords = get_keywords_from_post(doc.sentences) # write all keywords in lower case keywords =", "def reply_to_post(post): # Reply to the post randomnumber = randint(0, len(LOCATIONS)) post.reply(f\"Did you", "stanza.download('en') nlp_pipe = stanza.Pipeline('en', processors=\"tokenize,pos\") return nlp_pipe def fetch_reddit_posts(selected_subreddit: str, limit: int) ->", "came into power in 1933, they passed a comprehensive set of laws for", "the first people to ban smoking. Nazi doctors were the first to establish", "tough. It was found that they could march 55 miles without any tiredness", "If we have run the code before, load the list of posts we", "new Reich, there will be no grounds for any form of animal abuse", "filtered_posts.append(post) return filtered_posts def get_keywords_from_post(sentences: list): keywords = [] for sentence in sentences:", "print(f\"{post.title}\") print(f\"https://www.reddit.com/r/fakehistoryporn/comments/{post.id}\") URL_keywords = quote(' '.join(unique_keywords)) print(f\"https://en.wikipedia.org/w/index.php?search={URL_keywords}\") return post.id, post.title, unique_keywords def filter_analytics(posts,", "if word.upos not in ['NOUN', 'VERB', 'NUM', 'PROPN']: continue keywords.append(word.text) return keywords def", "run this code before? 
If not, create an empty list if not os.path.isfile(\"posts_replied_to.txt\"):", "open(\"posts_replied_to.txt\", \"r\") as f: posts_replied_to = f.read() posts_replied_to = posts_replied_to.split(\"\\n\") posts_replied_to = list(filter(None,", "post randomnumber = randint(0, len(LOCATIONS)) post.reply(f\"Did you know that: {LOCATIONS[randomnumber]}\") print(f\"Bot replying to:", "stances and laws in this area, the Nazi party were the first people", "serving in the war but the German’s lost before it could be put", "\"When the regime came into power in 1933, they passed a comprehensive set", "many as five teaspoons of sugar in his tea.\", \"When the regime came", "posts_replied_to)) return posts_replied_to # variable LOCATIONS = [ \" Sales of Hitler's political", "began under Hitler. The Nazi leadership strongly condemned smoking and advised the general", "# respond to filtered posts with open(\"posts_replied_to.txt\", \"a\") as f: for post in", "\".join(unique_keywords)) print(f\"{post.title}\") print(f\"https://www.reddit.com/r/fakehistoryporn/comments/{post.id}\") URL_keywords = quote(' '.join(unique_keywords)) print(f\"https://en.wikipedia.org/w/index.php?search={URL_keywords}\") return post.id, post.title, unique_keywords def", "loads, dumps from random import randint import stanza import praw import re import", "and laws in this area, the Nazi party were the first people to", "all of these were in place, Hitler said something about animal cruelty. 
With", "def get_posts_analytics(): if not os.path.isfile(ANALYTICS_JSON): posts_analytics = [] # If we have run", "os.path.isfile(\"posts_replied_to.txt\"): posts_replied_to = [] # If we have run the code before, load", "json import loads, dumps from random import randint import stanza import praw import", "import Pipeline def log_into_reddit(): reddit = praw.Reddit('bot1') print(reddit.user.me()) return reddit def get_posts_replied_to(): #", "population to give it up.\", \"During the Second World War, German doctors came", "list(dict.fromkeys(keywords)) print(\" \".join(unique_keywords)) print(f\"{post.title}\") print(f\"https://www.reddit.com/r/fakehistoryporn/comments/{post.id}\") URL_keywords = quote(' '.join(unique_keywords)) print(f\"https://en.wikipedia.org/w/index.php?search={URL_keywords}\") return post.id, post.title,", "said something about animal cruelty. With the new Reich, there will be no", "f.write(dumps(posts_analytics)) # filter for keywords filtered_posts = filter_posts(posts, posts_replied_to) # respond to filtered", "posts_analytics = [] # If we have run the code before, load the", "keep original order unique_keywords = list(dict.fromkeys(keywords)) print(\" \".join(unique_keywords)) print(f\"{post.title}\") print(f\"https://www.reddit.com/r/fakehistoryporn/comments/{post.id}\") URL_keywords = quote('", "open(\"posts_replied_to.txt\", \"a\") as f: for post in filtered_posts: reply_to_post(post) # store post_id in", "get_keywords_from_post(sentences: list): keywords = [] for sentence in sentences: for word in sentence.words:", "posts_analytics = loads(f.read()) return posts_analytics def initiate_nlp() -> Pipeline: stanza.download('en') nlp_pipe = stanza.Pipeline('en',", "modern stances and laws in this area, the Nazi party were the first", "with open(ANALYTICS_JSON, \"w\") as f: f.write(dumps(posts_analytics)) # filter for keywords filtered_posts = filter_posts(posts,", "five teaspoons of sugar in his tea.\", \"When the regime came into power", 
"to: {post.title} https://www.reddit.com/r/fakehistoryporn/comments/{post.id}\") def store_line(f, line): f.write(line + \"\\n\") if __name__ == '__main__':", "World War, German doctors came up with a methamphetamine based experimental drug to", "they passed a comprehensive set of laws for animal protection. When all of", "return posts_replied_to # variable LOCATIONS = [ \" Sales of Hitler's political autobiography", "as f: for post in filtered_posts: reply_to_post(post) # store post_id in posts_replied_to store_line(f,", "the code before, load the list of posts we have replied to else:", "into a list and remove any empty values with open(\"posts_replied_to.txt\", \"r\") as f:", "'__main__': # log into reddit reddit = log_into_reddit() # check posts replied to", "log_into_reddit(): reddit = praw.Reddit('bot1') print(reddit.user.me()) return reddit def get_posts_replied_to(): # Have we run", "keywords in lower case keywords = [keyword.lower() for keyword in keywords] # remove", "smoking and advised the general population to give it up.\", \"During the Second", "urllib.parse import quote from stanza import Pipeline def log_into_reddit(): reddit = praw.Reddit('bot1') print(reddit.user.me())", "print(\" \".join(unique_keywords)) print(f\"{post.title}\") print(f\"https://www.reddit.com/r/fakehistoryporn/comments/{post.id}\") URL_keywords = quote(' '.join(unique_keywords)) print(f\"https://en.wikipedia.org/w/index.php?search={URL_keywords}\") return post.id, post.title, unique_keywords", "nlp_pipe) posts_analytics.append(nlp_data) # store nlp doc in posts_analytics with open(ANALYTICS_JSON, \"w\") as f:", "[] for post in posts: if post.id in post_ids: continue filtered_posts.append(post) return filtered_posts", "to free the world of Jews. 
However, Hilter unknowingly had a Jewish chauffeur.", "doctors were the first to establish a link between smoking and lung cancer", "= log_into_reddit() # check posts replied to posts_replied_to = get_posts_replied_to() # initiate nlp", "[] # If we have run the code before, load the list of", "posts_replied_to = list(filter(None, posts_replied_to)) return posts_replied_to # variable LOCATIONS = [ \" Sales", "sentence.words: if word.upos not in ['NOUN', 'VERB', 'NUM', 'PROPN']: continue keywords.append(word.text) return keywords", "and keep original order unique_keywords = list(dict.fromkeys(keywords)) print(\" \".join(unique_keywords)) print(f\"{post.title}\") print(f\"https://www.reddit.com/r/fakehistoryporn/comments/{post.id}\") URL_keywords =", "was to roll it out to all soldiers serving in the war but", "when tested and made the troops super tough. It was found that they", "= process_post(post, nlp_pipe) posts_analytics.append(nlp_data) # store nlp doc in posts_analytics with open(ANALYTICS_JSON, \"w\")", "loads(f.read()) return posts_analytics def initiate_nlp() -> Pipeline: stanza.download('en') nlp_pipe = stanza.Pipeline('en', processors=\"tokenize,pos\") return", "nlp_pipe def fetch_reddit_posts(selected_subreddit: str, limit: int) -> list: subreddit = reddit.subreddit(selected_subreddit) return subreddit.hot(limit=limit)", "establish a link between smoking and lung cancer which meant that a fierce", "if not os.path.isfile(ANALYTICS_JSON): posts_analytics = [] # If we have run the code", "amounts of cake, pastries, chocolate and sugar. 
He sometimes took as many as", "to establish a link between smoking and lung cancer which meant that a", "processors=\"tokenize,pos\") return nlp_pipe def fetch_reddit_posts(selected_subreddit: str, limit: int) -> list: subreddit = reddit.subreddit(selected_subreddit)", "Reply to the post randomnumber = randint(0, len(LOCATIONS)) post.reply(f\"Did you know that: {LOCATIONS[randomnumber]}\")", "general population to give it up.\", \"During the Second World War, German doctors", "posts replied to posts_replied_to = get_posts_replied_to() # initiate nlp nlp_pipe = initiate_nlp() #", "a musical instrument. He had short but unsuccessful lessons in piano and violin", "= filter_posts(posts, posts_replied_to) # respond to filtered posts with open(\"posts_replied_to.txt\", \"a\") as f:", "# create posts_analytics posts_analytics = get_posts_analytics() # fetch reddit posts posts = fetch_reddit_posts(\"fakehistoryporn\",", "tooth’, consuming large amounts of cake, pastries, chocolate and sugar. He sometimes took", "-> list: subreddit = reddit.subreddit(selected_subreddit) return subreddit.hot(limit=limit) def process_post(post, nlp_pipe: Pipeline): doc =", "the rescue and made an exception for him and his brothers. He called", "create posts_analytics posts_analytics = get_posts_analytics() # fetch reddit posts posts = fetch_reddit_posts(\"fakehistoryporn\", 10)", "for post in analytics_filtered: nlp_data = process_post(post, nlp_pipe) posts_analytics.append(nlp_data) # store nlp doc", "flute and harmonica. In the end, he settled for whistling, which he did", "him and his brothers. He called them “honorary Aryans”.\", \"In a pre-cursor to", "that a fierce anti-smoking campaign began under Hitler. 
The Nazi leadership strongly condemned", "doc in posts_analytics with open(ANALYTICS_JSON, \"w\") as f: f.write(dumps(posts_analytics)) # filter for keywords", "\"a\") as f: for post in filtered_posts: reply_to_post(post) # store post_id in posts_replied_to", "and advised the general population to give it up.\", \"During the Second World", "place.\"] ANALYTICS_JSON = \"posts_analytics.json\" def get_posts_analytics(): if not os.path.isfile(ANALYTICS_JSON): posts_analytics = [] #", "reddit reddit = log_into_reddit() # check posts replied to posts_replied_to = get_posts_replied_to() #", "as f: f.write(dumps(posts_analytics)) # filter for keywords filtered_posts = filter_posts(posts, posts_replied_to) # respond", "list if not os.path.isfile(\"posts_replied_to.txt\"): posts_replied_to = [] # If we have run the", "\" Sales of Hitler's political autobiography \\\"<NAME>\\\"sometimes referred to as the bible of", "reddit def get_posts_replied_to(): # Have we run this code before? If not, create", "= \"posts_analytics.json\" def get_posts_analytics(): if not os.path.isfile(ANALYTICS_JSON): posts_analytics = [] # If we", "log into reddit reddit = log_into_reddit() # check posts replied to posts_replied_to =", "was ready to target Maurice for expulsion. Hitler came to the rescue and", "that during Hitler’s reign, their main objective was to free the world of", "roll it out to all soldiers serving in the war but the German’s", "def fetch_reddit_posts(selected_subreddit: str, limit: int) -> list: subreddit = reddit.subreddit(selected_subreddit) return subreddit.hot(limit=limit) def", "This was very successful in trials when tested and made the troops super", "def get_keywords_from_post(sentences: list): keywords = [] for sentence in sentences: for word in", "and remove any empty values with open(\"posts_replied_to.txt\", \"r\") as f: posts_replied_to = f.read()", "However, Hilter unknowingly had a Jewish chauffeur. 
<NAME> was also his friend and", "= list(filter(None, posts_replied_to)) return posts_replied_to # variable LOCATIONS = [ \" Sales of", "= praw.Reddit('bot1') print(reddit.user.me()) return reddit def get_posts_replied_to(): # Have we run this code", "ban smoking. Nazi doctors were the first to establish a link between smoking", "re.search(\"(nazi|hitler|hilter|german)\", post.title, re.IGNORECASE): continue filtered_posts.append(post) return filtered_posts def reply_to_post(post): # Reply to the", "posts_replied_to = [] # If we have run the code before, load the", "regime came into power in 1933, they passed a comprehensive set of laws", "cake, pastries, chocolate and sugar. He sometimes took as many as five teaspoons", "Nazi doctors were the first to establish a link between smoking and lung", "posts_analytics = get_posts_analytics() # fetch reddit posts posts = fetch_reddit_posts(\"fakehistoryporn\", 10) analytics_filtered =", "of laws for animal protection. When all of these were in place, Hitler", "It was found that they could march 55 miles without any tiredness which", "keywords] # remove all duplicates and keep original order unique_keywords = list(dict.fromkeys(keywords)) print(\"", "cruelty.\", \"It’s already a known fact that during Hitler’s reign, their main objective", "['NOUN', 'VERB', 'NUM', 'PROPN']: continue keywords.append(word.text) return keywords def filter_posts(posts, posts_replied_to): filtered_posts =", "to else: # Read the file into a list and remove any empty", "posts_replied_to.split(\"\\n\") posts_replied_to = list(filter(None, posts_replied_to)) return posts_replied_to # variable LOCATIONS = [ \"", "to all soldiers serving in the war but the German’s lost before it", "to the post randomnumber = randint(0, len(LOCATIONS)) post.reply(f\"Did you know that: {LOCATIONS[randomnumber]}\") print(f\"Bot", "if post.id in post_ids: continue filtered_posts.append(post) return filtered_posts def get_keywords_from_post(sentences: list): keywords =", 
"laws for animal protection. When all of these were in place, Hitler said", "link between smoking and lung cancer which meant that a fierce anti-smoking campaign", "in sentence.words: if word.upos not in ['NOUN', 'VERB', 'NUM', 'PROPN']: continue keywords.append(word.text) return", "for expulsion. Hitler came to the rescue and made an exception for him", "could march 55 miles without any tiredness which is pretty amazing. The plan", "people to ban smoking. Nazi doctors were the first to establish a link", "into place.\"] ANALYTICS_JSON = \"posts_analytics.json\" def get_posts_analytics(): if not os.path.isfile(ANALYTICS_JSON): posts_analytics = []", "in posts: if post.id in post_ids: continue filtered_posts.append(post) return filtered_posts def get_keywords_from_post(sentences: list):", "whistling, which he did frequently.\", \"Though he shunned meat, Hitler was a voracious", "of animal abuse and cruelty.\", \"It’s already a known fact that during Hitler’s", "Sales of Hitler's political autobiography \\\"<NAME>\\\"sometimes referred to as the bible of the", "pastries, chocolate and sugar. 
He sometimes took as many as five teaspoons of", "he shunned meat, Hitler was a voracious ‘sweet tooth’, consuming large amounts of", "list and remove any empty values with open(ANALYTICS_JSON, \"r\") as f: posts_analytics =", "else: # Read the file into a list and remove any empty values", "advised the general population to give it up.\", \"During the Second World War,", "of Hitler's political autobiography \\\"<NAME>\\\"sometimes referred to as the bible of the Nazi", "\"posts_analytics.json\" def get_posts_analytics(): if not os.path.isfile(ANALYTICS_JSON): posts_analytics = [] # If we have", "return filtered_posts def get_keywords_from_post(sentences: list): keywords = [] for sentence in sentences: for", "the regime came into power in 1933, they passed a comprehensive set of", "continue filtered_posts.append(post) return filtered_posts def get_keywords_from_post(sentences: list): keywords = [] for sentence in", "create an empty list if not os.path.isfile(\"posts_replied_to.txt\"): posts_replied_to = [] # If we", "as f: posts_analytics = loads(f.read()) return posts_analytics def initiate_nlp() -> Pipeline: stanza.download('en') nlp_pipe", "filtered_posts = [] for post in posts: if post.id in posts_replied_to: continue if", "soldier’s performance. This was very successful in trials when tested and made the", "from json import loads, dumps from random import randint import stanza import praw", "up with a methamphetamine based experimental drug to increase soldier’s performance. This was", "was very successful in trials when tested and made the troops super tough.", "very successful in trials when tested and made the troops super tough. 
It", "Read the file into a list and remove any empty values with open(ANALYTICS_JSON,", "f: posts_analytics = loads(f.read()) return posts_analytics def initiate_nlp() -> Pipeline: stanza.download('en') nlp_pipe =", "-> Pipeline: stanza.download('en') nlp_pipe = stanza.Pipeline('en', processors=\"tokenize,pos\") return nlp_pipe def fetch_reddit_posts(selected_subreddit: str, limit:", "fact that during Hitler’s reign, their main objective was to free the world", "empty values with open(ANALYTICS_JSON, \"r\") as f: posts_analytics = loads(f.read()) return posts_analytics def", "in his tea.\", \"When the regime came into power in 1933, they passed", "methamphetamine based experimental drug to increase soldier’s performance. This was very successful in", "# write all keywords in lower case keywords = [keyword.lower() for keyword in", "know that: {LOCATIONS[randomnumber]}\") print(f\"Bot replying to: {post.title} https://www.reddit.com/r/fakehistoryporn/comments/{post.id}\") def store_line(f, line): f.write(line +", "In the end, he settled for whistling, which he did frequently.\", \"Though he", "He called them “honorary Aryans”.\", \"In a pre-cursor to modern stances and laws", "posts_replied_to = posts_replied_to.split(\"\\n\") posts_replied_to = list(filter(None, posts_replied_to)) return posts_replied_to # variable LOCATIONS =", "filter_analytics(posts, posts_analytics): post_ids = [post_id for post_id, _, _ in posts_analytics] filtered_posts =", "to the rescue and made an exception for him and his brothers. 
He", "he did frequently.\", \"Though he shunned meat, Hitler was a voracious ‘sweet tooth’,", "f: f.write(dumps(posts_analytics)) # filter for keywords filtered_posts = filter_posts(posts, posts_replied_to) # respond to", "the end, he settled for whistling, which he did frequently.\", \"Though he shunned", "replied to else: # Read the file into a list and remove any", "nlp_pipe: Pipeline): doc = nlp_pipe(post.title) keywords = get_keywords_from_post(doc.sentences) # write all keywords in", "import quote from stanza import Pipeline def log_into_reddit(): reddit = praw.Reddit('bot1') print(reddit.user.me()) return", "LOCATIONS = [ \" Sales of Hitler's political autobiography \\\"<NAME>\\\"sometimes referred to as", "analytics_filtered = filter_analytics(posts, posts_analytics) # read submission titles for post in analytics_filtered: nlp_data", "if not re.search(\"(nazi|hitler|hilter|german)\", post.title, re.IGNORECASE): continue filtered_posts.append(post) return filtered_posts def reply_to_post(post): # Reply", "empty values with open(\"posts_replied_to.txt\", \"r\") as f: posts_replied_to = f.read() posts_replied_to = posts_replied_to.split(\"\\n\")", "ANALYTICS_JSON = \"posts_analytics.json\" def get_posts_analytics(): if not os.path.isfile(ANALYTICS_JSON): posts_analytics = [] # If", "randint import stanza import praw import re import os from urllib.parse import quote", "we have run the code before, load the list of posts we have", "the war but the German’s lost before it could be put into place.\"]", "in posts: if post.id in posts_replied_to: continue if not re.search(\"(nazi|hitler|hilter|german)\", post.title, re.IGNORECASE): continue", "fetch reddit posts posts = fetch_reddit_posts(\"fakehistoryporn\", 10) analytics_filtered = filter_analytics(posts, posts_analytics) # read", "he settled for whistling, which he did frequently.\", \"Though he shunned meat, Hitler", "filtered_posts = filter_posts(posts, posts_replied_to) # respond to filtered posts with 
open(\"posts_replied_to.txt\", \"a\") as", "list of posts we have replied to else: # Read the file into", "meant that a fierce anti-smoking campaign began under Hitler. The Nazi leadership strongly", "f: posts_replied_to = f.read() posts_replied_to = posts_replied_to.split(\"\\n\") posts_replied_to = list(filter(None, posts_replied_to)) return posts_replied_to", "amazing. The plan was to roll it out to all soldiers serving in", "for keywords filtered_posts = filter_posts(posts, posts_replied_to) # respond to filtered posts with open(\"posts_replied_to.txt\",", "dabbled in the flute and harmonica. In the end, he settled for whistling,", "the Nazi Party, made him a millionaire. \", \"Hitler had dreams of playing", "post in posts: if post.id in post_ids: continue filtered_posts.append(post) return filtered_posts def get_keywords_from_post(sentences:", "import randint import stanza import praw import re import os from urllib.parse import", "of playing a musical instrument. He had short but unsuccessful lessons in piano", "continue keywords.append(word.text) return keywords def filter_posts(posts, posts_replied_to): filtered_posts = [] for post in", "for post in posts: if post.id in posts_replied_to: continue if not re.search(\"(nazi|hitler|hilter|german)\", post.title,", "= [keyword.lower() for keyword in keywords] # remove all duplicates and keep original", "harmonica. In the end, he settled for whistling, which he did frequently.\", \"Though", "\"r\") as f: posts_analytics = loads(f.read()) return posts_analytics def initiate_nlp() -> Pipeline: stanza.download('en')", "them “honorary Aryans”.\", \"In a pre-cursor to modern stances and laws in this", "\"Though he shunned meat, Hitler was a voracious ‘sweet tooth’, consuming large amounts", "were in place, Hitler said something about animal cruelty. 
With the new Reich,", "With the new Reich, there will be no grounds for any form of", "nlp_pipe = initiate_nlp() # create posts_analytics posts_analytics = get_posts_analytics() # fetch reddit posts", "power in 1933, they passed a comprehensive set of laws for animal protection.", "posts_analytics) # read submission titles for post in analytics_filtered: nlp_data = process_post(post, nlp_pipe)", "troops super tough. It was found that they could march 55 miles without", "referred to as the bible of the Nazi Party, made him a millionaire.", "not re.search(\"(nazi|hitler|hilter|german)\", post.title, re.IGNORECASE): continue filtered_posts.append(post) return filtered_posts def reply_to_post(post): # Reply to", "Hitler's political autobiography \\\"<NAME>\\\"sometimes referred to as the bible of the Nazi Party,", "in piano and violin and also dabbled in the flute and harmonica. In", "store nlp doc in posts_analytics with open(ANALYTICS_JSON, \"w\") as f: f.write(dumps(posts_analytics)) # filter", "not os.path.isfile(ANALYTICS_JSON): posts_analytics = [] # If we have run the code before,", "for sentence in sentences: for word in sentence.words: if word.upos not in ['NOUN',", "order unique_keywords = list(dict.fromkeys(keywords)) print(\" \".join(unique_keywords)) print(f\"{post.title}\") print(f\"https://www.reddit.com/r/fakehistoryporn/comments/{post.id}\") URL_keywords = quote(' '.join(unique_keywords)) print(f\"https://en.wikipedia.org/w/index.php?search={URL_keywords}\")", "filtered posts with open(\"posts_replied_to.txt\", \"a\") as f: for post in filtered_posts: reply_to_post(post) #", "passed a comprehensive set of laws for animal protection. When all of these", "German doctors came up with a methamphetamine based experimental drug to increase soldier’s", "\"\\n\") if __name__ == '__main__': # log into reddit reddit = log_into_reddit() #", "Hitler said something about animal cruelty. 
With the new Reich, there will be", "in ['NOUN', 'VERB', 'NUM', 'PROPN']: continue keywords.append(word.text) return keywords def filter_posts(posts, posts_replied_to): filtered_posts", "Pipeline def log_into_reddit(): reddit = praw.Reddit('bot1') print(reddit.user.me()) return reddit def get_posts_replied_to(): # Have", "the list of posts we have replied to else: # Read the file", "protection. When all of these were in place, Hitler said something about animal", "stanza.Pipeline('en', processors=\"tokenize,pos\") return nlp_pipe def fetch_reddit_posts(selected_subreddit: str, limit: int) -> list: subreddit =", "nlp_pipe = stanza.Pipeline('en', processors=\"tokenize,pos\") return nlp_pipe def fetch_reddit_posts(selected_subreddit: str, limit: int) -> list:", "reddit.subreddit(selected_subreddit) return subreddit.hot(limit=limit) def process_post(post, nlp_pipe: Pipeline): doc = nlp_pipe(post.title) keywords = get_keywords_from_post(doc.sentences)", "process_post(post, nlp_pipe: Pipeline): doc = nlp_pipe(post.title) keywords = get_keywords_from_post(doc.sentences) # write all keywords", "_, _ in posts_analytics] filtered_posts = [] for post in posts: if post.id", "in trials when tested and made the troops super tough. It was found", "and remove any empty values with open(ANALYTICS_JSON, \"r\") as f: posts_analytics = loads(f.read())", "posts: if post.id in post_ids: continue filtered_posts.append(post) return filtered_posts def get_keywords_from_post(sentences: list): keywords", "the Second World War, German doctors came up with a methamphetamine based experimental", "short but unsuccessful lessons in piano and violin and also dabbled in the", "for whistling, which he did frequently.\", \"Though he shunned meat, Hitler was a", "# Read the file into a list and remove any empty values with", "import re import os from urllib.parse import quote from stanza import Pipeline def", "Hitler’s reign, their main objective was to free the world of Jews. 
However,", "not os.path.isfile(\"posts_replied_to.txt\"): posts_replied_to = [] # If we have run the code before,", "post_ids = [post_id for post_id, _, _ in posts_analytics] filtered_posts = [] for", "posts_analytics): post_ids = [post_id for post_id, _, _ in posts_analytics] filtered_posts = []", "with open(\"posts_replied_to.txt\", \"r\") as f: posts_replied_to = f.read() posts_replied_to = posts_replied_to.split(\"\\n\") posts_replied_to =", "Hitler. The Nazi leadership strongly condemned smoking and advised the general population to", "post.title, unique_keywords def filter_analytics(posts, posts_analytics): post_ids = [post_id for post_id, _, _ in", "tested and made the troops super tough. It was found that they could", "Nazi party were the first people to ban smoking. Nazi doctors were the", "the file into a list and remove any empty values with open(ANALYTICS_JSON, \"r\")", "reddit = praw.Reddit('bot1') print(reddit.user.me()) return reddit def get_posts_replied_to(): # Have we run this", "__name__ == '__main__': # log into reddit reddit = log_into_reddit() # check posts", "strongly condemned smoking and advised the general population to give it up.\", \"During", "meat, Hitler was a voracious ‘sweet tooth’, consuming large amounts of cake, pastries,", "submission titles for post in analytics_filtered: nlp_data = process_post(post, nlp_pipe) posts_analytics.append(nlp_data) # store", "initiate nlp nlp_pipe = initiate_nlp() # create posts_analytics posts_analytics = get_posts_analytics() # fetch", "a pre-cursor to modern stances and laws in this area, the Nazi party", "return nlp_pipe def fetch_reddit_posts(selected_subreddit: str, limit: int) -> list: subreddit = reddit.subreddit(selected_subreddit) return", "experimental drug to increase soldier’s performance. 
This was very successful in trials when", "get_posts_analytics() # fetch reddit posts posts = fetch_reddit_posts(\"fakehistoryporn\", 10) analytics_filtered = filter_analytics(posts, posts_analytics)", "return reddit def get_posts_replied_to(): # Have we run this code before? If not,", "[ \" Sales of Hitler's political autobiography \\\"<NAME>\\\"sometimes referred to as the bible", "animal abuse and cruelty.\", \"It’s already a known fact that during Hitler’s reign,", "lost before it could be put into place.\"] ANALYTICS_JSON = \"posts_analytics.json\" def get_posts_analytics():", "\"It’s already a known fact that during Hitler’s reign, their main objective was", "filtered_posts = [] for post in posts: if post.id in post_ids: continue filtered_posts.append(post)", "'PROPN']: continue keywords.append(word.text) return keywords def filter_posts(posts, posts_replied_to): filtered_posts = [] for post", "[post_id for post_id, _, _ in posts_analytics] filtered_posts = [] for post in", "f: for post in filtered_posts: reply_to_post(post) # store post_id in posts_replied_to store_line(f, post.id)", "the Nazi party were the first people to ban smoking. Nazi doctors were", "def filter_posts(posts, posts_replied_to): filtered_posts = [] for post in posts: if post.id in", "posts with open(\"posts_replied_to.txt\", \"a\") as f: for post in filtered_posts: reply_to_post(post) # store", "get_keywords_from_post(doc.sentences) # write all keywords in lower case keywords = [keyword.lower() for keyword", "to roll it out to all soldiers serving in the war but the", "violin and also dabbled in the flute and harmonica. In the end, he", "for word in sentence.words: if word.upos not in ['NOUN', 'VERB', 'NUM', 'PROPN']: continue", "import praw import re import os from urllib.parse import quote from stanza import", "# log into reddit reddit = log_into_reddit() # check posts replied to posts_replied_to", "without any tiredness which is pretty amazing. 
The plan was to roll it", "leadership strongly condemned smoking and advised the general population to give it up.\",", "subreddit = reddit.subreddit(selected_subreddit) return subreddit.hot(limit=limit) def process_post(post, nlp_pipe: Pipeline): doc = nlp_pipe(post.title) keywords", "remove all duplicates and keep original order unique_keywords = list(dict.fromkeys(keywords)) print(\" \".join(unique_keywords)) print(f\"{post.title}\")", "The Nazi leadership strongly condemned smoking and advised the general population to give", "of posts we have replied to else: # Read the file into a", "filter_posts(posts, posts_replied_to): filtered_posts = [] for post in posts: if post.id in posts_replied_to:", "a fierce anti-smoking campaign began under Hitler. The Nazi leadership strongly condemned smoking", "re import os from urllib.parse import quote from stanza import Pipeline def log_into_reddit():", "the first to establish a link between smoking and lung cancer which meant", "it could be put into place.\"] ANALYTICS_JSON = \"posts_analytics.json\" def get_posts_analytics(): if not", "# remove all duplicates and keep original order unique_keywords = list(dict.fromkeys(keywords)) print(\" \".join(unique_keywords))", "a known fact that during Hitler’s reign, their main objective was to free", "= reddit.subreddit(selected_subreddit) return subreddit.hot(limit=limit) def process_post(post, nlp_pipe: Pipeline): doc = nlp_pipe(post.title) keywords =", "march 55 miles without any tiredness which is pretty amazing. The plan was", "= get_posts_analytics() # fetch reddit posts posts = fetch_reddit_posts(\"fakehistoryporn\", 10) analytics_filtered = filter_analytics(posts,", "open(ANALYTICS_JSON, \"w\") as f: f.write(dumps(posts_analytics)) # filter for keywords filtered_posts = filter_posts(posts, posts_replied_to)", "his brothers. 
He called them “honorary Aryans”.\", \"In a pre-cursor to modern stances", "were the first to establish a link between smoking and lung cancer which", "fierce anti-smoking campaign began under Hitler. The Nazi leadership strongly condemned smoking and", "posts we have replied to else: # Read the file into a list", "def process_post(post, nlp_pipe: Pipeline): doc = nlp_pipe(post.title) keywords = get_keywords_from_post(doc.sentences) # write all", "Second World War, German doctors came up with a methamphetamine based experimental drug", "file into a list and remove any empty values with open(ANALYTICS_JSON, \"r\") as", "return posts_analytics def initiate_nlp() -> Pipeline: stanza.download('en') nlp_pipe = stanza.Pipeline('en', processors=\"tokenize,pos\") return nlp_pipe", "print(f\"Bot replying to: {post.title} https://www.reddit.com/r/fakehistoryporn/comments/{post.id}\") def store_line(f, line): f.write(line + \"\\n\") if __name__", "_ in posts_analytics] filtered_posts = [] for post in posts: if post.id in", "remove any empty values with open(\"posts_replied_to.txt\", \"r\") as f: posts_replied_to = f.read() posts_replied_to", "https://www.reddit.com/r/fakehistoryporn/comments/{post.id}\") def store_line(f, line): f.write(line + \"\\n\") if __name__ == '__main__': # log", "analytics_filtered: nlp_data = process_post(post, nlp_pipe) posts_analytics.append(nlp_data) # store nlp doc in posts_analytics with", "get_posts_analytics(): if not os.path.isfile(ANALYTICS_JSON): posts_analytics = [] # If we have run the", "print(f\"https://en.wikipedia.org/w/index.php?search={URL_keywords}\") return post.id, post.title, unique_keywords def filter_analytics(posts, posts_analytics): post_ids = [post_id for post_id,", "unique_keywords def filter_analytics(posts, posts_analytics): post_ids = [post_id for post_id, _, _ in posts_analytics]", "as many as five teaspoons of sugar in his tea.\", \"When the regime", "the new Reich, there will be no grounds for any form of animal", 
"main objective was to free the world of Jews. However, Hilter unknowingly had", "in the war but the German’s lost before it could be put into", "import os from urllib.parse import quote from stanza import Pipeline def log_into_reddit(): reddit", "political autobiography \\\"<NAME>\\\"sometimes referred to as the bible of the Nazi Party, made", "the German’s lost before it could be put into place.\"] ANALYTICS_JSON = \"posts_analytics.json\"", "increase soldier’s performance. This was very successful in trials when tested and made", "reply_to_post(post): # Reply to the post randomnumber = randint(0, len(LOCATIONS)) post.reply(f\"Did you know", "randomnumber = randint(0, len(LOCATIONS)) post.reply(f\"Did you know that: {LOCATIONS[randomnumber]}\") print(f\"Bot replying to: {post.title}", "randint(0, len(LOCATIONS)) post.reply(f\"Did you know that: {LOCATIONS[randomnumber]}\") print(f\"Bot replying to: {post.title} https://www.reddit.com/r/fakehistoryporn/comments/{post.id}\") def", "= loads(f.read()) return posts_analytics def initiate_nlp() -> Pipeline: stanza.download('en') nlp_pipe = stanza.Pipeline('en', processors=\"tokenize,pos\")", "return filtered_posts def reply_to_post(post): # Reply to the post randomnumber = randint(0, len(LOCATIONS))", "to posts_replied_to = get_posts_replied_to() # initiate nlp nlp_pipe = initiate_nlp() # create posts_analytics", "of Jews. However, Hilter unknowingly had a Jewish chauffeur. <NAME> was also his", "stanza import Pipeline def log_into_reddit(): reddit = praw.Reddit('bot1') print(reddit.user.me()) return reddit def get_posts_replied_to():", "in this area, the Nazi party were the first people to ban smoking.", "def get_posts_replied_to(): # Have we run this code before? If not, create an", "tiredness which is pretty amazing. The plan was to roll it out to", "and personal chauffeur. 
When it got known to many, <NAME> was ready to", "posts_replied_to: continue if not re.search(\"(nazi|hitler|hilter|german)\", post.title, re.IGNORECASE): continue filtered_posts.append(post) return filtered_posts def reply_to_post(post):", "had dreams of playing a musical instrument. He had short but unsuccessful lessons", "during Hitler’s reign, their main objective was to free the world of Jews.", "posts = fetch_reddit_posts(\"fakehistoryporn\", 10) analytics_filtered = filter_analytics(posts, posts_analytics) # read submission titles for", "return subreddit.hot(limit=limit) def process_post(post, nlp_pipe: Pipeline): doc = nlp_pipe(post.title) keywords = get_keywords_from_post(doc.sentences) #", "stanza import praw import re import os from urllib.parse import quote from stanza", "post.reply(f\"Did you know that: {LOCATIONS[randomnumber]}\") print(f\"Bot replying to: {post.title} https://www.reddit.com/r/fakehistoryporn/comments/{post.id}\") def store_line(f, line):", "return keywords def filter_posts(posts, posts_replied_to): filtered_posts = [] for post in posts: if", "Nazi leadership strongly condemned smoking and advised the general population to give it", "playing a musical instrument. He had short but unsuccessful lessons in piano and", "= [] # If we have run the code before, load the list", "def initiate_nlp() -> Pipeline: stanza.download('en') nlp_pipe = stanza.Pipeline('en', processors=\"tokenize,pos\") return nlp_pipe def fetch_reddit_posts(selected_subreddit:", "reddit = log_into_reddit() # check posts replied to posts_replied_to = get_posts_replied_to() # initiate", "to as the bible of the Nazi Party, made him a millionaire. 
\",", "random import randint import stanza import praw import re import os from urllib.parse", "def filter_analytics(posts, posts_analytics): post_ids = [post_id for post_id, _, _ in posts_analytics] filtered_posts", "a list and remove any empty values with open(ANALYTICS_JSON, \"r\") as f: posts_analytics", "nlp doc in posts_analytics with open(ANALYTICS_JSON, \"w\") as f: f.write(dumps(posts_analytics)) # filter for", "which is pretty amazing. The plan was to roll it out to all", "drug to increase soldier’s performance. This was very successful in trials when tested", "= get_posts_replied_to() # initiate nlp nlp_pipe = initiate_nlp() # create posts_analytics posts_analytics =", "from stanza import Pipeline def log_into_reddit(): reddit = praw.Reddit('bot1') print(reddit.user.me()) return reddit def", "[] for sentence in sentences: for word in sentence.words: if word.upos not in", "Hilter unknowingly had a Jewish chauffeur. <NAME> was also his friend and personal", "1933, they passed a comprehensive set of laws for animal protection. When all", "between smoking and lung cancer which meant that a fierce anti-smoking campaign began", "fetch_reddit_posts(selected_subreddit: str, limit: int) -> list: subreddit = reddit.subreddit(selected_subreddit) return subreddit.hot(limit=limit) def process_post(post,", "came to the rescue and made an exception for him and his brothers.", "end, he settled for whistling, which he did frequently.\", \"Though he shunned meat,", "[keyword.lower() for keyword in keywords] # remove all duplicates and keep original order", "piano and violin and also dabbled in the flute and harmonica. In the", "posts_analytics def initiate_nlp() -> Pipeline: stanza.download('en') nlp_pipe = stanza.Pipeline('en', processors=\"tokenize,pos\") return nlp_pipe def", "If not, create an empty list if not os.path.isfile(\"posts_replied_to.txt\"): posts_replied_to = [] #", "a millionaire. \", \"Hitler had dreams of playing a musical instrument. 
He had", "list and remove any empty values with open(\"posts_replied_to.txt\", \"r\") as f: posts_replied_to =", "<NAME> was also his friend and personal chauffeur. When it got known to", "re.IGNORECASE): continue filtered_posts.append(post) return filtered_posts def reply_to_post(post): # Reply to the post randomnumber", "if post.id in posts_replied_to: continue if not re.search(\"(nazi|hitler|hilter|german)\", post.title, re.IGNORECASE): continue filtered_posts.append(post) return", "in the flute and harmonica. In the end, he settled for whistling, which", "tea.\", \"When the regime came into power in 1933, they passed a comprehensive", "instrument. He had short but unsuccessful lessons in piano and violin and also", "exception for him and his brothers. He called them “honorary Aryans”.\", \"In a", "took as many as five teaspoons of sugar in his tea.\", \"When the", "not in ['NOUN', 'VERB', 'NUM', 'PROPN']: continue keywords.append(word.text) return keywords def filter_posts(posts, posts_replied_to):", "\", \"Hitler had dreams of playing a musical instrument. 
He had short but", "War, German doctors came up with a methamphetamine based experimental drug to increase", "nlp nlp_pipe = initiate_nlp() # create posts_analytics posts_analytics = get_posts_analytics() # fetch reddit", "{post.title} https://www.reddit.com/r/fakehistoryporn/comments/{post.id}\") def store_line(f, line): f.write(line + \"\\n\") if __name__ == '__main__': #", "write all keywords in lower case keywords = [keyword.lower() for keyword in keywords]", "condemned smoking and advised the general population to give it up.\", \"During the", "in sentences: for word in sentence.words: if word.upos not in ['NOUN', 'VERB', 'NUM',", "list(filter(None, posts_replied_to)) return posts_replied_to # variable LOCATIONS = [ \" Sales of Hitler's", "import loads, dumps from random import randint import stanza import praw import re", "for post_id, _, _ in posts_analytics] filtered_posts = [] for post in posts:", "made an exception for him and his brothers. He called them “honorary Aryans”.\",", "list: subreddit = reddit.subreddit(selected_subreddit) return subreddit.hot(limit=limit) def process_post(post, nlp_pipe: Pipeline): doc = nlp_pipe(post.title)", "that: {LOCATIONS[randomnumber]}\") print(f\"Bot replying to: {post.title} https://www.reddit.com/r/fakehistoryporn/comments/{post.id}\") def store_line(f, line): f.write(line + \"\\n\")", "replied to posts_replied_to = get_posts_replied_to() # initiate nlp nlp_pipe = initiate_nlp() # create", "about animal cruelty. With the new Reich, there will be no grounds for", "but unsuccessful lessons in piano and violin and also dabbled in the flute", "sometimes took as many as five teaspoons of sugar in his tea.\", \"When", "Pipeline): doc = nlp_pipe(post.title) keywords = get_keywords_from_post(doc.sentences) # write all keywords in lower", "into a list and remove any empty values with open(ANALYTICS_JSON, \"r\") as f:", "also dabbled in the flute and harmonica. 
In the end, he settled for", "= [] for post in posts: if post.id in post_ids: continue filtered_posts.append(post) return", "have run the code before, load the list of posts we have replied", "as the bible of the Nazi Party, made him a millionaire. \", \"Hitler", "post_id, _, _ in posts_analytics] filtered_posts = [] for post in posts: if", "with open(\"posts_replied_to.txt\", \"a\") as f: for post in filtered_posts: reply_to_post(post) # store post_id", "all soldiers serving in the war but the German’s lost before it could", "= filter_analytics(posts, posts_analytics) # read submission titles for post in analytics_filtered: nlp_data =", "bible of the Nazi Party, made him a millionaire. \", \"Hitler had dreams", "reign, their main objective was to free the world of Jews. However, Hilter", "target Maurice for expulsion. Hitler came to the rescue and made an exception", "post.id, post.title, unique_keywords def filter_analytics(posts, posts_analytics): post_ids = [post_id for post_id, _, _", "lessons in piano and violin and also dabbled in the flute and harmonica.", "we run this code before? If not, create an empty list if not", "and sugar. He sometimes took as many as five teaspoons of sugar in", "had a Jewish chauffeur. <NAME> was also his friend and personal chauffeur. When", "check posts replied to posts_replied_to = get_posts_replied_to() # initiate nlp nlp_pipe = initiate_nlp()", "lung cancer which meant that a fierce anti-smoking campaign began under Hitler. The", "based experimental drug to increase soldier’s performance. This was very successful in trials", "sugar in his tea.\", \"When the regime came into power in 1933, they", "case keywords = [keyword.lower() for keyword in keywords] # remove all duplicates and", "also his friend and personal chauffeur. 
When it got known to many, <NAME>", "replying to: {post.title} https://www.reddit.com/r/fakehistoryporn/comments/{post.id}\") def store_line(f, line): f.write(line + \"\\n\") if __name__ ==", "and lung cancer which meant that a fierce anti-smoking campaign began under Hitler.", "friend and personal chauffeur. When it got known to many, <NAME> was ready", "anti-smoking campaign began under Hitler. The Nazi leadership strongly condemned smoking and advised", "area, the Nazi party were the first people to ban smoking. Nazi doctors", "quote from stanza import Pipeline def log_into_reddit(): reddit = praw.Reddit('bot1') print(reddit.user.me()) return reddit", "nlp_data = process_post(post, nlp_pipe) posts_analytics.append(nlp_data) # store nlp doc in posts_analytics with open(ANALYTICS_JSON,", "Party, made him a millionaire. \", \"Hitler had dreams of playing a musical", "post in analytics_filtered: nlp_data = process_post(post, nlp_pipe) posts_analytics.append(nlp_data) # store nlp doc in", "Hitler was a voracious ‘sweet tooth’, consuming large amounts of cake, pastries, chocolate", "filter for keywords filtered_posts = filter_posts(posts, posts_replied_to) # respond to filtered posts with", "= stanza.Pipeline('en', processors=\"tokenize,pos\") return nlp_pipe def fetch_reddit_posts(selected_subreddit: str, limit: int) -> list: subreddit", "return post.id, post.title, unique_keywords def filter_analytics(posts, posts_analytics): post_ids = [post_id for post_id, _,", "a link between smoking and lung cancer which meant that a fierce anti-smoking", "He had short but unsuccessful lessons in piano and violin and also dabbled", "lower case keywords = [keyword.lower() for keyword in keywords] # remove all duplicates", "posts_replied_to = f.read() posts_replied_to = posts_replied_to.split(\"\\n\") posts_replied_to = list(filter(None, posts_replied_to)) return posts_replied_to #", "which meant that a fierce anti-smoking campaign began under Hitler. 
The Nazi leadership", "up.\", \"During the Second World War, German doctors came up with a methamphetamine", "an empty list if not os.path.isfile(\"posts_replied_to.txt\"): posts_replied_to = [] # If we have", "word.upos not in ['NOUN', 'VERB', 'NUM', 'PROPN']: continue keywords.append(word.text) return keywords def filter_posts(posts,", "posts_replied_to): filtered_posts = [] for post in posts: if post.id in posts_replied_to: continue", "to filtered posts with open(\"posts_replied_to.txt\", \"a\") as f: for post in filtered_posts: reply_to_post(post)", "from urllib.parse import quote from stanza import Pipeline def log_into_reddit(): reddit = praw.Reddit('bot1')", "to modern stances and laws in this area, the Nazi party were the", "original order unique_keywords = list(dict.fromkeys(keywords)) print(\" \".join(unique_keywords)) print(f\"{post.title}\") print(f\"https://www.reddit.com/r/fakehistoryporn/comments/{post.id}\") URL_keywords = quote(' '.join(unique_keywords))", "continue if not re.search(\"(nazi|hitler|hilter|german)\", post.title, re.IGNORECASE): continue filtered_posts.append(post) return filtered_posts def reply_to_post(post): #", "which he did frequently.\", \"Though he shunned meat, Hitler was a voracious ‘sweet", "it got known to many, <NAME> was ready to target Maurice for expulsion.", "= get_keywords_from_post(doc.sentences) # write all keywords in lower case keywords = [keyword.lower() for", "= posts_replied_to.split(\"\\n\") posts_replied_to = list(filter(None, posts_replied_to)) return posts_replied_to # variable LOCATIONS = [", "# Reply to the post randomnumber = randint(0, len(LOCATIONS)) post.reply(f\"Did you know that:", "of these were in place, Hitler said something about animal cruelty. With the", "+ \"\\n\") if __name__ == '__main__': # log into reddit reddit = log_into_reddit()", "the post randomnumber = randint(0, len(LOCATIONS)) post.reply(f\"Did you know that: {LOCATIONS[randomnumber]}\") print(f\"Bot replying", "chauffeur. 
When it got known to many, <NAME> was ready to target Maurice", "him a millionaire. \", \"Hitler had dreams of playing a musical instrument. He", "in lower case keywords = [keyword.lower() for keyword in keywords] # remove all", "Reich, there will be no grounds for any form of animal abuse and", "the bible of the Nazi Party, made him a millionaire. \", \"Hitler had", "When it got known to many, <NAME> was ready to target Maurice for", "super tough. It was found that they could march 55 miles without any", "keywords = get_keywords_from_post(doc.sentences) # write all keywords in lower case keywords = [keyword.lower()", "# filter for keywords filtered_posts = filter_posts(posts, posts_replied_to) # respond to filtered posts", "autobiography \\\"<NAME>\\\"sometimes referred to as the bible of the Nazi Party, made him" ]
[ "in input_schema.table_schema['primary_keys']] foreign_key = [-1] * len(foreign_idx) # print(input_schema.table_schema['foreign_keys']) relations = [[],[],[]] #", "as rd from copy import deepcopy import torch import torch.nn as nn import", "# tokens1, segment_ids1, i_nlu1, i_hds1, t_to_tt_idx_hds1 = generate_inputs(tokenizer, nlu_tt1, new_hds1) # if len(segment_ids1)", "f'vocab_{bert_type}.txt') init_checkpoint = os.path.join(BERT_PT_PATH, f'pytorch_model_{bert_type}.bin') print('bert_config_file', bert_config_file) print('vocab_file', vocab_file) print('init_checkpoint', init_checkpoint) bert_config =", "= t_to_tt_idx[0] assert len(t_to_tt_idx) == len(input_sequence) assert sum(len(t_to_tt_idx_hds1) for t_to_tt_idx_hds1 in t_to_tt_idx_hds) ==", "for t_to_tt_idx_hds1 in t_to_tt_idx_hds) == len(input_schema.column_names_embedder_input) assert list(wemb_h.size())[0] == len(input_schema.column_names_embedder_input) # print(22222222,len(input_schema.column_names_embedder_input),input_schema.column_names_embedder_input,input_schema.column_names_surface_form) utterance_states", "new_schema.append(nodes[i]) # print(nodes[i]) nodes[i] = tokenizer.tokenize(nodes[i]) # print(nodes[i]) # masks.append([1]*len(nodes[i]) + [0]*(pad_len-len(nodes[i]))) #", "= t_to_tt_idx_hds11[i] if i == len(t_to_tt_idx_hds11)-1: end = l_hpu[cnt] else: end = t_to_tt_idx_hds11[i+1]", "assume primary key have \"id\" if col.find(\"id\") != -1: # print('primary') relations[1].append([tb_name[header],columns[col]]) else:", "\"id\" if col.find(\"id\") != -1: # print('primary') relations[1].append([tb_name[header],columns[col]]) if not (len(nodes) - 1", "column or table to column (we choose latter one) base = len(nodes) nodes", "nlu_tt1, hds1) assert len(t_to_tt_idx_hds1) == len(hds1) t_to_tt_idx_hds.append(t_to_tt_idx_hds1) input_ids1 = tokenizer.convert_tokens_to_ids(tokens1) # Input masks", "current_hds1 + [hds1] tokens1, segment_ids1, i_nlu1, i_hds1, t_to_tt_idx_hds1 = generate_inputs(tokenizer, 
nlu_tt1, new_hds1) if", "orig_to_tok_idx[i] = start index of i-th-1st-level-token in all_tokens. nlu_tt1 = [] # all_doc_tokens[", "as value # take redundancy for foreign key if col in columns: #", "all_encoder_layer.permute(2,1,0) # print(all_encoder_layer.size()) # print([gnn_encoder1(i.unsqueeze(0))[0][1][0] for i in all_encoder_layer][0]) # all_encoder_layer = torch.cat([gnn_encoder1(i.unsqueeze(0))[0][1][0].unsqueeze(0)", "index of header in node - len(nodes) columns = {} nodes = []", "segment_ids = [] t_to_tt_idx_hds1 = [] tokens.append(\"[CLS]\") i_st_nlu = len(tokens) # to use", "for t_to_tt_idx_hds11 in t_to_tt_idx_hds1: cnt += 1 schema_token_states1 = [] for i in", "print('wemb_h: [num_of_all_hds, l_hpu_max, hS * num_out_layers_h] = ', wemb_h.size()) b_pu = -1 for", "start and end indices of question in tokens # i_hds: start and end", "dependency nodes=relations=new_schema=None if bert_input_version == 'v1': nodes, relations, new_schema = prepare_input_gnn( tokenizer, input_sequence,", "+= [i.lower() for i in input_schema.table_schema['table_names_original']] nodes, relations, new_schema = prepare_input_gnn( tokenizer, input_sequence,", "(i_noln + 1) * hS wemb_n[b, 0:(i_nlu1[1] - i_nlu1[0]), st:ed] = all_encoder_layer[i_layer][b, i_nlu1[0]:i_nlu1[1],", "current_hds1 + [hds1] # tokens1, segment_ids1, i_nlu1, i_hds1, t_to_tt_idx_hds1 = generate_inputs(tokenizer, nlu_tt1, new_hds1)", "= [gnn_encoder1(torch.cat(i,1))[0][1] for i in all_encoder_layer] # get hidden layer output as representation", "5. 
generate l_hpu from i_hds l_hpu = gen_l_hpu(i_hds) assert len(set(l_n)) == 1 and", "= all_encoder_layer[i_layer][b, i_nlu1[0]:i_nlu1[1], :] return wemb_n def get_wemb_h(i_hds, l_hpu, l_hs, hS, num_hidden_layers, all_encoder_layer,", "all_encoder_layer, pooled_output, tokens, i_nlu, i_hds, \\ l_n, l_hpu, l_hs, \\ nlu_tt, t_to_tt_idx, tt_to_t_idx,", "in columns: if not header in tables: tables.append(header) tb_name[header] = len(tables) -1 #columns[col]=", "all_hds = input_schema.column_names_embedder_input # table name . column tables = [] tb_name =", "all_input_mask) # 5. generate l_hpu from i_hds l_hpu = gen_l_hpu(i_hds) assert len(set(l_n)) ==", "i-th-1st-level-token for (i, token) in enumerate(nlu_t1): t_to_tt_idx1.append( len(nlu_tt1)) # all_doc_tokens[ indicate the start", "generate l_hpu from i_hds l_hpu = gen_l_hpu(i_hds) assert len(set(l_n)) == 1 and len(set(i_nlu))", "== len(hds1) t_to_tt_idx_hds.append(t_to_tt_idx_hds1) input_ids1 = tokenizer.convert_tokens_to_ids(tokens1) # Input masks # The mask has", "tokenizer.tokenize(nodes[i]) # print(nodes[i]) # masks.append([1]*len(nodes[i]) + [0]*(pad_len-len(nodes[i]))) # nodes[i] += ['[PAD]'] * (pad_len-len(nodes[i]))", "related columns), inner list corresponds to edge type \"\"\" nlu_t = [] hds", "all_input_mask = torch.tensor(input_mask, dtype=torch.long).to(device) all_segment_ids = torch.tensor(segment_ids, dtype=torch.long).to(device) # 4. 
Generate BERT output.", "# only get graph encoding without input_sequence dependency nodes=relations=new_schema=None if bert_input_version == 'v1':", "= input_schema.column_names_embedder_input nlu_tt1 = [] for (i, token) in enumerate(nlu_t1): nlu_tt1 += tokenizer.tokenize(token)", "feed into model masks = [] new_schema = [] for i in range(len(nodes)):", "len(input_ids1) < max_seq_length: input_ids1.append(0) input_mask1.append(0) segment_ids1.append(0) assert len(input_ids1) == max_seq_length assert len(input_mask1) ==", "model_bert, tokenizer, bert_config def generate_inputs(tokenizer, nlu1_tok, hds1): tokens = [] segment_ids = []", "= tokenizer.convert_tokens_to_ids(nodes[i]) return nodes, masks, new_schema def prepare_input_v2(tokenizer, input_sequence, input_schema): nlu_t = []", "generate_inputs(tokenizer, nlu_tt1, new_hds1) # if len(segment_ids1) > max_seq_length: # nlu_t.append(nlu_t1) # hds.append(current_hds1) #", "= input_schema.relations # TODO: feed into gnn and return embedding # print(relations) #", "primary_idx): relations[0].append([tb_name[header],columns[col]]) # find table name correspond to foreign key if len(nodes)-1 in", "hS ed = (i_noln + 1) * hS wemb_n[b, 0:(i_nlu1[1] - i_nlu1[0]), st:ed]", "update new schema new_schema = input_schema.column_names_surface_form if len(new_schema) != len(nodes): new_schema = input_schema.column_names_surface_form", "l_hpu[cnt] else: end = t_to_tt_idx_hds11[i+1] schema_token_states1.append(torch.mean(wemb_h[cnt,start:end,:], dim=0)) assert len(schema_token_states1) == len(input_schema.column_names_embedder_input[cnt].split()) schema_token_states.append(schema_token_states1) assert", "for padding tokens. Only real # tokens are attended to. 
input_mask1 = [1]", "= {} # index of header in node - len(nodes) columns = {}", "l_hpu_max = max(l_hpu) num_of_all_hds = sum(l_hs) wemb_h = torch.zeros([num_of_all_hds, l_hpu_max, hS * num_out_layers_h]).to(device)", "t_to_tt_idx, tt_to_t_idx, t_to_tt_idx_hds = get_wemb_bert(bert_config, model_bert, tokenizer, nlu_t, hds, max_seq_length, num_out_layers_n, num_out_layers_h) t_to_tt_idx", "edge type \"\"\" nlu_t = [] hds = [] nlu_t1 = input_sequence #", "tokens.append(\"[SEP]\") segment_ids.append(0) elif i == len(hds1)-1: tokens.append(\"[SEP]\") segment_ids.append(1) else: raise EnvironmentError i_nlu =", "get graph encoding without input_sequence dependency nodes=relations=new_schema=None if bert_input_version == 'v1': nodes, relations,", "else: # end = t_to_tt_idx[i+1] # utterance_states.append(torch.mean(wemb_n[:,start:end,:], dim=[0,1])) # assert len(utterance_states) == len(input_sequence)", "+= 1 for i_nolh in range(num_out_layers_h): i_layer = num_hidden_layers - 1 - i_nolh", "segment_ids.append(0) i_hds = [] for i, hds11 in enumerate(hds1): i_st_hd = len(tokens) t_to_tt_idx_hds11", "len(nodes): new_schema = input_schema.column_names_surface_form + tables # if len(new_schema) != len(nodes): # print(new_schema,nodes,", "zip(input_nodes,masks)],0) all_encoder_layer = torch.cat([gnn_encoder1(i.unsqueeze(0))[0][1][0].unsqueeze(0) for i in all_encoder_layer],0) else: all_encoder_layer = torch.cat([torch.cat([embedder(token).unsqueeze(0) for", "in sub_tokens: tt_to_t_idx1.append(i) nlu_tt1.append(sub_token) # all_doc_tokens are further tokenized using WordPiece tokenizer nlu_tt.append(nlu_tt1)", "[0]*(pad_len-len(nodes[i]))) nodes[i] += ['[PAD]'] * (pad_len-len(nodes[i])) nodes[i] = tokenizer.convert_tokens_to_ids(nodes[i]) # print(nodes[i],masks[i]) # print(relations)", "for real tokens and 0 for padding tokens. 
Only real # tokens are", "current_hds1 = [] # for hds1 in all_hds: # new_hds1 = current_hds1 +", "i < len(hds1)-1: tokens.append(\"[SEP]\") segment_ids.append(0) elif i == len(hds1)-1: tokens.append(\"[SEP]\") segment_ids.append(1) else: raise", "[i for i in gnn(all_encoder_layer,relations)] # print(output) # wemb_n, wemb_h, l_n, l_hpu, l_hs,", "language questions orig_to_tok_index: map the index of 1st-level-token to the index of 2nd-level-token", "[SEP] col2 [SEP] ...col-n [SEP] # 2. Generate BERT inputs & indices. tokens1,", "len(nodes) > 1 assert len(relations[0]) > 0 # print(123123123,all_encoder_layer[0][0].size(),len(all_encoder_layer[0]),len(all_encoder_layer),len(all_encoder_layer[3]),len(all_encoder_layer[10])) # print(123123, all_encoder_layer.size(),type(all_encoder_layer)) #", "later segment_ids.append(0) for token in nlu1_tok: tokens.append(token) segment_ids.append(0) i_ed_nlu = len(tokens) tokens.append(\"[SEP]\") segment_ids.append(0)", "# else: # end = t_to_tt_idx[i+1] # utterance_states.append(torch.mean(wemb_n[:,start:end,:], dim=[0,1])) # assert len(utterance_states) ==", "len(nodes) nodes += tables for i in relations: for j in i: j[0]", "i_noln in range(num_out_layers_n): i_layer = num_hidden_layers - 1 - i_noln st = i_noln", "l_n_max, hS * num_out_layers_n]).to(device) for b in range(bS): # [B, max_len, dim] #", "# hds.append(current_hds1) # current_hds1 = [hds1] # else: # current_hds1 = new_hds1 #", "new_hds1 if len(current_hds1) > 0: nlu_t.append(nlu_t1) hds.append(current_hds1) return nlu_t, hds def prepare_input_gnn0(tokenizer, input_sequence,", "# print(nodes,relations) # for (i, token) in enumerate(nlu_t1): # nlu_tt1 += tokenizer.tokenize(token) #", "- i_nlu[0][0] return all_encoder_layer, pooled_output, tokens, i_nlu, i_hds, \\ l_n, l_hpu, l_hs, \\", "[] cnt = -1 for t_to_tt_idx_hds1 in t_to_tt_idx_hds: for t_to_tt_idx_hds11 in t_to_tt_idx_hds1: cnt", "hds, max_seq_length def 
get_gnn_encoding(tokenizer,model_bert,input_sequence,input_schema,gnn,gnn_encoder1,embedder=None,bert_input_version='v1',num_out_layers_h=1, max_seq_length=512,num_out_layers_n=1): # only get graph encoding without input_sequence dependency", "(i_st_nlu, i_ed_nlu) return tokens, segment_ids, i_nlu, i_hds, t_to_tt_idx_hds1 def gen_l_hpu(i_hds): \"\"\" # Treat", "len(nodes)-1 in primary_idx): relations[0].append([tb_name[header],columns[col]]) # find table name correspond to foreign key if", "dim] # Fill zero for non-exist part. l_n1 = l_n[b] i_nlu1 = i_nlu[b]", "18), (19, 21), (22, 23), (24, 25), (26, 29), (30, 34)]) \"\"\" l_hpu", "hS * num_out_layers_h] = ', wemb_h.size()) b_pu = -1 for b, i_hds1 in", "= len(nodes) nodes += tables relations[0] = relations[0] #column relations[1] = [[i,j] for", "use_gnn: if not (input_schema.table_schema['table_names'][0] in input_schema.column_names_embedder_input): input_schema.column_names_embedder_input += input_schema.table_schema['table_names'] input_schema.num_col += len(input_schema.table_schema['table_names']) input_schema.column_names_surface_form", "model_bert = BertModel(bert_config) if no_pretraining: pass else: model_bert.load_state_dict(torch.load(init_checkpoint, map_location='cpu')) print(\"Load pre-trained parameters.\") model_bert.to(device)", "+= tokenizer.tokenize(sub_tok1) t_to_tt_idx_hds1.append(t_to_tt_idx_hds11) tokens += sub_tok i_ed_hd = len(tokens) i_hds.append((i_st_hd, i_ed_hd)) segment_ids +=", "all_encoder_layer.size(),type(all_encoder_layer)) # all_encoder_layer = all_encoder_layer.permute(2,1,0) # print(all_encoder_layer.size()) # print([gnn_encoder1(i.unsqueeze(0))[0][1][0] for i in all_encoder_layer][0])", "l_hpu, l_hs, nlu_tt, t_to_tt_idx, tt_to_t_idx, t_to_tt_idx_hds = get_wemb_bert(bert_config, model_bert, tokenizer, nlu_t, hds, max_seq_length,", "i == len(hds1)-1: tokens.append(\"[SEP]\") segment_ids.append(1) else: raise EnvironmentError i_nlu = (i_st_nlu, i_ed_nlu) return", "i_nlu = [] # 
index to retreive the position of contextual vector later.", "with batch-size = # of columns * # of batch_size i_hds = [(17,", "input_schema, max_seq_length,pad_len=12): \"\"\" Return: Nodes(list of tokenized db items) Return: relations(lists of list", "len(input_schema.column_names_embedder_input[cnt].split()) schema_token_states.append(schema_token_states1) assert len(schema_token_states) == len(input_schema.column_names_embedder_input) if use_gnn: return utterance_states, schema_token_states, relations else:", "all_encoder_layer.size()) output = [i for i in gnn(all_encoder_layer,relations)] # print(output) # wemb_n, wemb_h,", "= [] for i, hds11 in enumerate(hds1): i_st_hd = len(tokens) t_to_tt_idx_hds11 = []", "tokenizer, nlu_t, hds, max_seq_length): \"\"\" Here, input is toknized further by WordPiece (WP)", "in tokens # i_hds: start and end indices of headers # get the", "wemb_n def get_wemb_h(i_hds, l_hpu, l_hs, hS, num_hidden_layers, all_encoder_layer, num_out_layers_h): \"\"\" As if [", "nlu_t, hds, max_seq_length = prepare_input_v2(tokenizer, input_sequence, input_schema) # relations = input_schema.relations # TODO:", "= max(l_n) # print('wemb_n: [bS, l_n_max, hS * num_out_layers_n] = ', bS, l_n_max,", "= [] for sub_tok1 in hds11.split(): t_to_tt_idx_hds11.append(len(sub_tok)) sub_tok += tokenizer.tokenize(sub_tok1) t_to_tt_idx_hds1.append(t_to_tt_idx_hds11) tokens +=", "assert sum(len(t_to_tt_idx_hds1) for t_to_tt_idx_hds1 in t_to_tt_idx_hds) == len(input_schema.column_names_embedder_input) # assert list(wemb_h.size())[0] == len(input_schema.column_names_embedder_input)", "torch.zeros([bS, l_n_max, hS * num_out_layers_n]).to(device) for b in range(bS): # [B, max_len, dim]", "for j in i: j[0] += base # tokenize nodes to feed into", "(19, 21), (22, 23), (24, 25), (26, 29), (30, 34)]) \"\"\" l_hpu =", "all_hds: # print(i.split('.')) if i != \"*\" and len(i.split('.')) > 1: header,col =", "relations[0] #column relations[1] = [[i,j] for i,j in 
enumerate(input_schema.table_schema['primary_keys'])]#primary relations[2] = input_schema.table_schema['foreign_keys']#foriegn for", "for i in all_encoder_layer],0) else: all_encoder_layer = torch.cat([torch.cat([embedder(token).unsqueeze(0) for token in i],0).mean(0).unsqueeze(0) for", "base = len(nodes) nodes += tables for i in relations: for j in", ",use_gnn=True, max_seq_length=512, num_out_layers_n=1, num_out_layers_h=1): # NOTE: add gnn above final output layer #add", "output layer #add input schema table # print(11111111,input_schema.column_names_embedder_input,input_schema.column_names_surface_form) relations = None if use_gnn:", "nlu_tt = [] t_to_tt_idx = [] tt_to_t_idx = [] t_to_tt_idx_hds = [] for", "parameters.\") model_bert.to(device) return model_bert, tokenizer, bert_config def generate_inputs(tokenizer, nlu1_tok, hds1): tokens = []", "+= base # tokenize nodes to feed into model masks = [] ##", "get hidden layer output as representation for each schema items relations = [torch.tensor(i,", "tokens are attended to. input_mask1 = [1] * len(input_ids1) # 3. Zero-pad up", "i_nolh in range(num_out_layers_h): i_layer = num_hidden_layers - 1 - i_nolh st = i_nolh", "nlu_t.append(nlu_t1) # hds.append(current_hds1) return nodes,relations,masks, new_schema def prepare_input_gnn(tokenizer, input_sequence, input_schema, max_seq_length,pad_len=12): \"\"\" Return:", "== 1 assert l_n[0] == i_nlu[0][1] - i_nlu[0][0] return all_encoder_layer, pooled_output, tokens, i_nlu,", "4. Generate BERT output. all_encoder_layer, pooled_output = model_bert(all_input_ids, all_segment_ids, all_input_mask) # 5. 
generate", "tokenized using WordPiece tokenizer nlu_tt.append(nlu_tt1) tt_to_t_idx.append(tt_to_t_idx1) t_to_tt_idx.append(t_to_tt_idx1) l_n.append(len(nlu_tt1)) # [CLS] nlu [SEP] col1", "= map_bert_type_abb[params.bert_type_abb] if params.bert_type_abb == 'cS' or params.bert_type_abb == 'cL' or params.bert_type_abb ==", "in columns: # find('id') != -1 # print('key') relations[2].append([tb_name[header],columns[col]]) # add foreign key", "= [] t_to_tt_idx = [] tt_to_t_idx = [] t_to_tt_idx_hds = [] for b,", "for t_to_tt_idx_hds11 in t_to_tt_idx_hds1: # cnt += 1 # schema_token_states1 = [] #", "to retreive the position of contextual vector later. i_hds = [] doc_tokens =", "# print(nodes[i],masks[i]) # print(relations) # print(nodes,relations) # for (i, token) in enumerate(nlu_t1): #", "bert_input_version == 'v2': nlu_t, hds, max_seq_length = prepare_input_v2(tokenizer, input_sequence, input_schema) wemb_n, wemb_h, l_n,", "nlu_tt1 = [] # all_doc_tokens[ orig_to_tok_idx[i] ] returns first sub-token segement of i-th-1st-level-token", "1 - i_nolh st = i_nolh * hS ed = (i_nolh + 1)", "start index of i-th-1st-level-token in all_tokens. 
nlu_tt1 = [] # all_doc_tokens[ orig_to_tok_idx[i] ]", "+= 1 # schema_token_states1 = [] # for i in range(len(t_to_tt_idx_hds11)): # start", "tb_name[header] = len(tables) -1 #columns[col]= len(nodes)-1 # add column name to columns with", "i in range(len(t_to_tt_idx_hds11)): start = t_to_tt_idx_hds11[i] if i == len(t_to_tt_idx_hds11)-1: end = l_hpu[cnt]", "gnn(all_encoder_layer,relations)] # print(output) # wemb_n, wemb_h, l_n, l_hpu, l_hs, nlu_tt, t_to_tt_idx, tt_to_t_idx, t_to_tt_idx_hds", "vocab_file = os.path.join(BERT_PT_PATH, f'vocab_{bert_type}.txt') init_checkpoint = os.path.join(BERT_PT_PATH, f'pytorch_model_{bert_type}.bin') print('bert_config_file', bert_config_file) print('vocab_file', vocab_file) print('init_checkpoint',", "= generate_inputs(tokenizer, nlu_tt1, new_hds1) # if len(segment_ids1) > max_seq_length: # nlu_t.append(nlu_t1) # hds.append(current_hds1)", "# if len(current_hds1) > 0: # nlu_t.append(nlu_t1) # hds.append(current_hds1) return nodes,relations,masks, new_schema def", "== 'cL' or params.bert_type_abb == 'mcS': do_lower_case = False else: do_lower_case = True", "for i in input_schema.table_schema['primary_keys']] foreign_key = [-1] * len(foreign_idx) # print(input_schema.table_schema['foreign_keys']) relations =", "all_hds: hds1_table = hds1.split('.')[0].strip() if hds1_table == current_table: current_hds1.append(hds1) else: tokens1, segment_ids1, i_nlu1,", "input_schema.column_names_embedder_input # table name . 
column nlu_tt1 = [] # print(1111111,nlu_t1,all_hds) for (i,", "return wemb_n def get_wemb_h(i_hds, l_hpu, l_hs, hS, num_hidden_layers, all_encoder_layer, num_out_layers_h): \"\"\" As if", "all_encoder_layer] # get hidden layer output as representation for each schema items relations", "(we choose latter one) base = len(nodes) nodes += tables relations[0] = relations[0]", "[] t_to_tt_idx = [] tt_to_t_idx = [] t_to_tt_idx_hds = [] for b, nlu_t1", "if torch.cuda.is_available() else \"cpu\") def get_bert(params): BERT_PT_PATH = './model/bert/data/annotated_wikisql_and_PyTorch_bert_param' map_bert_type_abb = {'uS': 'uncased_L-12_H-768_A-12',", "t_to_tt_idx_hds1 = generate_inputs(tokenizer, nlu_tt1, current_hds1) max_seq_length = max(max_seq_length, len(segment_ids1)) nlu_t.append(nlu_t1) hds.append(current_hds1) return nlu_t,", "prepare_input_gnn(tokenizer, input_sequence, input_schema, max_seq_length,pad_len=12): \"\"\" Return: Nodes(list of tokenized db items) Return: relations(lists", "tokenizer nlu_tt.append(nlu_tt1) tt_to_t_idx.append(tt_to_t_idx1) t_to_tt_idx.append(t_to_tt_idx1) l_n.append(len(nlu_tt1)) # [CLS] nlu [SEP] col1 [SEP] col2 [SEP]", "def get_wemb_h(i_hds, l_hpu, l_hs, hS, num_hidden_layers, all_encoder_layer, num_out_layers_h): \"\"\" As if [ [table-1-col-1-tok1,", "[] for i in range(len(t_to_tt_idx)): start = t_to_tt_idx[i] if i == len(t_to_tt_idx)-1: end", "bert_input_version == 'v1': nlu_t, hds = prepare_input(tokenizer, input_sequence, input_schema, max_seq_length) elif bert_input_version ==", "# if len(segment_ids1) > max_seq_length: # nlu_t.append(nlu_t1) # hds.append(current_hds1) # current_hds1 = [hds1]", "len(hds1)-1: tokens.append(\"[SEP]\") segment_ids.append(1) else: raise EnvironmentError i_nlu = (i_st_nlu, i_ed_nlu) return tokens, segment_ids,", "WP-tokenized input natural language questions orig_to_tok_index: map the index of 1st-level-token to the", "(here, CoreNLP). 
t_to_tt_idx1 = [] # orig_to_tok_idx[i] = start index of i-th-1st-level-token in", "tt_to_t_idx, t_to_tt_idx_hds def prepare_input(tokenizer, input_sequence, input_schema, max_seq_length): nlu_t = [] hds = []", "for i_hds1 in i_hds: for i_hds11 in i_hds1: l_hpu.append(i_hds11[1] - i_hds11[0]) return l_hpu", "all_columns = {} # print(1111111,nlu_t1,all_hds) nodes.append('*') for i in all_hds: # print(i.split('.')) if", "enumerate(nlu_t): hds1 = hds[b] l_hs.append(len(hds1)) # 1. 2nd tokenization using WordPiece tt_to_t_idx1 =", "in all_hds: # print(i.split('.')) if i != \"*\" and len(i.split('.')) > 1: header,col", "i for i,j in input_schema.table_schema['foreign_keys']] primary_idx = [ i for i in input_schema.table_schema['primary_keys']]", "== len(input_schema.column_names_embedder_input) # utterance_states = [] # for i in range(len(t_to_tt_idx)): # start", "max_seq_length: input_ids1.append(0) input_mask1.append(0) segment_ids1.append(0) assert len(input_ids1) == max_seq_length assert len(input_mask1) == max_seq_length assert", "or params.bert_type_abb == 'cL' or params.bert_type_abb == 'mcS': do_lower_case = False else: do_lower_case", "in t_to_tt_idx_hds1: cnt += 1 schema_token_states1 = [] for i in range(len(t_to_tt_idx_hds11)): start", "new_schema = input_schema.column_names_surface_form + tables # if len(new_schema) != len(nodes): # print(new_schema,nodes, len(nodes),len(new_schema))", "enumerate(i_hds1): b_pu += 1 for i_nolh in range(num_out_layers_h): i_layer = num_hidden_layers - 1", "tokens, segment_ids, i_nlu, i_hds, t_to_tt_idx_hds1 def gen_l_hpu(i_hds): \"\"\" # Treat columns as if", "BERT output. all_encoder_layer, pooled_output = model_bert(all_input_ids, all_segment_ids, all_input_mask) # 5. 
generate l_hpu from", "and end indices of question in tokens # i_hds: start and end indices", "l_hs, hS, num_hidden_layers, all_encoder_layer, num_out_layers_h): \"\"\" As if [ [table-1-col-1-tok1, t1-c1-t2, ...], [t1-c2-t1,", "The mask has 1 for real tokens and 0 for padding tokens. Only", "all_encoder_layer: BERT outputs from all layers. # pooled_output: output of [CLS] vec. #", "for hds1 in all_hds: hds1_table = hds1.split('.')[0].strip() if hds1_table == current_table: current_hds1.append(hds1) else:", "enumerate(nlu_t1): # nlu_tt1 += tokenizer.tokenize(token) # current_hds1 = [] # for hds1 in", "else: tokens1, segment_ids1, i_nlu1, i_hds1, t_to_tt_idx_hds1 = generate_inputs(tokenizer, nlu_tt1, current_hds1) max_seq_length = max(max_seq_length,", "(i_nolh + 1) * hS wemb_h[b_pu, 0:(i_hds11[1] - i_hds11[0]), st:ed] \\ = all_encoder_layer[i_layer][b,", "# print(header,col) # first add headers nodes.append(i) # if not col in columns:", "pooled_output = model_bert(all_input_ids, all_segment_ids, all_input_mask) # 5. 
generate l_hpu from i_hds l_hpu =", "= input_schema.column_names_surface_form # table name.column tables = [] tb_name = {} # index", "[] ## update new schema new_schema = input_schema.column_names_surface_form if len(new_schema) != len(nodes): new_schema", "# masks.append([1]*len(nodes[i]) + [0]*(pad_len-len(nodes[i]))) # nodes[i] += ['[PAD]'] * (pad_len-len(nodes[i])) # nodes[i] =", "relations[0].append([tb_name[header],columns[col]]) # find table name correspond to foreign key if len(nodes)-1 in foreign_idx:", "only get graph encoding without input_sequence dependency nodes=relations=new_schema=None if bert_input_version == 'v1': nodes,", "i_hds: start and end indices of headers # get the wemb wemb_n =", "end = l_hpu[cnt] else: end = t_to_tt_idx_hds11[i+1] schema_token_states1.append(torch.mean(wemb_h[cnt,start:end,:], dim=0)) assert len(schema_token_states1) == len(input_schema.column_names_embedder_input[cnt].split())", "i_hds1: l_hpu.append(i_hds11[1] - i_hds11[0]) return l_hpu def get_bert_output(model_bert, tokenizer, nlu_t, hds, max_seq_length): \"\"\"", "relations] # print(333333,relations, all_encoder_layer.size()) output = [i for i in gnn(all_encoder_layer,relations)] # print(output)", "num_out_layers_n) wemb_n = torch.zeros([bS, l_n_max, hS * num_out_layers_n]).to(device) for b in range(bS): #", "l_n[0] # else: # end = t_to_tt_idx[i+1] # utterance_states.append(torch.mean(wemb_n[:,start:end,:], dim=[0,1])) # assert len(utterance_states)", "sum(l_hs) wemb_h = torch.zeros([num_of_all_hds, l_hpu_max, hS * num_out_layers_h]).to(device) # print('wemb_h: [num_of_all_hds, l_hpu_max, hS", "# nodes += tables base = len(nodes) nodes += tables for i in", "tokenizer, nlu_t, hds, max_seq_length) # all_encoder_layer: BERT outputs from all layers. 
# pooled_output:", "* num_out_layers_n]).to(device) for b in range(bS): # [B, max_len, dim] # Fill zero", "* (pad_len-len(nodes[i])) # nodes[i] = tokenizer.convert_tokens_to_ids(nodes[i]) # print(nodes[i],masks[i]) # print(relations) # print(nodes,relations) #", "max_seq_length = 0 nlu_t1 = input_sequence all_hds = input_schema.column_names_embedder_input nlu_tt1 = [] for", "= [] doc_tokens = [] nlu_tt = [] t_to_tt_idx = [] tt_to_t_idx =", "retreive the position of contextual vector later. i_hds = [] doc_tokens = []", "= tokenization.FullTokenizer( vocab_file=vocab_file, do_lower_case=do_lower_case) bert_config.print_status() model_bert = BertModel(bert_config) if no_pretraining: pass else: model_bert.load_state_dict(torch.load(init_checkpoint,", "in all_tokens. nlu_tt1 = [] # all_doc_tokens[ orig_to_tok_idx[i] ] returns first sub-token segement", ".bert.modeling import BertConfig, BertModel device = torch.device(\"cuda\" if torch.cuda.is_available() else \"cpu\") def get_bert(params):", "hds1 in all_hds: # new_hds1 = current_hds1 + [hds1] # tokens1, segment_ids1, i_nlu1,", "# tokens: BERT intput tokens # i_nlu: start and end indices of question", "if len(new_schema) != len(nodes): new_schema = input_schema.column_names_surface_form + tables # if len(new_schema) !=", "assert len(segment_ids1) == max_seq_length input_ids.append(input_ids1) tokens.append(tokens1) segment_ids.append(segment_ids1) input_mask.append(input_mask1) i_nlu.append(i_nlu1) i_hds.append(i_hds1) # Convert to", "generate_inputs(tokenizer, nlu_tt1, current_hds1) max_seq_length = max(max_seq_length, len(segment_ids1)) nlu_t.append(nlu_t1) hds.append(current_hds1) return nlu_t, hds, max_seq_length", "= [] # all_doc_tokens[ orig_to_tok_idx[i] ] returns first sub-token segement of i-th-1st-level-token for", "are further tokenized using WordPiece tokenizer nlu_tt.append(nlu_tt1) tt_to_t_idx.append(tt_to_t_idx1) t_to_tt_idx.append(t_to_tt_idx1) l_n.append(len(nlu_tt1)) # [CLS] nlu", "tensor 
all_input_ids = torch.tensor(input_ids, dtype=torch.long).to(device) all_input_mask = torch.tensor(input_mask, dtype=torch.long).to(device) all_segment_ids = torch.tensor(segment_ids, dtype=torch.long).to(device)", "to use it later segment_ids.append(0) for token in nlu1_tok: tokens.append(token) segment_ids.append(0) i_ed_nlu =", "t_to_tt_idx_hds1 in t_to_tt_idx_hds) == len(input_schema.column_names_embedder_input) # assert list(wemb_h.size())[0] == len(input_schema.column_names_embedder_input) # utterance_states =", "and len(set(i_nlu)) == 1 assert l_n[0] == i_nlu[0][1] - i_nlu[0][0] return all_encoder_layer, pooled_output,", "assert list(wemb_h.size())[0] == len(input_schema.column_names_embedder_input) # utterance_states = [] # for i in range(len(t_to_tt_idx)):", "tokenizer.tokenize(nodes[i]) masks.append([1]*len(nodes[i]) + [0]*(pad_len-len(nodes[i]))) nodes[i] += ['[PAD]'] * (pad_len-len(nodes[i])) nodes[i] = tokenizer.convert_tokens_to_ids(nodes[i]) #", "- i_nolh st = i_nolh * hS ed = (i_nolh + 1) *", "= hds1_table if len(current_hds1) > 0: tokens1, segment_ids1, i_nlu1, i_hds1, t_to_tt_idx_hds1 = generate_inputs(tokenizer,", "<=1: print(input_schema.column_names_embedder_input) print(input_schema.num_col) print(input_sequence) assert len(nodes) > 1 assert len(relations[0]) > 0 #", "'v2': nlu_t, hds, max_seq_length = prepare_input_v2(tokenizer, input_sequence, input_schema) wemb_n, wemb_h, l_n, l_hpu, l_hs,", "hds.append(current_hds1) current_hds1 = [hds1] current_table = hds1_table if len(current_hds1) > 0: tokens1, segment_ids1,", "input_schema) # relations = input_schema.relations # TODO: feed into gnn and return embedding", "t_to_tt_idx = t_to_tt_idx[0] # assert len(t_to_tt_idx) == len(input_sequence) # assert sum(len(t_to_tt_idx_hds1) for t_to_tt_idx_hds1", "b_pu += 1 for i_nolh in range(num_out_layers_h): i_layer = num_hidden_layers - 1 -", "import os, json import random as rd from copy import deepcopy import torch", "of original 'white-space' tokens. 
sub_tokens = tokenizer.tokenize(token) for sub_token in sub_tokens: tt_to_t_idx1.append(i) nlu_tt1.append(sub_token)", "utterance with batch-size = # of columns * # of batch_size i_hds =", "print(header,col) # first add headers nodes.append(i) # if not col in columns: if", "question all_hds = input_schema.column_names_surface_form # table name.column tables = [] tb_name = {}", "# print(333333,relations, all_encoder_layer.size()) output = [i for i in gnn(all_encoder_layer,relations)] # print(output) #", ":param model_bert: :param tokenizer: WordPiece toknizer :param nlu: Question :param nlu_t: CoreNLP tokenized", "Question :param nlu_t: CoreNLP tokenized nlu. :param hds: Headers :param hs_t: None or", "len(current_hds1) > 0: tokens1, segment_ids1, i_nlu1, i_hds1, t_to_tt_idx_hds1 = generate_inputs(tokenizer, nlu_tt1, current_hds1) max_seq_length", "in range(len(t_to_tt_idx_hds11)): # start = t_to_tt_idx_hds11[i] # if i == len(t_to_tt_idx_hds11)-1: # end", "== 'v2': raise(\"not inplemented\") nlu_t, hds, max_seq_length = prepare_input_v2(tokenizer, input_sequence, input_schema) # relations", "= [[],[],[]] # three edge types, we use tb_name.col as embedding # print(relations)", "== len(t_to_tt_idx_hds11)-1: # end = l_hpu[cnt] # else: # end = t_to_tt_idx_hds11[i+1] #", "[SEP] col1 [SEP] col2 [SEP] ...col-n [SEP] # 2. Generate BERT inputs &", "= l_n[0] # else: # end = t_to_tt_idx[i+1] # utterance_states.append(torch.mean(wemb_n[:,start:end,:], dim=[0,1])) # assert", "key if col in columns: # find('id') != -1 # print('key') relations[2].append([tb_name[header],columns[col]]) #", "nlu. 
:param hds: Headers :param hs_t: None or 1st-level tokenized headers :param max_seq_length:", "for i in input_nodes],0) if len(nodes) <=1: print(input_schema.column_names_embedder_input) print(input_schema.num_col) print(input_sequence) assert len(nodes) >", "= torch.cat([gnn_encoder1(i.unsqueeze(0))[0][1][0].unsqueeze(0) for i in all_encoder_layer],0) else: all_encoder_layer = torch.cat([torch.cat([embedder(token).unsqueeze(0) for token in", "segment_ids1.append(0) assert len(input_ids1) == max_seq_length assert len(input_mask1) == max_seq_length assert len(segment_ids1) == max_seq_length", "t_to_tt_idx_hds def get_wemb_n(i_nlu, l_n, hS, num_hidden_layers, all_encoder_layer, num_out_layers_n): \"\"\" Get the representation of", "== len(input_schema.column_names_embedder_input) return output,new_schema def get_bert_encoding(bert_config, model_bert, tokenizer, input_sequence, input_schema, bert_input_version='v1', gnn=None ,use_gnn=True,", "nodes = [] relations = [[],[],[]] # three edge types, we use tb_name.col", "= [] nlu_t1 = input_sequence # segmented question all_hds = input_schema.column_names_embedder_input # table", "len(nodes) <=1: print(input_schema.column_names_embedder_input) print(input_schema.num_col) print(input_sequence) assert len(nodes) > 1 assert len(relations[0]) > 0", "from i_hds l_hpu = gen_l_hpu(i_hds) assert len(set(l_n)) == 1 and len(set(i_nlu)) == 1", "nlu_tt.append(nlu_tt1) tt_to_t_idx.append(tt_to_t_idx1) t_to_tt_idx.append(t_to_tt_idx1) l_n.append(len(nlu_tt1)) # [CLS] nlu [SEP] col1 [SEP] col2 [SEP] ...col-n", "header,col = i.split('.') # if col.strip() != '*': # print(header,col) # first add", "num_out_layers_n=1, num_out_layers_h=1): # get contextual output of all tokens from bert all_encoder_layer, pooled_output,", "= [] i_nlu = [] # index to retreive the position of contextual", "name correspond to foreign key if len(nodes)-1 in foreign_idx: # print(foreign_idx[0]) foreign_key[foreign_idx.index(len(nodes)-1)] =", 
"len(schema_token_states1) == len(input_schema.column_names_embedder_input[cnt].split()) schema_token_states.append(schema_token_states1) assert len(schema_token_states) == len(input_schema.column_names_embedder_input) if use_gnn: return utterance_states, schema_token_states,", "== max_seq_length input_ids.append(input_ids1) tokens.append(tokens1) segment_ids.append(segment_ids1) input_mask.append(input_mask1) i_nlu.append(i_nlu1) i_hds.append(i_hds1) # Convert to tensor all_input_ids", "col.find(\"id\") != -1: # print('primary') relations[1].append([tb_name[header],columns[col]]) else: relations[0].append([tb_name[header],columns[col]]) # for * # nodes", "# print(123123123,all_encoder_layer[0][0].size(),len(all_encoder_layer[0]),len(all_encoder_layer),len(all_encoder_layer[3]),len(all_encoder_layer[10])) # print(123123, all_encoder_layer.size(),type(all_encoder_layer)) # all_encoder_layer = all_encoder_layer.permute(2,1,0) # print(all_encoder_layer.size()) # print([gnn_encoder1(i.unsqueeze(0))[0][1][0]", "output of all tokens from bert all_encoder_layer, pooled_output, tokens, i_nlu, i_hds,\\ l_n, l_hpu,", "input_schema.relations # TODO: feed into gnn and return embedding # print(relations) # print(2222222,type(input_nodes),input_nodes)", "+ [0]*(pad_len-len(nodes[i]))) nodes[i] += ['[PAD]'] * (pad_len-len(nodes[i])) nodes[i] = tokenizer.convert_tokens_to_ids(nodes[i]) return nodes, masks,", "= [] # cnt = -1 # for t_to_tt_idx_hds1 in t_to_tt_idx_hds: # for", "BERT. 
INPUT :param model_bert: :param tokenizer: WordPiece toknizer :param nlu: Question :param nlu_t:", "dim=[0,1])) # assert len(utterance_states) == len(input_sequence) # schema_token_states = [] # cnt =", "# schema_token_states1 = [] # for i in range(len(t_to_tt_idx_hds11)): # start = t_to_tt_idx_hds11[i]", "else: # column id columns[col] = len(nodes) -1 # assume primary key have", "assert len(relations[0]) > 0 # print(123123123,all_encoder_layer[0][0].size(),len(all_encoder_layer[0]),len(all_encoder_layer),len(all_encoder_layer[3]),len(all_encoder_layer[10])) # print(123123, all_encoder_layer.size(),type(all_encoder_layer)) # all_encoder_layer = all_encoder_layer.permute(2,1,0)", "token) in enumerate(nlu_t1): t_to_tt_idx1.append( len(nlu_tt1)) # all_doc_tokens[ indicate the start position of original", "# print('wemb_h: [num_of_all_hds, l_hpu_max, hS * num_out_layers_h] = ', wemb_h.size()) b_pu = -1", "# get hidden layer output as representation for each schema items relations =", "l_n[b] i_nlu1 = i_nlu[b] for i_noln in range(num_out_layers_n): i_layer = num_hidden_layers - 1", "t_to_tt_idx_hds11 in t_to_tt_idx_hds1: # cnt += 1 # schema_token_states1 = [] # for", "# NOTE: add gnn above final output layer #add input schema table #", "l_n, l_hpu, l_hs, \\ nlu_tt, t_to_tt_idx, tt_to_t_idx, t_to_tt_idx_hds def get_wemb_n(i_nlu, l_n, hS, num_hidden_layers,", "[] tokens = [] segment_ids = [] input_mask = [] i_nlu = []", "# print(11111111,input_schema.column_names_embedder_input,input_schema.column_names_surface_form) relations = None if use_gnn: if not (input_schema.table_schema['table_names'][0] in input_schema.column_names_embedder_input): input_schema.column_names_embedder_input", "if not embedder: input_nodes =[ torch.tensor([i], dtype=torch.long).to(device) for i in nodes] masks =", "\"\"\" As if [ [table-1-col-1-tok1, t1-c1-t2, ...], [t1-c2-t1, t1-c2-t2, ...]. ... 
[t2-c1-t1, ...,]", "new_schema = [] for i in range(len(nodes)): new_schema.append(nodes[i]) # print(nodes[i]) nodes[i] = tokenizer.tokenize(nodes[i])", "random as rd from copy import deepcopy import torch import torch.nn as nn", "# end = t_to_tt_idx_hds11[i+1] # schema_token_states1.append(torch.mean(wemb_h[cnt,start:end,:], dim=0)) # assert len(schema_token_states1) == len(input_schema.column_names_embedder_input[cnt].split()) #", "len(input_schema.column_names_embedder_input) return output,new_schema def get_bert_encoding(bert_config, model_bert, tokenizer, input_sequence, input_schema, bert_input_version='v1', gnn=None ,use_gnn=True, max_seq_length=512,", "all_encoder_layer, num_out_layers_h) return wemb_n, wemb_h, l_n, l_hpu, l_hs, \\ nlu_tt, t_to_tt_idx, tt_to_t_idx, t_to_tt_idx_hds", "num_out_layers_n, num_out_layers_h) t_to_tt_idx = t_to_tt_idx[0] assert len(t_to_tt_idx) == len(input_sequence) assert sum(len(t_to_tt_idx_hds1) for t_to_tt_idx_hds1", "nlu_t, hds, max_seq_length): \"\"\" Here, input is toknized further by WordPiece (WP) tokenizer", "tokens1, segment_ids1, i_nlu1, i_hds1, t_to_tt_idx_hds1 = generate_inputs(tokenizer, nlu_tt1, current_hds1) max_seq_length = max(max_seq_length, len(segment_ids1))", "tokens.append(\"[SEP]\") segment_ids.append(0) i_hds = [] for i, hds11 in enumerate(hds1): i_st_hd = len(tokens)", "= relations[0] #column relations[1] = [[i,j] for i,j in enumerate(input_schema.table_schema['primary_keys'])]#primary relations[2] = input_schema.table_schema['foreign_keys']#foriegn", "max_seq_length) # all_encoder_layer: BERT outputs from all layers. # pooled_output: output of [CLS]", "input_schema.column_names_embedder_input nlu_tt1 = [] for (i, token) in enumerate(nlu_t1): nlu_tt1 += tokenizer.tokenize(token) current_hds1", "## NOTE: foreign key relation can be column to column or table to", "= torch.tensor(segment_ids, dtype=torch.long).to(device) # 4. Generate BERT output. 
all_encoder_layer, pooled_output = model_bert(all_input_ids, all_segment_ids,", "# for * # nodes += tables base = len(nodes) nodes += tables", "in foreign_idx: # print(foreign_idx[0]) foreign_key[foreign_idx.index(len(nodes)-1)] = tb_name[header] ## NOTE: foreign key relation can", "# for i in range(len(t_to_tt_idx_hds11)): # start = t_to_tt_idx_hds11[i] # if i ==", "inputs & indices. tokens1, segment_ids1, i_nlu1, i_hds1, t_to_tt_idx_hds1 = generate_inputs(tokenizer, nlu_tt1, hds1) assert", "for each batch input_ids = [] tokens = [] segment_ids = [] input_mask", "= [] # for hds1 in all_hds: # new_hds1 = current_hds1 + [hds1]", "num_out_layers_h=1): # NOTE: add gnn above final output layer #add input schema table", "num_out_layers_n): \"\"\" Get the representation of each tokens. \"\"\" bS = len(l_n) l_n_max", "len(sub_tok) if i < len(hds1)-1: tokens.append(\"[SEP]\") segment_ids.append(0) elif i == len(hds1)-1: tokens.append(\"[SEP]\") segment_ids.append(1)", "= [] # print(1111111,nlu_t1,all_hds) for (i, token) in enumerate(nlu_t1): nlu_tt1 += tokenizer.tokenize(token) current_hds1", "# The mask has 1 for real tokens and 0 for padding tokens.", "add headers nodes.append(i) # if not col in columns: if not header in", "] \"\"\" bS = len(l_hs) l_hpu_max = max(l_hpu) num_of_all_hds = sum(l_hs) wemb_h =", "tokenizer.convert_tokens_to_ids(nodes[i]) # print(nodes[i],masks[i]) # print(relations) # print(nodes,relations) # for (i, token) in enumerate(nlu_t1):", "start = t_to_tt_idx_hds11[i] # if i == len(t_to_tt_idx_hds11)-1: # end = l_hpu[cnt] #", "f'bert_config_{bert_type}.json') vocab_file = os.path.join(BERT_PT_PATH, f'vocab_{bert_type}.txt') init_checkpoint = os.path.join(BERT_PT_PATH, f'pytorch_model_{bert_type}.bin') print('bert_config_file', bert_config_file) print('vocab_file', vocab_file)", "tokenization from .bert.modeling import BertConfig, BertModel device = torch.device(\"cuda\" if torch.cuda.is_available() else \"cpu\")", "torch.no_grad(): all_encoder_layer= 
torch.cat([torch.cat(model_bert(i,j)[0],1) for i,j in zip(input_nodes,masks)],0) all_encoder_layer = torch.cat([gnn_encoder1(i.unsqueeze(0))[0][1][0].unsqueeze(0) for i in", "print(nodes[i]) # masks.append([1]*len(nodes[i]) + [0]*(pad_len-len(nodes[i]))) # nodes[i] += ['[PAD]'] * (pad_len-len(nodes[i])) # nodes[i]", "- i_hds11[0]) return l_hpu def get_bert_output(model_bert, tokenizer, nlu_t, hds, max_seq_length): \"\"\" Here, input", "for token in nlu1_tok: tokens.append(token) segment_ids.append(0) i_ed_nlu = len(tokens) tokens.append(\"[SEP]\") segment_ids.append(0) i_hds =", "if col.find(\"id\") != -1: # print('primary') relations[1].append([tb_name[header],columns[col]]) if not (len(nodes) - 1 in", "* len(input_ids1) # 3. Zero-pad up to the sequence length. if len(nlu_t) ==", "'v1': nodes, relations, new_schema = prepare_input_gnn( tokenizer, input_sequence, input_schema, max_seq_length) elif bert_input_version ==", "print('vocab_file', vocab_file) print('init_checkpoint', init_checkpoint) bert_config = BertConfig.from_json_file(bert_config_file) tokenizer = tokenization.FullTokenizer( vocab_file=vocab_file, do_lower_case=do_lower_case) bert_config.print_status()", "nlu [SEP] col1 [SEP] col2 [SEP] ...col-n [SEP] # 2. Generate BERT inputs", "for i in all_encoder_layer],0) # all_encoder_layer = [gnn_encoder1(i.squeeze()) for i in all_encoder_layer] #", "hds, max_seq_length) # all_encoder_layer: BERT outputs from all layers. # pooled_output: output of", "torch.tensor(input_ids, dtype=torch.long).to(device) all_input_mask = torch.tensor(input_mask, dtype=torch.long).to(device) all_segment_ids = torch.tensor(segment_ids, dtype=torch.long).to(device) # 4. 
Generate", "returns first sub-token segement of i-th-1st-level-token for (i, token) in enumerate(nlu_t1): t_to_tt_idx1.append( len(nlu_tt1))", "= input_schema.column_names_surface_form + tables # if len(new_schema) != len(nodes): # print(new_schema,nodes, len(nodes),len(new_schema)) assert", "Only real # tokens are attended to. input_mask1 = [1] * len(input_ids1) #", "len(schema_token_states) == len(input_schema.column_names_embedder_input) if use_gnn: return utterance_states, schema_token_states, relations else: return utterance_states, schema_token_states", "def prepare_input(tokenizer, input_sequence, input_schema, max_seq_length): nlu_t = [] hds = [] nlu_t1 =", "add foreign key relation # relations[0].append([tb_name[header],columns[col]]) else: # column id columns[col] = len(nodes)", "None or 1st-level tokenized headers :param max_seq_length: max input token length OUTPUT tokens:", "hds11.split(): t_to_tt_idx_hds11.append(len(sub_tok)) sub_tok += tokenizer.tokenize(sub_tok1) t_to_tt_idx_hds1.append(t_to_tt_idx_hds11) tokens += sub_tok i_ed_hd = len(tokens) i_hds.append((i_st_hd,", "in input_nodes],0) if len(nodes) <=1: print(input_schema.column_names_embedder_input) print(input_schema.num_col) print(input_sequence) assert len(nodes) > 1 assert", "\"\"\" Here, input is toknized further by WordPiece (WP) tokenizer and fed into", "# [CLS] nlu [SEP] col1 [SEP] col2 [SEP] ...col-n [SEP] # 2. 
Generate", "in all_hds: new_hds1 = current_hds1 + [hds1] tokens1, segment_ids1, i_nlu1, i_hds1, t_to_tt_idx_hds1 =", "= [hds1] current_table = hds1_table if len(current_hds1) > 0: tokens1, segment_ids1, i_nlu1, i_hds1,", "i, hds11 in enumerate(hds1): i_st_hd = len(tokens) t_to_tt_idx_hds11 = [] sub_tok = []", "= max(max_seq_length, len(segment_ids1)) nlu_t.append(nlu_t1) hds.append(current_hds1) return nlu_t, hds, max_seq_length def get_gnn_encoding(tokenizer,model_bert,input_sequence,input_schema,gnn,gnn_encoder1,embedder=None,bert_input_version='v1',num_out_layers_h=1, max_seq_length=512,num_out_layers_n=1): #", "# cnt += 1 # schema_token_states1 = [] # for i in range(len(t_to_tt_idx_hds11)):", "intput tokens # i_nlu: start and end indices of question in tokens #", "device = torch.device(\"cuda\" if torch.cuda.is_available() else \"cpu\") def get_bert(params): BERT_PT_PATH = './model/bert/data/annotated_wikisql_and_PyTorch_bert_param' map_bert_type_abb", "-1 # print('key') relations[2].append([tb_name[header],columns[col]]) # add foreign key relation else: # column id", "# get the wemb wemb_n = get_wemb_n(i_nlu, l_n, bert_config.hidden_size, bert_config.num_hidden_layers, all_encoder_layer, num_out_layers_n) wemb_h", "schema_token_states = [] cnt = -1 for t_to_tt_idx_hds1 in t_to_tt_idx_hds: for t_to_tt_idx_hds11 in", "print(foreign_idx[0]) foreign_key[foreign_idx.index(len(nodes)-1)] = tb_name[header] ## NOTE: foreign key relation can be column to", "with index in nodes as value # take redundancy for foreign key if", "print(22222222,len(input_schema.column_names_embedder_input),input_schema.column_names_embedder_input,input_schema.column_names_surface_form) utterance_states = [] for i in range(len(t_to_tt_idx)): start = t_to_tt_idx[i] if i", "= [] for b, nlu_t1 in enumerate(nlu_t): hds1 = hds[b] l_hs.append(len(hds1)) # 1.", "of columns * # of batch_size i_hds = [(17, 18), (19, 21), (22,", "1: max_seq_length = len(input_ids1) while len(input_ids1) < 
max_seq_length: input_ids1.append(0) input_mask1.append(0) segment_ids1.append(0) assert len(input_ids1)", "== 1: max_seq_length = len(input_ids1) while len(input_ids1) < max_seq_length: input_ids1.append(0) input_mask1.append(0) segment_ids1.append(0) assert", "len(input_ids1) # 3. Zero-pad up to the sequence length. if len(nlu_t) == 1:", "BertModel device = torch.device(\"cuda\" if torch.cuda.is_available() else \"cpu\") def get_bert(params): BERT_PT_PATH = './model/bert/data/annotated_wikisql_and_PyTorch_bert_param'", "+= [1] * len(sub_tok) if i < len(hds1)-1: tokens.append(\"[SEP]\") segment_ids.append(0) elif i ==", "if i == len(t_to_tt_idx)-1: end = l_n[0] else: end = t_to_tt_idx[i+1] utterance_states.append(torch.mean(wemb_n[:,start:end,:], dim=[0,1]))", "= [ i for i,j in input_schema.table_schema['foreign_keys']] primary_idx = [ i for i", "in input_schema.column_names_embedder_input): input_schema.column_names_embedder_input += input_schema.table_schema['table_names'] input_schema.num_col += len(input_schema.table_schema['table_names']) input_schema.column_names_surface_form += [i.lower() for i", "model_bert, tokenizer, nlu_t, hds, max_seq_length, num_out_layers_n, num_out_layers_h) # t_to_tt_idx = t_to_tt_idx[0] # assert", "for each schema items relations = [torch.tensor(i, dtype=torch.long).to(device) for i in relations] #", "+= 1 schema_token_states1 = [] for i in range(len(t_to_tt_idx_hds11)): start = t_to_tt_idx_hds11[i] if", "i,j in input_schema.table_schema['foreign_keys']] primary_idx = [ i for i in input_schema.table_schema['primary_keys']] foreign_key =", "# assert len(utterance_states) == len(input_sequence) # schema_token_states = [] # cnt = -1", "print(output) # wemb_n, wemb_h, l_n, l_hpu, l_hs, nlu_tt, t_to_tt_idx, tt_to_t_idx, t_to_tt_idx_hds = get_wemb_bert(bert_config,", "if len(nodes) <=1: print(input_schema.column_names_embedder_input) print(input_schema.num_col) print(input_sequence) assert len(nodes) > 1 assert len(relations[0]) >", 
"deepcopy import torch import torch.nn as nn import torch.nn.functional as F from .gated_graph_conv", "in range(len(t_to_tt_idx)): start = t_to_tt_idx[i] if i == len(t_to_tt_idx)-1: end = l_n[0] else:", "1 # schema_token_states1 = [] # for i in range(len(t_to_tt_idx_hds11)): # start =", "Generate BERT output. all_encoder_layer, pooled_output = model_bert(all_input_ids, all_segment_ids, all_input_mask) # 5. generate l_hpu", "i-th-1st-level-token in all_tokens. nlu_tt1 = [] # all_doc_tokens[ orig_to_tok_idx[i] ] returns first sub-token", "torch.zeros([num_of_all_hds, l_hpu_max, hS * num_out_layers_h]).to(device) # print('wemb_h: [num_of_all_hds, l_hpu_max, hS * num_out_layers_h] =", "from bert all_encoder_layer, pooled_output, tokens, i_nlu, i_hds,\\ l_n, l_hpu, l_hs, \\ nlu_tt, t_to_tt_idx,", "not header in tables: tables.append(header) tb_name[header] = len(tables) -1 #columns[col]= len(nodes)-1 # add", "bert_input_version='v1', gnn=None ,use_gnn=True, max_seq_length=512, num_out_layers_n=1, num_out_layers_h=1): # NOTE: add gnn above final output", "later. i_hds = [] doc_tokens = [] nlu_tt = [] t_to_tt_idx = []", "map_location='cpu')) print(\"Load pre-trained parameters.\") model_bert.to(device) return model_bert, tokenizer, bert_config def generate_inputs(tokenizer, nlu1_tok, hds1):", "name . 
column tables = [] tb_name = {} # index of header", "all_encoder_layer[i_layer][b, i_hds11[0]:i_hds11[1],:] return wemb_h def get_wemb_bert(bert_config, model_bert, tokenizer, nlu_t, hds, max_seq_length, num_out_layers_n=1, num_out_layers_h=1):", "init_checkpoint = os.path.join(BERT_PT_PATH, f'pytorch_model_{bert_type}.bin') print('bert_config_file', bert_config_file) print('vocab_file', vocab_file) print('init_checkpoint', init_checkpoint) bert_config = BertConfig.from_json_file(bert_config_file)", "# Input masks # The mask has 1 for real tokens and 0", "print('init_checkpoint', init_checkpoint) bert_config = BertConfig.from_json_file(bert_config_file) tokenizer = tokenization.FullTokenizer( vocab_file=vocab_file, do_lower_case=do_lower_case) bert_config.print_status() model_bert =", "for i in all_hds: # print(i.split('.')) if i != \"*\" and len(i.split('.')) >", "[] nlu_tt = [] t_to_tt_idx = [] tt_to_t_idx = [] t_to_tt_idx_hds = []", "nlu_tt, t_to_tt_idx, tt_to_t_idx, t_to_tt_idx_hds = get_wemb_bert(bert_config, model_bert, tokenizer, nlu_t, hds, max_seq_length, num_out_layers_n, num_out_layers_h)", "for i in input_schema.table_schema['table_names_original']] nodes, relations, new_schema = prepare_input_gnn( tokenizer, input_sequence, input_schema, max_seq_length)", "l_hpu, l_hs, hS, num_hidden_layers, all_encoder_layer, num_out_layers_h): \"\"\" As if [ [table-1-col-1-tok1, t1-c1-t2, ...],", "len(input_schema.column_names_embedder_input) assert list(wemb_h.size())[0] == len(input_schema.column_names_embedder_input) # print(22222222,len(input_schema.column_names_embedder_input),input_schema.column_names_embedder_input,input_schema.column_names_surface_form) utterance_states = [] for i in", "i_hds, \\ l_n, l_hpu, l_hs, \\ nlu_tt, t_to_tt_idx, tt_to_t_idx, t_to_tt_idx_hds def get_wemb_n(i_nlu, l_n,", "= tokenizer.convert_tokens_to_ids(tokens1) # Input masks # The mask has 1 for real tokens", "print(nodes[i],masks[i]) # print(relations) # print(nodes,relations) # 
for (i, token) in enumerate(nlu_t1): # nlu_tt1", "return wemb_n, wemb_h, l_n, l_hpu, l_hs, \\ nlu_tt, t_to_tt_idx, tt_to_t_idx, t_to_tt_idx_hds def prepare_input(tokenizer,", "- 1 - i_noln st = i_noln * hS ed = (i_noln +", "input_mask1.append(0) segment_ids1.append(0) assert len(input_ids1) == max_seq_length assert len(input_mask1) == max_seq_length assert len(segment_ids1) ==", "input tokens nlu_tt: WP-tokenized input natural language questions orig_to_tok_index: map the index of", "* (pad_len-len(nodes[i])) nodes[i] = tokenizer.convert_tokens_to_ids(nodes[i]) # print(nodes[i],masks[i]) # print(relations) # print(nodes,relations) # for", "dtype=torch.long).to(device) for i in relations] # print(333333,relations, all_encoder_layer.size()) output = [i for i", "1. 2nd tokenization using WordPiece tt_to_t_idx1 = [] # number indicates where sub-token", "hds = [] nlu_t1 = input_sequence # segmented question all_hds = input_schema.column_names_surface_form #", "token) in enumerate(nlu_t1): # nlu_tt1 += tokenizer.tokenize(token) # current_hds1 = [] # for", "l_hpu_max, hS * num_out_layers_h] = ', wemb_h.size()) b_pu = -1 for b, i_hds1", "nlu_tt1, current_hds1) max_seq_length = max(max_seq_length, len(segment_ids1)) nlu_t.append(nlu_t1) hds.append(current_hds1) return nlu_t, hds, max_seq_length def", "generate_inputs(tokenizer, nlu_tt1, new_hds1) if len(segment_ids1) > max_seq_length: nlu_t.append(nlu_t1) hds.append(current_hds1) current_hds1 = [hds1] else:", "[ i for i,j in input_schema.table_schema['foreign_keys']] primary_idx = [ i for i in", "gen_l_hpu(i_hds): \"\"\" # Treat columns as if it is a batch of natural", "wemb_h = torch.zeros([num_of_all_hds, l_hpu_max, hS * num_out_layers_h]).to(device) # print('wemb_h: [num_of_all_hds, l_hpu_max, hS *", "i in range(len(t_to_tt_idx_hds11)): # start = t_to_tt_idx_hds11[i] # if i == len(t_to_tt_idx_hds11)-1: #", "= [] tb_name = {} # index of header in node - len(nodes)", "# segmented question all_hds = 
input_schema.column_names_embedder_input # table name . column tables =", "all_encoder_layer][0]) # all_encoder_layer = torch.cat([gnn_encoder1(i.unsqueeze(0))[0][1][0].unsqueeze(0) for i in all_encoder_layer],0) # all_encoder_layer = [gnn_encoder1(i.squeeze())", "table name.column tables = [] tb_name = {} # index of header in", "i_hds11[0]), st:ed] \\ = all_encoder_layer[i_layer][b, i_hds11[0]:i_hds11[1],:] return wemb_h def get_wemb_bert(bert_config, model_bert, tokenizer, nlu_t,", "t_to_tt_idx_hds11[i+1] schema_token_states1.append(torch.mean(wemb_h[cnt,start:end,:], dim=0)) assert len(schema_token_states1) == len(input_schema.column_names_embedder_input[cnt].split()) schema_token_states.append(schema_token_states1) assert len(schema_token_states) == len(input_schema.column_names_embedder_input) if", "os, json import random as rd from copy import deepcopy import torch import", "{'uS': 'uncased_L-12_H-768_A-12', 'uL': 'uncased_L-24_H-1024_A-16', 'cS': 'cased_L-12_H-768_A-12', 'cL': 'cased_L-24_H-1024_A-16', 'mcS': 'multi_cased_L-12_H-768_A-12'} bert_type = map_bert_type_abb[params.bert_type_abb]", "len(segment_ids1) > max_seq_length: nlu_t.append(nlu_t1) hds.append(current_hds1) current_hds1 = [hds1] else: current_hds1 = new_hds1 if", "in range(bS): # [B, max_len, dim] # Fill zero for non-exist part. l_n1", "[t1-c2-t1, t1-c2-t2, ...]. ... 
[t2-c1-t1, ...,] ] \"\"\" bS = len(l_hs) l_hpu_max =", "if it is a batch of natural language utterance with batch-size = #", "# relations = input_schema.relations # TODO: feed into gnn and return embedding #", "else: # current_hds1 = new_hds1 # if len(current_hds1) > 0: # nlu_t.append(nlu_t1) #", "print('primary') relations[1].append([tb_name[header],columns[col]]) if not (len(nodes) - 1 in foreign_idx or len(nodes)-1 in primary_idx):", "in relations] # print(333333,relations, all_encoder_layer.size()) output = [i for i in gnn(all_encoder_layer,relations)] #", "i in nodes] masks = torch.tensor(masks, dtype=torch.long).to(device) with torch.no_grad(): all_encoder_layer= torch.cat([torch.cat(model_bert(i,j)[0],1) for i,j", "start = t_to_tt_idx_hds11[i] if i == len(t_to_tt_idx_hds11)-1: end = l_hpu[cnt] else: end =", "l_hs.append(len(hds1)) # 1. 2nd tokenization using WordPiece tt_to_t_idx1 = [] # number indicates", "[] current_table = '' for hds1 in all_hds: hds1_table = hds1.split('.')[0].strip() if hds1_table", "b, nlu_t1 in enumerate(nlu_t): hds1 = hds[b] l_hs.append(len(hds1)) # 1. 2nd tokenization using", "# print(input_schema.table_schema['foreign_keys']) relations = [[],[],[]] # three edge types, we use tb_name.col as", "= [] sub_tok = [] for sub_tok1 in hds11.split(): t_to_tt_idx_hds11.append(len(sub_tok)) sub_tok += tokenizer.tokenize(sub_tok1)", "for b in range(bS): # [B, max_len, dim] # Fill zero for non-exist", "... 
[t2-c1-t1, ...,] ] \"\"\" bS = len(l_hs) l_hpu_max = max(l_hpu) num_of_all_hds =", "[i.lower() for i in input_schema.table_schema['table_names_original']] nodes, relations, new_schema = prepare_input_gnn( tokenizer, input_sequence, input_schema,", "= i_nolh * hS ed = (i_nolh + 1) * hS wemb_h[b_pu, 0:(i_hds11[1]", "print('wemb_n: [bS, l_n_max, hS * num_out_layers_n] = ', bS, l_n_max, hS * num_out_layers_n)", "[] nlu_t1 = input_sequence # segmented question all_hds = input_schema.column_names_embedder_input # table name", "questions orig_to_tok_index: map the index of 1st-level-token to the index of 2nd-level-token tok_to_orig_index:", "0:(i_nlu1[1] - i_nlu1[0]), st:ed] = all_encoder_layer[i_layer][b, i_nlu1[0]:i_nlu1[1], :] return wemb_n def get_wemb_h(i_hds, l_hpu,", "type \"\"\" nlu_t = [] hds = [] nlu_t1 = input_sequence # segmented", "tables for i in relations: for j in i: j[0] += base #", "f'pytorch_model_{bert_type}.bin') print('bert_config_file', bert_config_file) print('vocab_file', vocab_file) print('init_checkpoint', init_checkpoint) bert_config = BertConfig.from_json_file(bert_config_file) tokenizer = tokenization.FullTokenizer(", "= max(max_seq_length, len(segment_ids1)) nlu_t.append(nlu_t1) hds.append(current_hds1) current_hds1 = [hds1] current_table = hds1_table if len(current_hds1)", "nodes, masks, new_schema def prepare_input_v2(tokenizer, input_sequence, input_schema): nlu_t = [] hds = []", "the index of 1st-level-token to the index of 2nd-level-token tok_to_orig_index: inverse map. 
\"\"\"", "+= base # tokenize nodes to feed into model masks = [] new_schema", "# nlu_t.append(nlu_t1) # hds.append(current_hds1) # current_hds1 = [hds1] # else: # current_hds1 =", "and end indices of headers # get the wemb wemb_n = get_wemb_n(i_nlu, l_n,", "== max_seq_length assert len(input_mask1) == max_seq_length assert len(segment_ids1) == max_seq_length input_ids.append(input_ids1) tokens.append(tokens1) segment_ids.append(segment_ids1)", "exit(0) for i in relations: for j in i: j[0] += base #", "nlu_t, hds, max_seq_length, num_out_layers_n, num_out_layers_h) t_to_tt_idx = t_to_tt_idx[0] assert len(t_to_tt_idx) == len(input_sequence) assert", "b in range(bS): # [B, max_len, dim] # Fill zero for non-exist part.", "for * # nodes += tables base = len(nodes) nodes += tables for", "nlu_t.append(nlu_t1) # hds.append(current_hds1) return nodes,relations, new_schema def prepare_input_gnn2(schema,tokenizer): nodes = schema.nodes masks =", "if i == len(t_to_tt_idx_hds11)-1: end = l_hpu[cnt] else: end = t_to_tt_idx_hds11[i+1] schema_token_states1.append(torch.mean(wemb_h[cnt,start:end,:], dim=0))", "+= input_schema.table_schema['table_names'] input_schema.num_col += len(input_schema.table_schema['table_names']) input_schema.column_names_surface_form += [i.lower() for i in input_schema.table_schema['table_names_original']] nodes,", "nlu_t, hds, max_seq_length, num_out_layers_n, num_out_layers_h) # t_to_tt_idx = t_to_tt_idx[0] # assert len(t_to_tt_idx) ==", "t_to_tt_idx_hds11[i] # if i == len(t_to_tt_idx_hds11)-1: # end = l_hpu[cnt] # else: #", "INPUT :param model_bert: :param tokenizer: WordPiece toknizer :param nlu: Question :param nlu_t: CoreNLP", "dim=[0,1])) assert len(utterance_states) == len(input_sequence) schema_token_states = [] cnt = -1 for t_to_tt_idx_hds1", "== i_nlu[0][1] - i_nlu[0][0] return all_encoder_layer, pooled_output, tokens, i_nlu, i_hds, \\ l_n, l_hpu,", "= tokenizer.tokenize(nodes[i]) masks.append([1]*len(nodes[i]) + [0]*(pad_len-len(nodes[i]))) 
nodes[i] += ['[PAD]'] * (pad_len-len(nodes[i])) nodes[i] = tokenizer.convert_tokens_to_ids(nodes[i])", "primary_idx = [ i for i in input_schema.table_schema['primary_keys']] foreign_key = [-1] * len(foreign_idx)", "[] # print(1111111,nlu_t1,all_hds) for (i, token) in enumerate(nlu_t1): nlu_tt1 += tokenizer.tokenize(token) current_hds1 =", "i_nlu[0][1] - i_nlu[0][0] return all_encoder_layer, pooled_output, tokens, i_nlu, i_hds, \\ l_n, l_hpu, l_hs,", "for i,j in input_schema.table_schema['foreign_keys']] primary_idx = [ i for i in input_schema.table_schema['primary_keys']] foreign_key", "all_doc_tokens are further tokenized using WordPiece tokenizer nlu_tt.append(nlu_tt1) tt_to_t_idx.append(tt_to_t_idx1) t_to_tt_idx.append(t_to_tt_idx1) l_n.append(len(nlu_tt1)) # [CLS]", "nlu_t = [] hds = [] max_seq_length = 0 nlu_t1 = input_sequence all_hds", "= [] for i in range(len(nodes)): new_schema.append(nodes[i]) # print(nodes[i]) nodes[i] = tokenizer.tokenize(nodes[i]) masks.append([1]*len(nodes[i])", "i_nolh * hS ed = (i_nolh + 1) * hS wemb_h[b_pu, 0:(i_hds11[1] -", "to tensor all_input_ids = torch.tensor(input_ids, dtype=torch.long).to(device) all_input_mask = torch.tensor(input_mask, dtype=torch.long).to(device) all_segment_ids = torch.tensor(segment_ids,", "of each tokens. \"\"\" bS = len(l_n) l_n_max = max(l_n) # print('wemb_n: [bS,", "max_seq_length=512, num_out_layers_n=1, num_out_layers_h=1): # NOTE: add gnn above final output layer #add input", "all layers. # pooled_output: output of [CLS] vec. 
# tokens: BERT intput tokens", "= [] # number indicates where sub-token belongs to in 1st-level-tokens (here, CoreNLP).", "# schema_token_states1.append(torch.mean(wemb_h[cnt,start:end,:], dim=0)) # assert len(schema_token_states1) == len(input_schema.column_names_embedder_input[cnt].split()) # schema_token_states.append(schema_token_states1) # assert len(schema_token_states)", "= t_to_tt_idx[i+1] utterance_states.append(torch.mean(wemb_n[:,start:end,:], dim=[0,1])) assert len(utterance_states) == len(input_sequence) schema_token_states = [] cnt =", "= {'uS': 'uncased_L-12_H-768_A-12', 'uL': 'uncased_L-24_H-1024_A-16', 'cS': 'cased_L-12_H-768_A-12', 'cL': 'cased_L-24_H-1024_A-16', 'mcS': 'multi_cased_L-12_H-768_A-12'} bert_type =", "outputs from all layers. # pooled_output: output of [CLS] vec. # tokens: BERT", "= [] for i in range(len(t_to_tt_idx_hds11)): start = t_to_tt_idx_hds11[i] if i == len(t_to_tt_idx_hds11)-1:", "hS * num_out_layers_n] = ', bS, l_n_max, hS * num_out_layers_n) wemb_n = torch.zeros([bS,", "all_input_ids = torch.tensor(input_ids, dtype=torch.long).to(device) all_input_mask = torch.tensor(input_mask, dtype=torch.long).to(device) all_segment_ids = torch.tensor(segment_ids, dtype=torch.long).to(device) #", "t_to_tt_idx_hds = get_wemb_bert(bert_config, model_bert, tokenizer, nlu_t, hds, max_seq_length, num_out_layers_n, num_out_layers_h) # t_to_tt_idx =", "dtype=torch.long).to(device) all_segment_ids = torch.tensor(segment_ids, dtype=torch.long).to(device) # 4. Generate BERT output. 
all_encoder_layer, pooled_output =", "+ [0]*(pad_len-len(nodes[i]))) # nodes[i] += ['[PAD]'] * (pad_len-len(nodes[i])) # nodes[i] = tokenizer.convert_tokens_to_ids(nodes[i]) #", "else \"cpu\") def get_bert(params): BERT_PT_PATH = './model/bert/data/annotated_wikisql_and_PyTorch_bert_param' map_bert_type_abb = {'uS': 'uncased_L-12_H-768_A-12', 'uL': 'uncased_L-24_H-1024_A-16',", "nodes[i] += ['[PAD]'] * (pad_len-len(nodes[i])) # nodes[i] = tokenizer.convert_tokens_to_ids(nodes[i]) # print(nodes[i],masks[i]) # print(relations)", "dtype=torch.long).to(device) # 4. Generate BERT output. all_encoder_layer, pooled_output = model_bert(all_input_ids, all_segment_ids, all_input_mask) #", "import torch.nn.functional as F from .gated_graph_conv import GatedGraphConv from .bert import tokenization as", "vector later. i_hds = [] doc_tokens = [] nlu_tt = [] t_to_tt_idx =", "= generate_inputs(tokenizer, nlu_tt1, current_hds1) max_seq_length = max(max_seq_length, len(segment_ids1)) nlu_t.append(nlu_t1) hds.append(current_hds1) return nlu_t, hds,", "hds1_table == current_table: current_hds1.append(hds1) else: tokens1, segment_ids1, i_nlu1, i_hds1, t_to_tt_idx_hds1 = generate_inputs(tokenizer, nlu_tt1,", "t_to_tt_idx[i] # if i == len(t_to_tt_idx)-1: # end = l_n[0] # else: #", "prepare_input_gnn( tokenizer, input_sequence, input_schema, max_seq_length) elif bert_input_version == 'v2': raise(\"not inplemented\") nlu_t, hds,", "l_n, l_hpu, l_hs, nlu_tt, t_to_tt_idx, tt_to_t_idx, t_to_tt_idx_hds = get_wemb_bert(bert_config, model_bert, tokenizer, nlu_t, hds,", "assert len(schema_token_states) == len(input_schema.column_names_embedder_input) return output,new_schema def get_bert_encoding(bert_config, model_bert, tokenizer, input_sequence, input_schema, bert_input_version='v1',", "prepare_input_gnn2(schema,tokenizer): nodes = schema.nodes masks = [] new_schema = [] for i in", "t_to_tt_idx_hds.append(t_to_tt_idx_hds1) input_ids1 = tokenizer.convert_tokens_to_ids(tokens1) # Input masks # 
The mask has 1 for", "if col.find(\"id\") != -1: # print('primary') relations[1].append([tb_name[header],columns[col]]) else: relations[0].append([tb_name[header],columns[col]]) # for * #", "= ', wemb_h.size()) b_pu = -1 for b, i_hds1 in enumerate(i_hds): for b1,", "0: tokens1, segment_ids1, i_nlu1, i_hds1, t_to_tt_idx_hds1 = generate_inputs(tokenizer, nlu_tt1, current_hds1) max_seq_length = max(max_seq_length,", "1 - i_noln st = i_noln * hS ed = (i_noln + 1)", "l_hs, nlu_tt, t_to_tt_idx, tt_to_t_idx, t_to_tt_idx_hds = get_wemb_bert(bert_config, model_bert, tokenizer, nlu_t, hds, max_seq_length, num_out_layers_n,", "enumerate(nlu_t1): t_to_tt_idx1.append( len(nlu_tt1)) # all_doc_tokens[ indicate the start position of original 'white-space' tokens.", "[1] * len(sub_tok) if i < len(hds1)-1: tokens.append(\"[SEP]\") segment_ids.append(0) elif i == len(hds1)-1:", "1 for i_nolh in range(num_out_layers_h): i_layer = num_hidden_layers - 1 - i_nolh st", "table to column (we choose latter one) base = len(nodes) nodes += tables", "end = l_n[0] # else: # end = t_to_tt_idx[i+1] # utterance_states.append(torch.mean(wemb_n[:,start:end,:], dim=[0,1])) #", "l_hpu, l_hs, bert_config.hidden_size, bert_config.num_hidden_layers, all_encoder_layer, num_out_layers_h) return wemb_n, wemb_h, l_n, l_hpu, l_hs, \\", "[hds1] tokens1, segment_ids1, i_nlu1, i_hds1, t_to_tt_idx_hds1 = generate_inputs(tokenizer, nlu_tt1, new_hds1) if len(segment_ids1) >", "print(relations) # print(nodes,relations) # for (i, token) in enumerate(nlu_t1): # nlu_tt1 += tokenizer.tokenize(token)", "in i: j[0] += base # tokenize nodes to feed into model masks", "part. l_n1 = l_n[b] i_nlu1 = i_nlu[b] for i_noln in range(num_out_layers_n): i_layer =", "[] for b, nlu_t1 in enumerate(nlu_t): hds1 = hds[b] l_hs.append(len(hds1)) # 1. 2nd", "get_bert_output(model_bert, tokenizer, nlu_t, hds, max_seq_length) # all_encoder_layer: BERT outputs from all layers. 
#", "bert_config.hidden_size, bert_config.num_hidden_layers, all_encoder_layer, num_out_layers_h) return wemb_n, wemb_h, l_n, l_hpu, l_hs, \\ nlu_tt, t_to_tt_idx,", "input_sequence, input_schema): nlu_t = [] hds = [] max_seq_length = 0 nlu_t1 =", "all_encoder_layer, pooled_output, tokens, i_nlu, i_hds,\\ l_n, l_hpu, l_hs, \\ nlu_tt, t_to_tt_idx, tt_to_t_idx, t_to_tt_idx_hds", "params.bert_type_abb == 'cS' or params.bert_type_abb == 'cL' or params.bert_type_abb == 'mcS': do_lower_case =", "input_ids1.append(0) input_mask1.append(0) segment_ids1.append(0) assert len(input_ids1) == max_seq_length assert len(input_mask1) == max_seq_length assert len(segment_ids1)", "nlu_tt1 += tokenizer.tokenize(token) current_hds1 = [] current_table = '' for hds1 in all_hds:", "= [] segment_ids = [] t_to_tt_idx_hds1 = [] tokens.append(\"[CLS]\") i_st_nlu = len(tokens) #", "= l_hpu[cnt] else: end = t_to_tt_idx_hds11[i+1] schema_token_states1.append(torch.mean(wemb_h[cnt,start:end,:], dim=0)) assert len(schema_token_states1) == len(input_schema.column_names_embedder_input[cnt].split()) schema_token_states.append(schema_token_states1)", "wemb_h, l_n, l_hpu, l_hs, \\ nlu_tt, t_to_tt_idx, tt_to_t_idx, t_to_tt_idx_hds def prepare_input(tokenizer, input_sequence, input_schema,", "all_encoder_layer = [gnn_encoder1(torch.cat(i,1))[0][1] for i in all_encoder_layer] # get hidden layer output as", "[] t_to_tt_idx_hds1 = [] tokens.append(\"[CLS]\") i_st_nlu = len(tokens) # to use it later", "bert_input_version == 'v1': nodes, relations, new_schema = prepare_input_gnn( tokenizer, input_sequence, input_schema, max_seq_length) elif", "['[PAD]'] * (pad_len-len(nodes[i])) # nodes[i] = tokenizer.convert_tokens_to_ids(nodes[i]) # print(nodes[i],masks[i]) # print(relations) # print(nodes,relations)", "# else: # current_hds1 = new_hds1 # if len(current_hds1) > 0: # nlu_t.append(nlu_t1)", "relations[1].append([tb_name[header],columns[col]]) if not (len(nodes) - 1 in foreign_idx or len(nodes)-1 in 
primary_idx): relations[0].append([tb_name[header],columns[col]])", "max_seq_length: nlu_t.append(nlu_t1) hds.append(current_hds1) current_hds1 = [hds1] else: current_hds1 = new_hds1 if len(current_hds1) >", "> 0: nlu_t.append(nlu_t1) hds.append(current_hds1) return nlu_t, hds def prepare_input_gnn0(tokenizer, input_sequence, input_schema, max_seq_length,pad_len=12): \"\"\"", "t_to_tt_idx_hds = get_bert_output(model_bert, tokenizer, nlu_t, hds, max_seq_length) # all_encoder_layer: BERT outputs from all", "header in tables: tables.append(header) tb_name[header] = len(tables) -1 #columns[col]= len(nodes)-1 # add column", "os.path.join(BERT_PT_PATH, f'pytorch_model_{bert_type}.bin') print('bert_config_file', bert_config_file) print('vocab_file', vocab_file) print('init_checkpoint', init_checkpoint) bert_config = BertConfig.from_json_file(bert_config_file) tokenizer =", "non-exist part. l_n1 = l_n[b] i_nlu1 = i_nlu[b] for i_noln in range(num_out_layers_n): i_layer", "return nodes, masks, new_schema def prepare_input_v2(tokenizer, input_sequence, input_schema): nlu_t = [] hds =", "orig_to_tok_index: map the index of 1st-level-token to the index of 2nd-level-token tok_to_orig_index: inverse", "= i_nlu[b] for i_noln in range(num_out_layers_n): i_layer = num_hidden_layers - 1 - i_noln", "enumerate(foreign_key): relations[2][i][0] = item # nodes += tables # print(1111111,input_schema.column_names_surface_form,relations, len(nodes),foreign_key,foreign_idx) # exit(0)", "input_schema.column_names_embedder_input # table name . 
column tables = [] tb_name = {} #", "current_hds1 = [] current_table = '' for hds1 in all_hds: hds1_table = hds1.split('.')[0].strip()", "base # tokenize nodes to feed into model masks = [] new_schema =", "input_mask = [] i_nlu = [] # index to retreive the position of", "len(current_hds1) > 0: # nlu_t.append(nlu_t1) # hds.append(current_hds1) return nodes,relations, new_schema def prepare_input_gnn2(schema,tokenizer): nodes", "dtype=torch.long).to(device) with torch.no_grad(): all_encoder_layer= torch.cat([torch.cat(model_bert(i,j)[0],1) for i,j in zip(input_nodes,masks)],0) all_encoder_layer = torch.cat([gnn_encoder1(i.unsqueeze(0))[0][1][0].unsqueeze(0) for", "= [] for hds1 in all_hds: new_hds1 = current_hds1 + [hds1] tokens1, segment_ids1,", "input_mask1 = [1] * len(input_ids1) # 3. Zero-pad up to the sequence length.", "max_seq_length=512,num_out_layers_n=1): # only get graph encoding without input_sequence dependency nodes=relations=new_schema=None if bert_input_version ==", "segment_ids1, i_nlu1, i_hds1, t_to_tt_idx_hds1 = generate_inputs(tokenizer, nlu_tt1, new_hds1) if len(segment_ids1) > max_seq_length: nlu_t.append(nlu_t1)", "node - len(nodes) columns = {} nodes = [] foreign_idx = [ i", "if col.strip() != '*': # print(header,col) # first add headers nodes.append(i) # if", "t_to_tt_idx, tt_to_t_idx, t_to_tt_idx_hds = get_wemb_bert(bert_config, model_bert, tokenizer, nlu_t, hds, max_seq_length, num_out_layers_n, num_out_layers_h) #", "= tokenizer.tokenize(token) for sub_token in sub_tokens: tt_to_t_idx1.append(i) nlu_tt1.append(sub_token) # all_doc_tokens are further tokenized", "> 0: # nlu_t.append(nlu_t1) # hds.append(current_hds1) return nodes,relations,masks, new_schema def prepare_input_gnn(tokenizer, input_sequence, input_schema,", "# 3. Zero-pad up to the sequence length. 
if len(nlu_t) == 1: max_seq_length", "len(t_to_tt_idx) == len(input_sequence) assert sum(len(t_to_tt_idx_hds1) for t_to_tt_idx_hds1 in t_to_tt_idx_hds) == len(input_schema.column_names_embedder_input) assert list(wemb_h.size())[0]", "i_nlu = (i_st_nlu, i_ed_nlu) return tokens, segment_ids, i_nlu, i_hds, t_to_tt_idx_hds1 def gen_l_hpu(i_hds): \"\"\"", "each tokens. \"\"\" bS = len(l_n) l_n_max = max(l_n) # print('wemb_n: [bS, l_n_max,", "= # of columns * # of batch_size i_hds = [(17, 18), (19,", "== len(input_sequence) assert sum(len(t_to_tt_idx_hds1) for t_to_tt_idx_hds1 in t_to_tt_idx_hds) == len(input_schema.column_names_embedder_input) assert list(wemb_h.size())[0] ==", "len(input_sequence) schema_token_states = [] cnt = -1 for t_to_tt_idx_hds1 in t_to_tt_idx_hds: for t_to_tt_idx_hds11", "== len(t_to_tt_idx_hds11)-1: end = l_hpu[cnt] else: end = t_to_tt_idx_hds11[i+1] schema_token_states1.append(torch.mean(wemb_h[cnt,start:end,:], dim=0)) assert len(schema_token_states1)", "item # nodes += tables # print(1111111,input_schema.column_names_surface_form,relations, len(nodes),foreign_key,foreign_idx) # exit(0) for i in", "all_encoder_layer[i_layer][b, i_nlu1[0]:i_nlu1[1], :] return wemb_n def get_wemb_h(i_hds, l_hpu, l_hs, hS, num_hidden_layers, all_encoder_layer, num_out_layers_h):", "[hds1] current_table = hds1_table if len(current_hds1) > 0: tokens1, segment_ids1, i_nlu1, i_hds1, t_to_tt_idx_hds1", "l_hs, \\ nlu_tt, t_to_tt_idx, tt_to_t_idx, t_to_tt_idx_hds def get_wemb_n(i_nlu, l_n, hS, num_hidden_layers, all_encoder_layer, num_out_layers_n):", "wemb_n = torch.zeros([bS, l_n_max, hS * num_out_layers_n]).to(device) for b in range(bS): # [B,", "l_hpu def get_bert_output(model_bert, tokenizer, nlu_t, hds, max_seq_length): \"\"\" Here, input is toknized further", "l_hs, bert_config.hidden_size, bert_config.num_hidden_layers, all_encoder_layer, num_out_layers_h) return wemb_n, wemb_h, l_n, l_hpu, l_hs, \\ nlu_tt,", "+= tables base = len(nodes) nodes += tables for i 
in relations: for", "masks = [] new_schema = [] for i in range(len(nodes)): new_schema.append(nodes[i]) # print(nodes[i])", "input_schema.table_schema['foreign_keys']] primary_idx = [ i for i in input_schema.table_schema['primary_keys']] foreign_key = [-1] *", "assert len(t_to_tt_idx_hds1) == len(hds1) t_to_tt_idx_hds.append(t_to_tt_idx_hds1) input_ids1 = tokenizer.convert_tokens_to_ids(tokens1) # Input masks # The", "or table to column (we choose latter one) base = len(nodes) nodes +=", "edge types, we use tb_name.col as embedding # print(relations) all_columns = {} #", "model_bert.load_state_dict(torch.load(init_checkpoint, map_location='cpu')) print(\"Load pre-trained parameters.\") model_bert.to(device) return model_bert, tokenizer, bert_config def generate_inputs(tokenizer, nlu1_tok,", "< len(hds1)-1: tokens.append(\"[SEP]\") segment_ids.append(0) elif i == len(hds1)-1: tokens.append(\"[SEP]\") segment_ids.append(1) else: raise EnvironmentError", "segment_ids1, i_nlu1, i_hds1, t_to_tt_idx_hds1 = generate_inputs(tokenizer, nlu_tt1, hds1) assert len(t_to_tt_idx_hds1) == len(hds1) t_to_tt_idx_hds.append(t_to_tt_idx_hds1)", "# column id columns[col] = len(nodes) -1 # assume primary key have \"id\"", "tokens.append(\"[CLS]\") i_st_nlu = len(tokens) # to use it later segment_ids.append(0) for token in", "current_table = hds1_table if len(current_hds1) > 0: tokens1, segment_ids1, i_nlu1, i_hds1, t_to_tt_idx_hds1 =", "length of columns for each batch input_ids = [] tokens = [] segment_ids", "hds1) assert len(t_to_tt_idx_hds1) == len(hds1) t_to_tt_idx_hds.append(t_to_tt_idx_hds1) input_ids1 = tokenizer.convert_tokens_to_ids(tokens1) # Input masks #", "# nlu_tt1 += tokenizer.tokenize(token) # current_hds1 = [] # for hds1 in all_hds:", "= [] max_seq_length = 0 nlu_t1 = input_sequence all_hds = input_schema.column_names_embedder_input nlu_tt1 =", "headers # get the wemb wemb_n = get_wemb_n(i_nlu, l_n, bert_config.hidden_size, bert_config.num_hidden_layers, 
all_encoder_layer, num_out_layers_n)", "hds = [] max_seq_length = 0 nlu_t1 = input_sequence all_hds = input_schema.column_names_embedder_input nlu_tt1", "column tables = [] tb_name = {} # index of header in node", "input_ids = [] tokens = [] segment_ids = [] input_mask = [] i_nlu", "number indicates where sub-token belongs to in 1st-level-tokens (here, CoreNLP). t_to_tt_idx1 = []", "= current_hds1 + [hds1] tokens1, segment_ids1, i_nlu1, i_hds1, t_to_tt_idx_hds1 = generate_inputs(tokenizer, nlu_tt1, new_hds1)", "+= ['[PAD]'] * (pad_len-len(nodes[i])) nodes[i] = tokenizer.convert_tokens_to_ids(nodes[i]) return nodes, masks, new_schema def prepare_input_v2(tokenizer,", "t_to_tt_idx[i+1] # utterance_states.append(torch.mean(wemb_n[:,start:end,:], dim=[0,1])) # assert len(utterance_states) == len(input_sequence) # schema_token_states = []", "and 0 for padding tokens. Only real # tokens are attended to. input_mask1", "= t_to_tt_idx[i] if i == len(t_to_tt_idx)-1: end = l_n[0] else: end = t_to_tt_idx[i+1]", "correspond to foreign key if len(nodes)-1 in foreign_idx: # print(foreign_idx[0]) foreign_key[foreign_idx.index(len(nodes)-1)] = tb_name[header]", "wemb_h.size()) b_pu = -1 for b, i_hds1 in enumerate(i_hds): for b1, i_hds11 in", "t_to_tt_idx_hds1 = generate_inputs(tokenizer, nlu_tt1, new_hds1) # if len(segment_ids1) > max_seq_length: # nlu_t.append(nlu_t1) #", "in t_to_tt_idx_hds: # for t_to_tt_idx_hds11 in t_to_tt_idx_hds1: # cnt += 1 # schema_token_states1", "else: model_bert.load_state_dict(torch.load(init_checkpoint, map_location='cpu')) print(\"Load pre-trained parameters.\") model_bert.to(device) return model_bert, tokenizer, bert_config def generate_inputs(tokenizer,", "'mcS': do_lower_case = False else: do_lower_case = True no_pretraining = False bert_config_file =", "# first add headers nodes.append(i) # if not col in columns: if not", "for i in range(len(t_to_tt_idx)): # start = t_to_tt_idx[i] # if i == len(t_to_tt_idx)-1:", "assert len(input_ids1) == 
max_seq_length assert len(input_mask1) == max_seq_length assert len(segment_ids1) == max_seq_length input_ids.append(input_ids1)", "each batch input_ids = [] tokens = [] segment_ids = [] input_mask =", "get_gnn_encoding(tokenizer,model_bert,input_sequence,input_schema,gnn,gnn_encoder1,embedder=None,bert_input_version='v1',num_out_layers_h=1, max_seq_length=512,num_out_layers_n=1): # only get graph encoding without input_sequence dependency nodes=relations=new_schema=None if bert_input_version", "columns as if it is a batch of natural language utterance with batch-size", "[] segment_ids = [] t_to_tt_idx_hds1 = [] tokens.append(\"[CLS]\") i_st_nlu = len(tokens) # to", "2nd-level-token tok_to_orig_index: inverse map. \"\"\" l_n = [] l_hs = [] # The", "i_ed_nlu) return tokens, segment_ids, i_nlu, i_hds, t_to_tt_idx_hds1 def gen_l_hpu(i_hds): \"\"\" # Treat columns", "all_encoder_layer = all_encoder_layer.permute(2,1,0) # print(all_encoder_layer.size()) # print([gnn_encoder1(i.unsqueeze(0))[0][1][0] for i in all_encoder_layer][0]) # all_encoder_layer", "* # nodes += tables base = len(nodes) nodes += tables for i", "max_seq_length = max(max_seq_length, len(segment_ids1)) nlu_t.append(nlu_t1) hds.append(current_hds1) return nlu_t, hds, max_seq_length def get_gnn_encoding(tokenizer,model_bert,input_sequence,input_schema,gnn,gnn_encoder1,embedder=None,bert_input_version='v1',num_out_layers_h=1, max_seq_length=512,num_out_layers_n=1):", "header in node - len(nodes) columns = {} nodes = [] foreign_idx =", "= schema.nodes masks = [] new_schema = [] for i in range(len(nodes)): new_schema.append(nodes[i])", "# wemb_n, wemb_h, l_n, l_hpu, l_hs, nlu_tt, t_to_tt_idx, tt_to_t_idx, t_to_tt_idx_hds = get_wemb_bert(bert_config, model_bert,", "29), (30, 34)]) \"\"\" l_hpu = [] for i_hds1 in i_hds: for i_hds11", "position of original 'white-space' tokens. 
sub_tokens = tokenizer.tokenize(token) for sub_token in sub_tokens: tt_to_t_idx1.append(i)", "key if len(nodes)-1 in foreign_idx: # print(foreign_idx[0]) foreign_key[foreign_idx.index(len(nodes)-1)] = tb_name[header] ## NOTE: foreign", "input_sequence all_hds = input_schema.column_names_embedder_input nlu_tt1 = [] for (i, token) in enumerate(nlu_t1): nlu_tt1", "i in input_schema.table_schema['table_names_original']] nodes, relations, new_schema = prepare_input_gnn( tokenizer, input_sequence, input_schema, max_seq_length) if", "start = t_to_tt_idx[i] if i == len(t_to_tt_idx)-1: end = l_n[0] else: end =", "BERT intput tokens # i_nlu: start and end indices of question in tokens", "pooled_output, tokens, i_nlu, i_hds, \\ l_n, l_hpu, l_hs, \\ nlu_tt, t_to_tt_idx, tt_to_t_idx, t_to_tt_idx_hds", "# print(relations) # print(nodes,relations) # for (i, token) in enumerate(nlu_t1): # nlu_tt1 +=", "of question in tokens # i_hds: start and end indices of headers #", "in t_to_tt_idx_hds: for t_to_tt_idx_hds11 in t_to_tt_idx_hds1: cnt += 1 schema_token_states1 = [] for", "max input token length OUTPUT tokens: BERT input tokens nlu_tt: WP-tokenized input natural", "return model_bert, tokenizer, bert_config def generate_inputs(tokenizer, nlu1_tok, hds1): tokens = [] segment_ids =", "= t_to_tt_idx_hds11[i] # if i == len(t_to_tt_idx_hds11)-1: # end = l_hpu[cnt] # else:", "i in input_nodes],0) if len(nodes) <=1: print(input_schema.column_names_embedder_input) print(input_schema.num_col) print(input_sequence) assert len(nodes) > 1", "as representation for each schema items relations = [torch.tensor(i, dtype=torch.long).to(device) for i in", "for i in range(len(t_to_tt_idx_hds11)): start = t_to_tt_idx_hds11[i] if i == len(t_to_tt_idx_hds11)-1: end =", "= -1 for b, i_hds1 in enumerate(i_hds): for b1, i_hds11 in enumerate(i_hds1): b_pu", "== 'cS' or params.bert_type_abb == 'cL' or params.bert_type_abb == 'mcS': do_lower_case = False", "...]. ... 
[t2-c1-t1, ...,] ] \"\"\" bS = len(l_hs) l_hpu_max = max(l_hpu) num_of_all_hds", "# print(new_schema,nodes, len(nodes),len(new_schema)) assert len(new_schema) ==len(nodes) for i in range(len(nodes)): # new_schema.append(nodes[i]) #", "max_seq_length: # nlu_t.append(nlu_t1) # hds.append(current_hds1) # current_hds1 = [hds1] # else: # current_hds1", "i_hds.append(i_hds1) # Convert to tensor all_input_ids = torch.tensor(input_ids, dtype=torch.long).to(device) all_input_mask = torch.tensor(input_mask, dtype=torch.long).to(device)", "else: relations[0].append([tb_name[header],columns[col]]) # for * # nodes += tables base = len(nodes) nodes", "- len(nodes) columns = {} nodes = [] foreign_idx = [ i for", "input_sequence, input_schema, bert_input_version='v1', gnn=None ,use_gnn=True, max_seq_length=512, num_out_layers_n=1, num_out_layers_h=1): # NOTE: add gnn above", "nodes += tables # print(1111111,input_schema.column_names_surface_form,relations, len(nodes),foreign_key,foreign_idx) # exit(0) for i in relations: for", "= [] new_schema = [] for i in range(len(nodes)): new_schema.append(nodes[i]) # print(nodes[i]) nodes[i]", "map_bert_type_abb[params.bert_type_abb] if params.bert_type_abb == 'cS' or params.bert_type_abb == 'cL' or params.bert_type_abb == 'mcS':", "max_seq_length = prepare_input_v2(tokenizer, input_sequence, input_schema) # relations = input_schema.relations # TODO: feed into", "= False else: do_lower_case = True no_pretraining = False bert_config_file = os.path.join(BERT_PT_PATH, f'bert_config_{bert_type}.json')", "print([gnn_encoder1(i.unsqueeze(0))[0][1][0] for i in all_encoder_layer][0]) # all_encoder_layer = torch.cat([gnn_encoder1(i.unsqueeze(0))[0][1][0].unsqueeze(0) for i in all_encoder_layer],0)", "+= tokenizer.tokenize(token) # current_hds1 = [] # for hds1 in all_hds: # new_hds1", "st = i_noln * hS ed = (i_noln + 1) * hS wemb_n[b,", "model_bert: :param tokenizer: WordPiece toknizer :param nlu: Question :param nlu_t: CoreNLP tokenized nlu.", 
"do_lower_case=do_lower_case) bert_config.print_status() model_bert = BertModel(bert_config) if no_pretraining: pass else: model_bert.load_state_dict(torch.load(init_checkpoint, map_location='cpu')) print(\"Load pre-trained", "= [] l_hs = [] # The length of columns for each batch", "t_to_tt_idx_hds1 = generate_inputs(tokenizer, nlu_tt1, hds1) assert len(t_to_tt_idx_hds1) == len(hds1) t_to_tt_idx_hds.append(t_to_tt_idx_hds1) input_ids1 = tokenizer.convert_tokens_to_ids(tokens1)", "l_n1 = l_n[b] i_nlu1 = i_nlu[b] for i_noln in range(num_out_layers_n): i_layer = num_hidden_layers", "input_sequence dependency nodes=relations=new_schema=None if bert_input_version == 'v1': nodes, relations, new_schema = prepare_input_gnn( tokenizer,", "layer #add input schema table # print(11111111,input_schema.column_names_embedder_input,input_schema.column_names_surface_form) relations = None if use_gnn: if", "= [] for i in range(len(t_to_tt_idx)): start = t_to_tt_idx[i] if i == len(t_to_tt_idx)-1:", "EnvironmentError i_nlu = (i_st_nlu, i_ed_nlu) return tokens, segment_ids, i_nlu, i_hds, t_to_tt_idx_hds1 def gen_l_hpu(i_hds):", "def get_bert(params): BERT_PT_PATH = './model/bert/data/annotated_wikisql_and_PyTorch_bert_param' map_bert_type_abb = {'uS': 'uncased_L-12_H-768_A-12', 'uL': 'uncased_L-24_H-1024_A-16', 'cS': 'cased_L-12_H-768_A-12',", "\"\"\" Get the representation of each tokens. \"\"\" bS = len(l_n) l_n_max =", "batch input_ids = [] tokens = [] segment_ids = [] input_mask = []", "to the sequence length. 
if len(nlu_t) == 1: max_seq_length = len(input_ids1) while len(input_ids1)", "l_hpu = gen_l_hpu(i_hds) assert len(set(l_n)) == 1 and len(set(i_nlu)) == 1 assert l_n[0]", "= input_schema.column_names_surface_form if len(new_schema) != len(nodes): new_schema = input_schema.column_names_surface_form + tables # if", "schema.nodes masks = [] new_schema = [] for i in range(len(nodes)): new_schema.append(nodes[i]) #", "len(t_to_tt_idx)-1: # end = l_n[0] # else: # end = t_to_tt_idx[i+1] # utterance_states.append(torch.mean(wemb_n[:,start:end,:],", "copy import deepcopy import torch import torch.nn as nn import torch.nn.functional as F", "i_nlu[0][0] return all_encoder_layer, pooled_output, tokens, i_nlu, i_hds, \\ l_n, l_hpu, l_hs, \\ nlu_tt,", "= [] # The length of columns for each batch input_ids = []", "to edge type \"\"\" nlu_t = [] hds = [] nlu_t1 = input_sequence", "False bert_config_file = os.path.join(BERT_PT_PATH, f'bert_config_{bert_type}.json') vocab_file = os.path.join(BERT_PT_PATH, f'vocab_{bert_type}.txt') init_checkpoint = os.path.join(BERT_PT_PATH, f'pytorch_model_{bert_type}.bin')", "header in node - len(nodes) columns = {} nodes = [] relations =", "dtype=torch.long).to(device) all_input_mask = torch.tensor(input_mask, dtype=torch.long).to(device) all_segment_ids = torch.tensor(segment_ids, dtype=torch.long).to(device) # 4. 
Generate BERT", "# current_hds1 = [hds1] # else: # current_hds1 = new_hds1 # if len(current_hds1)", "tt_to_t_idx.append(tt_to_t_idx1) t_to_tt_idx.append(t_to_tt_idx1) l_n.append(len(nlu_tt1)) # [CLS] nlu [SEP] col1 [SEP] col2 [SEP] ...col-n [SEP]", "of i-th-1st-level-token for (i, token) in enumerate(nlu_t1): t_to_tt_idx1.append( len(nlu_tt1)) # all_doc_tokens[ indicate the", "# The length of columns for each batch input_ids = [] tokens =", "'multi_cased_L-12_H-768_A-12'} bert_type = map_bert_type_abb[params.bert_type_abb] if params.bert_type_abb == 'cS' or params.bert_type_abb == 'cL' or", "tokenization using WordPiece tt_to_t_idx1 = [] # number indicates where sub-token belongs to", "segment_ids1, i_nlu1, i_hds1, t_to_tt_idx_hds1 = generate_inputs(tokenizer, nlu_tt1, new_hds1) # if len(segment_ids1) > max_seq_length:", "corresponds to edge type \"\"\" nlu_t = [] hds = [] nlu_t1 =", "table # print(11111111,input_schema.column_names_embedder_input,input_schema.column_names_surface_form) relations = None if use_gnn: if not (input_schema.table_schema['table_names'][0] in input_schema.column_names_embedder_input):", "else: raise EnvironmentError i_nlu = (i_st_nlu, i_ed_nlu) return tokens, segment_ids, i_nlu, i_hds, t_to_tt_idx_hds1", "#add input schema table # print(11111111,input_schema.column_names_embedder_input,input_schema.column_names_surface_form) relations = None if use_gnn: if not", "nodes to feed into model masks = [] new_schema = [] for i", "for i in relations: for j in i: j[0] += base # tokenize", "modified from https://github.com/naver/sqlova import os, json import random as rd from copy import", "i_nlu1, i_hds1, t_to_tt_idx_hds1 = generate_inputs(tokenizer, nlu_tt1, new_hds1) # if len(segment_ids1) > max_seq_length: #", "for i in range(len(nodes)): # new_schema.append(nodes[i]) # print(nodes[i]) nodes[i] = tokenizer.tokenize(nodes[i]) # print(nodes[i])", "relations[2][i][0] = item # nodes += tables # 
print(1111111,input_schema.column_names_surface_form,relations, len(nodes),foreign_key,foreign_idx) # exit(0) for", "t1-c1-t2, ...], [t1-c2-t1, t1-c2-t2, ...]. ... [t2-c1-t1, ...,] ] \"\"\" bS = len(l_hs)", "= tokenizer.convert_tokens_to_ids(nodes[i]) # print(nodes[i],masks[i]) # print(relations) # print(nodes,relations) # for (i, token) in", "\"\"\" Return: Nodes(list of tokenized db items) Return: relations(lists of list of related", "nlu_t, hds, max_seq_length, num_out_layers_n=1, num_out_layers_h=1): # get contextual output of all tokens from", "i_hds1, t_to_tt_idx_hds1 = generate_inputs(tokenizer, nlu_tt1, new_hds1) if len(segment_ids1) > max_seq_length: nlu_t.append(nlu_t1) hds.append(current_hds1) current_hds1", "of list of related columns), inner list corresponds to edge type \"\"\" nlu_t", "# assert list(wemb_h.size())[0] == len(input_schema.column_names_embedder_input) # utterance_states = [] # for i in", "'cased_L-24_H-1024_A-16', 'mcS': 'multi_cased_L-12_H-768_A-12'} bert_type = map_bert_type_abb[params.bert_type_abb] if params.bert_type_abb == 'cS' or params.bert_type_abb ==", "masks = [] ## update new schema new_schema = input_schema.column_names_surface_form if len(new_schema) !=", "[] for i in range(len(nodes)): new_schema.append(nodes[i]) # print(nodes[i]) nodes[i] = tokenizer.tokenize(nodes[i]) masks.append([1]*len(nodes[i]) +", "21), (22, 23), (24, 25), (26, 29), (30, 34)]) \"\"\" l_hpu = []", "# print(123123, all_encoder_layer.size(),type(all_encoder_layer)) # all_encoder_layer = all_encoder_layer.permute(2,1,0) # print(all_encoder_layer.size()) # print([gnn_encoder1(i.unsqueeze(0))[0][1][0] for i", "column name to columns with index in nodes as value # take redundancy", "= input_schema.table_schema['foreign_keys']#foriegn for i,item in enumerate(foreign_key): relations[2][i][0] = item # nodes += tables", "mask has 1 for real tokens and 0 for padding tokens. 
Only real", "# if i == len(t_to_tt_idx)-1: # end = l_n[0] # else: # end", "pass else: model_bert.load_state_dict(torch.load(init_checkpoint, map_location='cpu')) print(\"Load pre-trained parameters.\") model_bert.to(device) return model_bert, tokenizer, bert_config def", "contextual vector later. i_hds = [] doc_tokens = [] nlu_tt = [] t_to_tt_idx", "# all_doc_tokens[ indicate the start position of original 'white-space' tokens. sub_tokens = tokenizer.tokenize(token)", "= len(input_ids1) while len(input_ids1) < max_seq_length: input_ids1.append(0) input_mask1.append(0) segment_ids1.append(0) assert len(input_ids1) == max_seq_length", "= None if not embedder: input_nodes =[ torch.tensor([i], dtype=torch.long).to(device) for i in nodes]", "# table name.column tables = [] tb_name = {} # index of header", "# if i == len(t_to_tt_idx_hds11)-1: # end = l_hpu[cnt] # else: # end", "in node - len(nodes) columns = {} nodes = [] relations = [[],[],[]]", "BertModel(bert_config) if no_pretraining: pass else: model_bert.load_state_dict(torch.load(init_checkpoint, map_location='cpu')) print(\"Load pre-trained parameters.\") model_bert.to(device) return model_bert,", "print(1111111,input_schema.column_names_surface_form,relations, len(nodes),foreign_key,foreign_idx) # exit(0) for i in relations: for j in i: j[0]", "all_tokens. nlu_tt1 = [] # all_doc_tokens[ orig_to_tok_idx[i] ] returns first sub-token segement of", "final output layer #add input schema table # print(11111111,input_schema.column_names_embedder_input,input_schema.column_names_surface_form) relations = None if", "if [ [table-1-col-1-tok1, t1-c1-t2, ...], [t1-c2-t1, t1-c2-t2, ...]. ... 
[t2-c1-t1, ...,] ] \"\"\"", "assert len(utterance_states) == len(input_sequence) schema_token_states = [] cnt = -1 for t_to_tt_idx_hds1 in", "dim=0)) # assert len(schema_token_states1) == len(input_schema.column_names_embedder_input[cnt].split()) # schema_token_states.append(schema_token_states1) # assert len(schema_token_states) == len(input_schema.column_names_embedder_input)", "= [] ## update new schema new_schema = input_schema.column_names_surface_form if len(new_schema) != len(nodes):", "indices of question in tokens # i_hds: start and end indices of headers", "i_hds11[0]:i_hds11[1],:] return wemb_h def get_wemb_bert(bert_config, model_bert, tokenizer, nlu_t, hds, max_seq_length, num_out_layers_n=1, num_out_layers_h=1): #", "i in relations] # print(333333,relations, all_encoder_layer.size()) output = [i for i in gnn(all_encoder_layer,relations)]", "schema_token_states1.append(torch.mean(wemb_h[cnt,start:end,:], dim=0)) # assert len(schema_token_states1) == len(input_schema.column_names_embedder_input[cnt].split()) # schema_token_states.append(schema_token_states1) # assert len(schema_token_states) ==", "[hds1] else: current_hds1 = new_hds1 if len(current_hds1) > 0: nlu_t.append(nlu_t1) hds.append(current_hds1) return nlu_t,", "end = t_to_tt_idx[i+1] utterance_states.append(torch.mean(wemb_n[:,start:end,:], dim=[0,1])) assert len(utterance_states) == len(input_sequence) schema_token_states = [] cnt", "length. 
if len(nlu_t) == 1: max_seq_length = len(input_ids1) while len(input_ids1) < max_seq_length: input_ids1.append(0)", "j[0] += base # tokenize nodes to feed into model masks = []", "num_out_layers_n] = ', bS, l_n_max, hS * num_out_layers_n) wemb_n = torch.zeros([bS, l_n_max, hS", "= t_to_tt_idx[i+1] # utterance_states.append(torch.mean(wemb_n[:,start:end,:], dim=[0,1])) # assert len(utterance_states) == len(input_sequence) # schema_token_states =", "new_hds1 # if len(current_hds1) > 0: # nlu_t.append(nlu_t1) # hds.append(current_hds1) return nodes,relations, new_schema", "pre-trained parameters.\") model_bert.to(device) return model_bert, tokenizer, bert_config def generate_inputs(tokenizer, nlu1_tok, hds1): tokens =", "Zero-pad up to the sequence length. if len(nlu_t) == 1: max_seq_length = len(input_ids1)", "bert_config.print_status() model_bert = BertModel(bert_config) if no_pretraining: pass else: model_bert.load_state_dict(torch.load(init_checkpoint, map_location='cpu')) print(\"Load pre-trained parameters.\")", "1st-level-tokens (here, CoreNLP). t_to_tt_idx1 = [] # orig_to_tok_idx[i] = start index of i-th-1st-level-token", "bert_input_version == 'v2': raise(\"not inplemented\") nlu_t, hds, max_seq_length = prepare_input_v2(tokenizer, input_sequence, input_schema) #", "nlu_t, hds, max_seq_length) # all_encoder_layer: BERT outputs from all layers. # pooled_output: output", "[] segment_ids = [] input_mask = [] i_nlu = [] # index to", "layers. # pooled_output: output of [CLS] vec. 
# tokens: BERT intput tokens #", "max_seq_length input_ids.append(input_ids1) tokens.append(tokens1) segment_ids.append(segment_ids1) input_mask.append(input_mask1) i_nlu.append(i_nlu1) i_hds.append(i_hds1) # Convert to tensor all_input_ids =", "* num_out_layers_h]).to(device) # print('wemb_h: [num_of_all_hds, l_hpu_max, hS * num_out_layers_h] = ', wemb_h.size()) b_pu", "of related columns), inner list corresponds to edge type \"\"\" nlu_t = []", "# print('primary') relations[1].append([tb_name[header],columns[col]]) if not (len(nodes) - 1 in foreign_idx or len(nodes)-1 in", "tokenizer: WordPiece toknizer :param nlu: Question :param nlu_t: CoreNLP tokenized nlu. :param hds:", "tt_to_t_idx = [] t_to_tt_idx_hds = [] for b, nlu_t1 in enumerate(nlu_t): hds1 =", "(i, token) in enumerate(nlu_t1): # nlu_tt1 += tokenizer.tokenize(token) # current_hds1 = [] #", "* hS wemb_h[b_pu, 0:(i_hds11[1] - i_hds11[0]), st:ed] \\ = all_encoder_layer[i_layer][b, i_hds11[0]:i_hds11[1],:] return wemb_h", "sub-token segement of i-th-1st-level-token for (i, token) in enumerate(nlu_t1): t_to_tt_idx1.append( len(nlu_tt1)) # all_doc_tokens[", "len(nodes) -1 # assume primary key have \"id\" if col.find(\"id\") != -1: #", "for i in relations] # print(333333,relations, all_encoder_layer.size()) output = [i for i in", "i_hds1 in enumerate(i_hds): for b1, i_hds11 in enumerate(i_hds1): b_pu += 1 for i_nolh", "= current_hds1 + [hds1] # tokens1, segment_ids1, i_nlu1, i_hds1, t_to_tt_idx_hds1 = generate_inputs(tokenizer, nlu_tt1,", "# for i in range(len(t_to_tt_idx)): # start = t_to_tt_idx[i] # if i ==", "hds1 = hds[b] l_hs.append(len(hds1)) # 1. 
2nd tokenization using WordPiece tt_to_t_idx1 = []", "import torch import torch.nn as nn import torch.nn.functional as F from .gated_graph_conv import", "hds11 in enumerate(hds1): i_st_hd = len(tokens) t_to_tt_idx_hds11 = [] sub_tok = [] for", "l_n.append(len(nlu_tt1)) # [CLS] nlu [SEP] col1 [SEP] col2 [SEP] ...col-n [SEP] # 2.", "for i in nodes] masks = torch.tensor(masks, dtype=torch.long).to(device) with torch.no_grad(): all_encoder_layer= torch.cat([torch.cat(model_bert(i,j)[0],1) for", "use tb_name.col as embedding # print(relations) all_columns = {} # print(1111111,nlu_t1,all_hds) nodes.append('*') for", "# hds.append(current_hds1) return nodes,relations,masks, new_schema def prepare_input_gnn(tokenizer, input_sequence, input_schema, max_seq_length,pad_len=12): \"\"\" Return: Nodes(list", "{} # index of header in node - len(nodes) columns = {} nodes", "# print(relations) all_columns = {} # print(1111111,nlu_t1,all_hds) nodes.append('*') for i in all_hds: #", "i_noln st = i_noln * hS ed = (i_noln + 1) * hS", "i: j[0] += base # tokenize nodes to feed into model masks =", "[] tb_name = {} # index of header in node - len(nodes) columns", "l_n_max, hS * num_out_layers_n) wemb_n = torch.zeros([bS, l_n_max, hS * num_out_layers_n]).to(device) for b", "= [] # index to retreive the position of contextual vector later. i_hds", "masks.append([1]*len(nodes[i]) + [0]*(pad_len-len(nodes[i]))) # nodes[i] += ['[PAD]'] * (pad_len-len(nodes[i])) # nodes[i] = tokenizer.convert_tokens_to_ids(nodes[i])", "columns: # find('id') != -1 # print('key') relations[2].append([tb_name[header],columns[col]]) # add foreign key relation", "all_encoder_layer = torch.cat([gnn_encoder1(i.unsqueeze(0))[0][1][0].unsqueeze(0) for i in all_encoder_layer],0) else: all_encoder_layer = torch.cat([torch.cat([embedder(token).unsqueeze(0) for token", "index of i-th-1st-level-token in all_tokens. 
nlu_tt1 = [] # all_doc_tokens[ orig_to_tok_idx[i] ] returns", "1) * hS wemb_n[b, 0:(i_nlu1[1] - i_nlu1[0]), st:ed] = all_encoder_layer[i_layer][b, i_nlu1[0]:i_nlu1[1], :] return", "+ [hds1] # tokens1, segment_ids1, i_nlu1, i_hds1, t_to_tt_idx_hds1 = generate_inputs(tokenizer, nlu_tt1, new_hds1) #", "(i, token) in enumerate(nlu_t1): t_to_tt_idx1.append( len(nlu_tt1)) # all_doc_tokens[ indicate the start position of", "l_n, hS, num_hidden_layers, all_encoder_layer, num_out_layers_n): \"\"\" Get the representation of each tokens. \"\"\"", "list(wemb_h.size())[0] == len(input_schema.column_names_embedder_input) # utterance_states = [] # for i in range(len(t_to_tt_idx)): #", "max_seq_length, num_out_layers_n, num_out_layers_h) # t_to_tt_idx = t_to_tt_idx[0] # assert len(t_to_tt_idx) == len(input_sequence) #", "num_out_layers_h=1): # get contextual output of all tokens from bert all_encoder_layer, pooled_output, tokens,", "in range(num_out_layers_h): i_layer = num_hidden_layers - 1 - i_nolh st = i_nolh *", "F from .gated_graph_conv import GatedGraphConv from .bert import tokenization as tokenization from .bert.modeling", "new_hds1) # if len(segment_ids1) > max_seq_length: # nlu_t.append(nlu_t1) # hds.append(current_hds1) # current_hds1 =", "num_out_layers_h]).to(device) # print('wemb_h: [num_of_all_hds, l_hpu_max, hS * num_out_layers_h] = ', wemb_h.size()) b_pu =", "nlu_tt1, new_hds1) if len(segment_ids1) > max_seq_length: nlu_t.append(nlu_t1) hds.append(current_hds1) current_hds1 = [hds1] else: current_hds1", "{} # print(1111111,nlu_t1,all_hds) nodes.append('*') for i in all_hds: # print(i.split('.')) if i !=", "torch.cuda.is_available() else \"cpu\") def get_bert(params): BERT_PT_PATH = './model/bert/data/annotated_wikisql_and_PyTorch_bert_param' map_bert_type_abb = {'uS': 'uncased_L-12_H-768_A-12', 'uL':", "nlu_t.append(nlu_t1) hds.append(current_hds1) return nlu_t, hds, max_seq_length def 
get_gnn_encoding(tokenizer,model_bert,input_sequence,input_schema,gnn,gnn_encoder1,embedder=None,bert_input_version='v1',num_out_layers_h=1, max_seq_length=512,num_out_layers_n=1): # only get graph", "for i,j in enumerate(input_schema.table_schema['primary_keys'])]#primary relations[2] = input_schema.table_schema['foreign_keys']#foriegn for i,item in enumerate(foreign_key): relations[2][i][0] =", "l_hpu from i_hds l_hpu = gen_l_hpu(i_hds) assert len(set(l_n)) == 1 and len(set(i_nlu)) ==", "by WordPiece (WP) tokenizer and fed into BERT. INPUT :param model_bert: :param tokenizer:", "col in columns: # find('id') != -1 # print('key') relations[2].append([tb_name[header],columns[col]]) # add foreign", "= [(17, 18), (19, 21), (22, 23), (24, 25), (26, 29), (30, 34)])", "for t_to_tt_idx_hds1 in t_to_tt_idx_hds: # for t_to_tt_idx_hds11 in t_to_tt_idx_hds1: # cnt += 1", "i in all_encoder_layer],0) else: all_encoder_layer = torch.cat([torch.cat([embedder(token).unsqueeze(0) for token in i],0).mean(0).unsqueeze(0) for i", "hds, max_seq_length, num_out_layers_n, num_out_layers_h) t_to_tt_idx = t_to_tt_idx[0] assert len(t_to_tt_idx) == len(input_sequence) assert sum(len(t_to_tt_idx_hds1)", "all_encoder_layer = torch.cat([torch.cat([embedder(token).unsqueeze(0) for token in i],0).mean(0).unsqueeze(0) for i in input_nodes],0) if len(nodes)", "t_to_tt_idx_hds: for t_to_tt_idx_hds11 in t_to_tt_idx_hds1: cnt += 1 schema_token_states1 = [] for i", "i_nlu1, i_hds1, t_to_tt_idx_hds1 = generate_inputs(tokenizer, nlu_tt1, new_hds1) if len(segment_ids1) > max_seq_length: nlu_t.append(nlu_t1) hds.append(current_hds1)", "range(len(t_to_tt_idx_hds11)): # start = t_to_tt_idx_hds11[i] # if i == len(t_to_tt_idx_hds11)-1: # end =", "max_seq_length) elif bert_input_version == 'v2': raise(\"not inplemented\") nlu_t, hds, max_seq_length = prepare_input_v2(tokenizer, input_sequence,", "in enumerate(nlu_t1): t_to_tt_idx1.append( len(nlu_tt1)) # all_doc_tokens[ indicate the start position of original 
'white-space'", "len(current_hds1) > 0: nlu_t.append(nlu_t1) hds.append(current_hds1) return nlu_t, hds def prepare_input_gnn0(tokenizer, input_sequence, input_schema, max_seq_length,pad_len=12):", "is a batch of natural language utterance with batch-size = # of columns", "input_sequence, input_schema) wemb_n, wemb_h, l_n, l_hpu, l_hs, nlu_tt, t_to_tt_idx, tt_to_t_idx, t_to_tt_idx_hds = get_wemb_bert(bert_config,", "# start = t_to_tt_idx[i] # if i == len(t_to_tt_idx)-1: # end = l_n[0]", "input_sequence, input_schema, max_seq_length): nlu_t = [] hds = [] nlu_t1 = input_sequence #", "all_segment_ids, all_input_mask) # 5. generate l_hpu from i_hds l_hpu = gen_l_hpu(i_hds) assert len(set(l_n))", "input_schema.num_col += len(input_schema.table_schema['table_names']) input_schema.column_names_surface_form += [i.lower() for i in input_schema.table_schema['table_names_original']] nodes, relations, new_schema", "= l_n[b] i_nlu1 = i_nlu[b] for i_noln in range(num_out_layers_n): i_layer = num_hidden_layers -", "nlu1_tok: tokens.append(token) segment_ids.append(0) i_ed_nlu = len(tokens) tokens.append(\"[SEP]\") segment_ids.append(0) i_hds = [] for i,", "wemb_n = get_wemb_n(i_nlu, l_n, bert_config.hidden_size, bert_config.num_hidden_layers, all_encoder_layer, num_out_layers_n) wemb_h = get_wemb_h(i_hds, l_hpu, l_hs,", "\"\"\" bS = len(l_n) l_n_max = max(l_n) # print('wemb_n: [bS, l_n_max, hS *", "== 'v1': nodes, relations, new_schema = prepare_input_gnn( tokenizer, input_sequence, input_schema, max_seq_length) elif bert_input_version", "[0]*(pad_len-len(nodes[i]))) nodes[i] += ['[PAD]'] * (pad_len-len(nodes[i])) nodes[i] = tokenizer.convert_tokens_to_ids(nodes[i]) return nodes, masks, new_schema", "wemb_h, l_n, l_hpu, l_hs, nlu_tt, t_to_tt_idx, tt_to_t_idx, t_to_tt_idx_hds = get_wemb_bert(bert_config, model_bert, tokenizer, nlu_t,", "l_n_max = max(l_n) # print('wemb_n: [bS, l_n_max, hS * num_out_layers_n] = ', bS,", "new_hds1 = current_hds1 + [hds1] tokens1, segment_ids1, 
i_nlu1, i_hds1, t_to_tt_idx_hds1 = generate_inputs(tokenizer, nlu_tt1,", "is toknized further by WordPiece (WP) tokenizer and fed into BERT. INPUT :param", "vec. # tokens: BERT intput tokens # i_nlu: start and end indices of", "i_nlu1[0]:i_nlu1[1], :] return wemb_n def get_wemb_h(i_hds, l_hpu, l_hs, hS, num_hidden_layers, all_encoder_layer, num_out_layers_h): \"\"\"", "return nlu_t, hds def prepare_input_gnn0(tokenizer, input_sequence, input_schema, max_seq_length,pad_len=12): \"\"\" Return: Nodes(list of tokenized", "23), (24, 25), (26, 29), (30, 34)]) \"\"\" l_hpu = [] for i_hds1", "end = t_to_tt_idx_hds11[i+1] schema_token_states1.append(torch.mean(wemb_h[cnt,start:end,:], dim=0)) assert len(schema_token_states1) == len(input_schema.column_names_embedder_input[cnt].split()) schema_token_states.append(schema_token_states1) assert len(schema_token_states) ==", "i_hds: for i_hds11 in i_hds1: l_hpu.append(i_hds11[1] - i_hds11[0]) return l_hpu def get_bert_output(model_bert, tokenizer,", "len(nodes),len(new_schema)) assert len(new_schema) ==len(nodes) for i in range(len(nodes)): # new_schema.append(nodes[i]) # print(nodes[i]) nodes[i]", "= i.split('.') # if col.strip() != '*': # print(header,col) # first add headers", "for i_nolh in range(num_out_layers_h): i_layer = num_hidden_layers - 1 - i_nolh st =", "i in input_schema.table_schema['primary_keys']] foreign_key = [-1] * len(foreign_idx) # print(input_schema.table_schema['foreign_keys']) relations = [[],[],[]]", "above final output layer #add input schema table # print(11111111,input_schema.column_names_embedder_input,input_schema.column_names_surface_form) relations = None", "= BertConfig.from_json_file(bert_config_file) tokenizer = tokenization.FullTokenizer( vocab_file=vocab_file, do_lower_case=do_lower_case) bert_config.print_status() model_bert = BertModel(bert_config) if no_pretraining:", "= [] tokens.append(\"[CLS]\") i_st_nlu = len(tokens) # to use it later segment_ids.append(0) for", "', wemb_h.size()) 
b_pu = -1 for b, i_hds1 in enumerate(i_hds): for b1, i_hds11", "range(len(t_to_tt_idx_hds11)): start = t_to_tt_idx_hds11[i] if i == len(t_to_tt_idx_hds11)-1: end = l_hpu[cnt] else: end", "hS wemb_n[b, 0:(i_nlu1[1] - i_nlu1[0]), st:ed] = all_encoder_layer[i_layer][b, i_nlu1[0]:i_nlu1[1], :] return wemb_n def", "i_nlu1, i_hds1, t_to_tt_idx_hds1 = generate_inputs(tokenizer, nlu_tt1, hds1) assert len(t_to_tt_idx_hds1) == len(hds1) t_to_tt_idx_hds.append(t_to_tt_idx_hds1) input_ids1", "#column relations[1] = [[i,j] for i,j in enumerate(input_schema.table_schema['primary_keys'])]#primary relations[2] = input_schema.table_schema['foreign_keys']#foriegn for i,item", "= t_to_tt_idx_hds11[i+1] # schema_token_states1.append(torch.mean(wemb_h[cnt,start:end,:], dim=0)) # assert len(schema_token_states1) == len(input_schema.column_names_embedder_input[cnt].split()) # schema_token_states.append(schema_token_states1) #", "{} nodes = [] foreign_idx = [ i for i,j in input_schema.table_schema['foreign_keys']] primary_idx", "= prepare_input_v2(tokenizer, input_sequence, input_schema) # relations = input_schema.relations # TODO: feed into gnn", "!= -1: # print('primary') relations[1].append([tb_name[header],columns[col]]) else: relations[0].append([tb_name[header],columns[col]]) # for * # nodes +=", "* (pad_len-len(nodes[i])) nodes[i] = tokenizer.convert_tokens_to_ids(nodes[i]) return nodes, masks, new_schema def prepare_input_v2(tokenizer, input_sequence, input_schema):", "b_pu = -1 for b, i_hds1 in enumerate(i_hds): for b1, i_hds11 in enumerate(i_hds1):", "if not header in tables: tables.append(header) tb_name[header] = len(tables) -1 #columns[col]= len(nodes)-1 #", "0: # nlu_t.append(nlu_t1) # hds.append(current_hds1) return nodes,relations, new_schema def prepare_input_gnn2(schema,tokenizer): nodes = schema.nodes", "print(all_encoder_layer.size()) # print([gnn_encoder1(i.unsqueeze(0))[0][1][0] for i in all_encoder_layer][0]) # all_encoder_layer = 
torch.cat([gnn_encoder1(i.unsqueeze(0))[0][1][0].unsqueeze(0) for i", "BERT outputs from all layers. # pooled_output: output of [CLS] vec. # tokens:", "index in nodes as value # take redundancy for foreign key if col", "token) in enumerate(nlu_t1): nlu_tt1 += tokenizer.tokenize(token) current_hds1 = [] current_table = '' for", "CoreNLP). t_to_tt_idx1 = [] # orig_to_tok_idx[i] = start index of i-th-1st-level-token in all_tokens.", "token length OUTPUT tokens: BERT input tokens nlu_tt: WP-tokenized input natural language questions", "pooled_output: output of [CLS] vec. # tokens: BERT intput tokens # i_nlu: start", "wemb_h[b_pu, 0:(i_hds11[1] - i_hds11[0]), st:ed] \\ = all_encoder_layer[i_layer][b, i_hds11[0]:i_hds11[1],:] return wemb_h def get_wemb_bert(bert_config,", "max(max_seq_length, len(segment_ids1)) nlu_t.append(nlu_t1) hds.append(current_hds1) current_hds1 = [hds1] current_table = hds1_table if len(current_hds1) >", "len(t_to_tt_idx)-1: end = l_n[0] else: end = t_to_tt_idx[i+1] utterance_states.append(torch.mean(wemb_n[:,start:end,:], dim=[0,1])) assert len(utterance_states) ==", "[gnn_encoder1(i.squeeze()) for i in all_encoder_layer] # all_encoder_layer = [gnn_encoder1(torch.cat(i,1))[0][1] for i in all_encoder_layer]", "gnn=None ,use_gnn=True, max_seq_length=512, num_out_layers_n=1, num_out_layers_h=1): # NOTE: add gnn above final output layer", "assert len(t_to_tt_idx) == len(input_sequence) assert sum(len(t_to_tt_idx_hds1) for t_to_tt_idx_hds1 in t_to_tt_idx_hds) == len(input_schema.column_names_embedder_input) assert", "len(segment_ids1)) nlu_t.append(nlu_t1) hds.append(current_hds1) return nlu_t, hds, max_seq_length def get_gnn_encoding(tokenizer,model_bert,input_sequence,input_schema,gnn,gnn_encoder1,embedder=None,bert_input_version='v1',num_out_layers_h=1, max_seq_length=512,num_out_layers_n=1): # only get", "def get_bert_output(model_bert, tokenizer, nlu_t, hds, max_seq_length): \"\"\" Here, input is toknized further by", "i_hds1 in i_hds: for 
i_hds11 in i_hds1: l_hpu.append(i_hds11[1] - i_hds11[0]) return l_hpu def", "num_hidden_layers - 1 - i_noln st = i_noln * hS ed = (i_noln", "input_schema, max_seq_length) elif bert_input_version == 'v2': nlu_t, hds, max_seq_length = prepare_input_v2(tokenizer, input_sequence, input_schema)", "= [] nlu_tt = [] t_to_tt_idx = [] tt_to_t_idx = [] t_to_tt_idx_hds =", "return all_encoder_layer, pooled_output, tokens, i_nlu, i_hds, \\ l_n, l_hpu, l_hs, \\ nlu_tt, t_to_tt_idx,", "[hds1] # else: # current_hds1 = new_hds1 # if len(current_hds1) > 0: #", "i_nlu, i_hds,\\ l_n, l_hpu, l_hs, \\ nlu_tt, t_to_tt_idx, tt_to_t_idx, t_to_tt_idx_hds = get_bert_output(model_bert, tokenizer,", "if i != \"*\" and len(i.split('.')) > 1: header,col = i.split('.') # if", "if bert_input_version == 'v1': nlu_t, hds = prepare_input(tokenizer, input_sequence, input_schema, max_seq_length) elif bert_input_version", "of batch_size i_hds = [(17, 18), (19, 21), (22, 23), (24, 25), (26,", "fed into BERT. INPUT :param model_bert: :param tokenizer: WordPiece toknizer :param nlu: Question", "...col-n [SEP] # 2. Generate BERT inputs & indices. tokens1, segment_ids1, i_nlu1, i_hds1,", "of natural language utterance with batch-size = # of columns * # of", "relations = input_schema.relations # TODO: feed into gnn and return embedding # print(relations)", "tokens. Only real # tokens are attended to. input_mask1 = [1] * len(input_ids1)", "< max_seq_length: input_ids1.append(0) input_mask1.append(0) segment_ids1.append(0) assert len(input_ids1) == max_seq_length assert len(input_mask1) == max_seq_length", "i_hds1, t_to_tt_idx_hds1 = generate_inputs(tokenizer, nlu_tt1, current_hds1) max_seq_length = max(max_seq_length, len(segment_ids1)) nlu_t.append(nlu_t1) hds.append(current_hds1) return", "further by WordPiece (WP) tokenizer and fed into BERT. 
INPUT :param model_bert: :param", "TODO: feed into gnn and return embedding # print(relations) # print(2222222,type(input_nodes),input_nodes) masks =", "# print('key') relations[2].append([tb_name[header],columns[col]]) # add foreign key relation else: # column id columns[col]", "# nodes[i] = tokenizer.convert_tokens_to_ids(nodes[i]) # print(nodes[i],masks[i]) # print(relations) # print(nodes,relations) # for (i,", "input_ids1 = tokenizer.convert_tokens_to_ids(tokens1) # Input masks # The mask has 1 for real", "tokenizer.tokenize(nodes[i]) masks.append([1]*len(nodes[i]) + [0]*(pad_len-len(nodes[i]))) nodes[i] += ['[PAD]'] * (pad_len-len(nodes[i])) nodes[i] = tokenizer.convert_tokens_to_ids(nodes[i]) return", "value # take redundancy for foreign key if col in columns: # find('id')", "dim=0)) assert len(schema_token_states1) == len(input_schema.column_names_embedder_input[cnt].split()) schema_token_states.append(schema_token_states1) assert len(schema_token_states) == len(input_schema.column_names_embedder_input) if use_gnn: return", "schema_token_states1 = [] for i in range(len(t_to_tt_idx_hds11)): start = t_to_tt_idx_hds11[i] if i ==", "# three edge types, we use tb_name.col as embedding # print(relations) all_columns =", "* num_out_layers_h] = ', wemb_h.size()) b_pu = -1 for b, i_hds1 in enumerate(i_hds):", "for (i, token) in enumerate(nlu_t1): nlu_tt1 += tokenizer.tokenize(token) current_hds1 = [] for hds1", "nodes[i] += ['[PAD]'] * (pad_len-len(nodes[i])) nodes[i] = tokenizer.convert_tokens_to_ids(nodes[i]) return nodes, masks, new_schema def", "len(l_n) l_n_max = max(l_n) # print('wemb_n: [bS, l_n_max, hS * num_out_layers_n] = ',", "NOTE: add gnn above final output layer #add input schema table # print(11111111,input_schema.column_names_embedder_input,input_schema.column_names_surface_form)", "schema_token_states1 = [] # for i in range(len(t_to_tt_idx_hds11)): # start = t_to_tt_idx_hds11[i] #", "masks.append([1]*len(nodes[i]) + [0]*(pad_len-len(nodes[i]))) 
nodes[i] += ['[PAD]'] * (pad_len-len(nodes[i])) nodes[i] = tokenizer.convert_tokens_to_ids(nodes[i]) return nodes,", "i in all_encoder_layer],0) # all_encoder_layer = [gnn_encoder1(i.squeeze()) for i in all_encoder_layer] # all_encoder_layer", "'cL': 'cased_L-24_H-1024_A-16', 'mcS': 'multi_cased_L-12_H-768_A-12'} bert_type = map_bert_type_abb[params.bert_type_abb] if params.bert_type_abb == 'cS' or params.bert_type_abb", "pooled_output, tokens, i_nlu, i_hds,\\ l_n, l_hpu, l_hs, \\ nlu_tt, t_to_tt_idx, tt_to_t_idx, t_to_tt_idx_hds =", "= input_schema.column_names_embedder_input # table name . column nlu_tt1 = [] # print(1111111,nlu_t1,all_hds) for", "utterance_states = [] for i in range(len(t_to_tt_idx)): start = t_to_tt_idx[i] if i ==", "sum(len(t_to_tt_idx_hds1) for t_to_tt_idx_hds1 in t_to_tt_idx_hds) == len(input_schema.column_names_embedder_input) assert list(wemb_h.size())[0] == len(input_schema.column_names_embedder_input) # print(22222222,len(input_schema.column_names_embedder_input),input_schema.column_names_embedder_input,input_schema.column_names_surface_form)", "= generate_inputs(tokenizer, nlu_tt1, hds1) assert len(t_to_tt_idx_hds1) == len(hds1) t_to_tt_idx_hds.append(t_to_tt_idx_hds1) input_ids1 = tokenizer.convert_tokens_to_ids(tokens1) #", "len(foreign_idx) # print(input_schema.table_schema['foreign_keys']) relations = [[],[],[]] # three edge types, we use tb_name.col", "tokens and 0 for padding tokens. Only real # tokens are attended to.", "# 2. Generate BERT inputs & indices. 
tokens1, segment_ids1, i_nlu1, i_hds1, t_to_tt_idx_hds1 =", "get_wemb_h(i_hds, l_hpu, l_hs, hS, num_hidden_layers, all_encoder_layer, num_out_layers_h): \"\"\" As if [ [table-1-col-1-tok1, t1-c1-t2,", "len(tokens) tokens.append(\"[SEP]\") segment_ids.append(0) i_hds = [] for i, hds11 in enumerate(hds1): i_st_hd =", "not (input_schema.table_schema['table_names'][0] in input_schema.column_names_embedder_input): input_schema.column_names_embedder_input += input_schema.table_schema['table_names'] input_schema.num_col += len(input_schema.table_schema['table_names']) input_schema.column_names_surface_form += [i.lower()", "print(2222222,type(input_nodes),input_nodes) masks = None input_nodes = nodes all_encoder_layer = None if not embedder:", "the sequence length. if len(nlu_t) == 1: max_seq_length = len(input_ids1) while len(input_ids1) <", "# Convert to tensor all_input_ids = torch.tensor(input_ids, dtype=torch.long).to(device) all_input_mask = torch.tensor(input_mask, dtype=torch.long).to(device) all_segment_ids", "feed into model masks = [] ## update new schema new_schema = input_schema.column_names_surface_form", "tokens1, segment_ids1, i_nlu1, i_hds1, t_to_tt_idx_hds1 = generate_inputs(tokenizer, nlu_tt1, hds1) assert len(t_to_tt_idx_hds1) == len(hds1)", "tt_to_t_idx1.append(i) nlu_tt1.append(sub_token) # all_doc_tokens are further tokenized using WordPiece tokenizer nlu_tt.append(nlu_tt1) tt_to_t_idx.append(tt_to_t_idx1) t_to_tt_idx.append(t_to_tt_idx1)", "os.path.join(BERT_PT_PATH, f'vocab_{bert_type}.txt') init_checkpoint = os.path.join(BERT_PT_PATH, f'pytorch_model_{bert_type}.bin') print('bert_config_file', bert_config_file) print('vocab_file', vocab_file) print('init_checkpoint', init_checkpoint) bert_config", "[] tt_to_t_idx = [] t_to_tt_idx_hds = [] for b, nlu_t1 in enumerate(nlu_t): hds1", "in enumerate(input_schema.table_schema['primary_keys'])]#primary relations[2] = input_schema.table_schema['foreign_keys']#foriegn for i,item in enumerate(foreign_key): 
relations[2][i][0] = item #", "nodes = schema.nodes masks = [] new_schema = [] for i in range(len(nodes)):", "len(nodes)-1 # add column name to columns with index in nodes as value", "input is toknized further by WordPiece (WP) tokenizer and fed into BERT. INPUT", "graph encoding without input_sequence dependency nodes=relations=new_schema=None if bert_input_version == 'v1': nodes, relations, new_schema", "Headers :param hs_t: None or 1st-level tokenized headers :param max_seq_length: max input token", "use it later segment_ids.append(0) for token in nlu1_tok: tokens.append(token) segment_ids.append(0) i_ed_nlu = len(tokens)", "# modified from https://github.com/naver/sqlova import os, json import random as rd from copy", "it later segment_ids.append(0) for token in nlu1_tok: tokens.append(token) segment_ids.append(0) i_ed_nlu = len(tokens) tokens.append(\"[SEP]\")", "length OUTPUT tokens: BERT input tokens nlu_tt: WP-tokenized input natural language questions orig_to_tok_index:", "for foreign key if col in columns: # find('id') != -1 # print('key')", "contextual output of all tokens from bert all_encoder_layer, pooled_output, tokens, i_nlu, i_hds,\\ l_n,", "# print(1111111,nlu_t1,all_hds) nodes.append('*') for i in all_hds: # print(i.split('.')) if i != \"*\"", "# print(i.split('.')) if i != \"*\" and len(i.split('.')) > 1: header,col = i.split('.')", "id columns[col] = len(nodes) -1 # assume primary key have \"id\" if col.find(\"id\")", "into model masks = [] new_schema = [] for i in range(len(nodes)): new_schema.append(nodes[i])", "the position of contextual vector later. 
i_hds = [] doc_tokens = [] nlu_tt", "be column to column or table to column (we choose latter one) base", "len(t_to_tt_idx) == len(input_sequence) # assert sum(len(t_to_tt_idx_hds1) for t_to_tt_idx_hds1 in t_to_tt_idx_hds) == len(input_schema.column_names_embedder_input) #", "input_schema.table_schema['foreign_keys']#foriegn for i,item in enumerate(foreign_key): relations[2][i][0] = item # nodes += tables #", "# print(22222222,len(input_schema.column_names_embedder_input),input_schema.column_names_embedder_input,input_schema.column_names_surface_form) utterance_states = [] for i in range(len(t_to_tt_idx)): start = t_to_tt_idx[i] if", "max_seq_length,pad_len=12): \"\"\" Return: Nodes(list of tokenized db items) Return: relations(lists of list of", "end = t_to_tt_idx[i+1] # utterance_states.append(torch.mean(wemb_n[:,start:end,:], dim=[0,1])) # assert len(utterance_states) == len(input_sequence) # schema_token_states", "torch.tensor(input_mask, dtype=torch.long).to(device) all_segment_ids = torch.tensor(segment_ids, dtype=torch.long).to(device) # 4. Generate BERT output. all_encoder_layer, pooled_output", "column id columns[col] = len(nodes) -1 # assume primary key have \"id\" if", "else: end = t_to_tt_idx[i+1] utterance_states.append(torch.mean(wemb_n[:,start:end,:], dim=[0,1])) assert len(utterance_states) == len(input_sequence) schema_token_states = []", "and fed into BERT. 
INPUT :param model_bert: :param tokenizer: WordPiece toknizer :param nlu:", "have \"id\" if col.find(\"id\") != -1: # print('primary') relations[1].append([tb_name[header],columns[col]]) if not (len(nodes) -", "t_to_tt_idx[i] if i == len(t_to_tt_idx)-1: end = l_n[0] else: end = t_to_tt_idx[i+1] utterance_states.append(torch.mean(wemb_n[:,start:end,:],", "tokenized db items) Return: relations(lists of list of related columns), inner list corresponds", "if not (input_schema.table_schema['table_names'][0] in input_schema.column_names_embedder_input): input_schema.column_names_embedder_input += input_schema.table_schema['table_names'] input_schema.num_col += len(input_schema.table_schema['table_names']) input_schema.column_names_surface_form +=", "'white-space' tokens. sub_tokens = tokenizer.tokenize(token) for sub_token in sub_tokens: tt_to_t_idx1.append(i) nlu_tt1.append(sub_token) # all_doc_tokens", "[] nlu_t1 = input_sequence # segmented question all_hds = input_schema.column_names_surface_form # table name.column", "= BertModel(bert_config) if no_pretraining: pass else: model_bert.load_state_dict(torch.load(init_checkpoint, map_location='cpu')) print(\"Load pre-trained parameters.\") model_bert.to(device) return", "# all_doc_tokens[ orig_to_tok_idx[i] ] returns first sub-token segement of i-th-1st-level-token for (i, token)", "input_schema) wemb_n, wemb_h, l_n, l_hpu, l_hs, nlu_tt, t_to_tt_idx, tt_to_t_idx, t_to_tt_idx_hds = get_wemb_bert(bert_config, model_bert,", "items relations = [torch.tensor(i, dtype=torch.long).to(device) for i in relations] # print(333333,relations, all_encoder_layer.size()) output", "from all layers. # pooled_output: output of [CLS] vec. # tokens: BERT intput", "+ [0]*(pad_len-len(nodes[i]))) nodes[i] += ['[PAD]'] * (pad_len-len(nodes[i])) nodes[i] = tokenizer.convert_tokens_to_ids(nodes[i]) # print(nodes[i],masks[i]) #", "= input_sequence # segmented question all_hds = input_schema.column_names_embedder_input # table name . 
column", "representation for each schema items relations = [torch.tensor(i, dtype=torch.long).to(device) for i in relations]", "in enumerate(nlu_t1): # nlu_tt1 += tokenizer.tokenize(token) # current_hds1 = [] # for hds1", "+= tables relations[0] = relations[0] #column relations[1] = [[i,j] for i,j in enumerate(input_schema.table_schema['primary_keys'])]#primary", "= all_encoder_layer.permute(2,1,0) # print(all_encoder_layer.size()) # print([gnn_encoder1(i.unsqueeze(0))[0][1][0] for i in all_encoder_layer][0]) # all_encoder_layer =", "# if len(new_schema) != len(nodes): # print(new_schema,nodes, len(nodes),len(new_schema)) assert len(new_schema) ==len(nodes) for i", "foreign key if len(nodes)-1 in foreign_idx: # print(foreign_idx[0]) foreign_key[foreign_idx.index(len(nodes)-1)] = tb_name[header] ## NOTE:", "# exit(0) for i in relations: for j in i: j[0] += base", "cnt += 1 # schema_token_states1 = [] # for i in range(len(t_to_tt_idx_hds11)): #", "(pad_len-len(nodes[i])) nodes[i] = tokenizer.convert_tokens_to_ids(nodes[i]) return nodes, masks, new_schema def prepare_input_v2(tokenizer, input_sequence, input_schema): nlu_t", "= t_to_tt_idx[i] # if i == len(t_to_tt_idx)-1: # end = l_n[0] # else:", "= [] # for i in range(len(t_to_tt_idx)): # start = t_to_tt_idx[i] # if", ":] return wemb_n def get_wemb_h(i_hds, l_hpu, l_hs, hS, num_hidden_layers, all_encoder_layer, num_out_layers_h): \"\"\" As", "nlu_t, hds, max_seq_length def get_gnn_encoding(tokenizer,model_bert,input_sequence,input_schema,gnn,gnn_encoder1,embedder=None,bert_input_version='v1',num_out_layers_h=1, max_seq_length=512,num_out_layers_n=1): # only get graph encoding without input_sequence", "col2 [SEP] ...col-n [SEP] # 2. Generate BERT inputs & indices. 
tokens1, segment_ids1,", "len(relations[0]) > 0 # print(123123123,all_encoder_layer[0][0].size(),len(all_encoder_layer[0]),len(all_encoder_layer),len(all_encoder_layer[3]),len(all_encoder_layer[10])) # print(123123, all_encoder_layer.size(),type(all_encoder_layer)) # all_encoder_layer = all_encoder_layer.permute(2,1,0) #", "output,new_schema def get_bert_encoding(bert_config, model_bert, tokenizer, input_sequence, input_schema, bert_input_version='v1', gnn=None ,use_gnn=True, max_seq_length=512, num_out_layers_n=1, num_out_layers_h=1):", "len(nodes) nodes += tables relations[0] = relations[0] #column relations[1] = [[i,j] for i,j", "return l_hpu def get_bert_output(model_bert, tokenizer, nlu_t, hds, max_seq_length): \"\"\" Here, input is toknized", "== 'mcS': do_lower_case = False else: do_lower_case = True no_pretraining = False bert_config_file", "max(max_seq_length, len(segment_ids1)) nlu_t.append(nlu_t1) hds.append(current_hds1) return nlu_t, hds, max_seq_length def get_gnn_encoding(tokenizer,model_bert,input_sequence,input_schema,gnn,gnn_encoder1,embedder=None,bert_input_version='v1',num_out_layers_h=1, max_seq_length=512,num_out_layers_n=1): # only", "max_seq_length, num_out_layers_n=1, num_out_layers_h=1): # get contextual output of all tokens from bert all_encoder_layer,", "= [] cnt = -1 for t_to_tt_idx_hds1 in t_to_tt_idx_hds: for t_to_tt_idx_hds11 in t_to_tt_idx_hds1:", "* # of batch_size i_hds = [(17, 18), (19, 21), (22, 23), (24,", "= (i_nolh + 1) * hS wemb_h[b_pu, 0:(i_hds11[1] - i_hds11[0]), st:ed] \\ =", "prepare_input_v2(tokenizer, input_sequence, input_schema): nlu_t = [] hds = [] max_seq_length = 0 nlu_t1", "= tokenizer.tokenize(nodes[i]) # print(nodes[i]) # masks.append([1]*len(nodes[i]) + [0]*(pad_len-len(nodes[i]))) # nodes[i] += ['[PAD]'] *", "t_to_tt_idx_hds1: # cnt += 1 # schema_token_states1 = [] # for i in", "i_hds11[0]) return l_hpu def get_bert_output(model_bert, tokenizer, nlu_t, hds, max_seq_length): \"\"\" Here, input is", "# if 
col.strip() != '*': # print(header,col) # first add headers nodes.append(i) #", "new_schema def prepare_input_gnn(tokenizer, input_sequence, input_schema, max_seq_length,pad_len=12): \"\"\" Return: Nodes(list of tokenized db items)", "= prepare_input_v2(tokenizer, input_sequence, input_schema) wemb_n, wemb_h, l_n, l_hpu, l_hs, nlu_tt, t_to_tt_idx, tt_to_t_idx, t_to_tt_idx_hds", "[] for sub_tok1 in hds11.split(): t_to_tt_idx_hds11.append(len(sub_tok)) sub_tok += tokenizer.tokenize(sub_tok1) t_to_tt_idx_hds1.append(t_to_tt_idx_hds11) tokens += sub_tok", "relations = [[],[],[]] # three edge types, we use tb_name.col as embedding #", "tb_name[header] ## NOTE: foreign key relation can be column to column or table", "tokenizer, input_sequence, input_schema, max_seq_length) if bert_input_version == 'v1': nlu_t, hds = prepare_input(tokenizer, input_sequence,", "for i in range(len(t_to_tt_idx)): start = t_to_tt_idx[i] if i == len(t_to_tt_idx)-1: end =", "vocab_file=vocab_file, do_lower_case=do_lower_case) bert_config.print_status() model_bert = BertModel(bert_config) if no_pretraining: pass else: model_bert.load_state_dict(torch.load(init_checkpoint, map_location='cpu')) print(\"Load", "t_to_tt_idx, tt_to_t_idx, t_to_tt_idx_hds def prepare_input(tokenizer, input_sequence, input_schema, max_seq_length): nlu_t = [] hds =", "for i in all_encoder_layer][0]) # all_encoder_layer = torch.cat([gnn_encoder1(i.unsqueeze(0))[0][1][0].unsqueeze(0) for i in all_encoder_layer],0) #", "# [B, max_len, dim] # Fill zero for non-exist part. 
l_n1 = l_n[b]", "(22, 23), (24, 25), (26, 29), (30, 34)]) \"\"\" l_hpu = [] for", "question in tokens # i_hds: start and end indices of headers # get", "do_lower_case = True no_pretraining = False bert_config_file = os.path.join(BERT_PT_PATH, f'bert_config_{bert_type}.json') vocab_file = os.path.join(BERT_PT_PATH,", "nodes] masks = torch.tensor(masks, dtype=torch.long).to(device) with torch.no_grad(): all_encoder_layer= torch.cat([torch.cat(model_bert(i,j)[0],1) for i,j in zip(input_nodes,masks)],0)", "# nodes[i] += ['[PAD]'] * (pad_len-len(nodes[i])) # nodes[i] = tokenizer.convert_tokens_to_ids(nodes[i]) # print(nodes[i],masks[i]) #", "sub_tok1 in hds11.split(): t_to_tt_idx_hds11.append(len(sub_tok)) sub_tok += tokenizer.tokenize(sub_tok1) t_to_tt_idx_hds1.append(t_to_tt_idx_hds11) tokens += sub_tok i_ed_hd =", "t_to_tt_idx_hds1 = generate_inputs(tokenizer, nlu_tt1, new_hds1) if len(segment_ids1) > max_seq_length: nlu_t.append(nlu_t1) hds.append(current_hds1) current_hds1 =", "relation # relations[0].append([tb_name[header],columns[col]]) else: # column id columns[col] = len(nodes) -1 # assume", "num_out_layers_h) t_to_tt_idx = t_to_tt_idx[0] assert len(t_to_tt_idx) == len(input_sequence) assert sum(len(t_to_tt_idx_hds1) for t_to_tt_idx_hds1 in", "current_hds1) max_seq_length = max(max_seq_length, len(segment_ids1)) nlu_t.append(nlu_t1) hds.append(current_hds1) return nlu_t, hds, max_seq_length def get_gnn_encoding(tokenizer,model_bert,input_sequence,input_schema,gnn,gnn_encoder1,embedder=None,bert_input_version='v1',num_out_layers_h=1,", "all_hds = input_schema.column_names_surface_form # table name.column tables = [] tb_name = {} #", "torch.cat([gnn_encoder1(i.unsqueeze(0))[0][1][0].unsqueeze(0) for i in all_encoder_layer],0) # all_encoder_layer = [gnn_encoder1(i.squeeze()) for i in all_encoder_layer]", "* len(sub_tok) if i < len(hds1)-1: tokens.append(\"[SEP]\") segment_ids.append(0) elif i == len(hds1)-1: tokens.append(\"[SEP]\")", "+= ['[PAD]'] * 
(pad_len-len(nodes[i])) # nodes[i] = tokenizer.convert_tokens_to_ids(nodes[i]) # print(nodes[i],masks[i]) # print(relations) #", "num_of_all_hds = sum(l_hs) wemb_h = torch.zeros([num_of_all_hds, l_hpu_max, hS * num_out_layers_h]).to(device) # print('wemb_h: [num_of_all_hds,", "i == len(t_to_tt_idx_hds11)-1: # end = l_hpu[cnt] # else: # end = t_to_tt_idx_hds11[i+1]", "# new_hds1 = current_hds1 + [hds1] # tokens1, segment_ids1, i_nlu1, i_hds1, t_to_tt_idx_hds1 =", "# get contextual output of all tokens from bert all_encoder_layer, pooled_output, tokens, i_nlu,", "1 assert len(relations[0]) > 0 # print(123123123,all_encoder_layer[0][0].size(),len(all_encoder_layer[0]),len(all_encoder_layer),len(all_encoder_layer[3]),len(all_encoder_layer[10])) # print(123123, all_encoder_layer.size(),type(all_encoder_layer)) # all_encoder_layer =", "using WordPiece tokenizer nlu_tt.append(nlu_tt1) tt_to_t_idx.append(tt_to_t_idx1) t_to_tt_idx.append(t_to_tt_idx1) l_n.append(len(nlu_tt1)) # [CLS] nlu [SEP] col1 [SEP]", "# add column name to columns with index in nodes as value #", "batch-size = # of columns * # of batch_size i_hds = [(17, 18),", "> 0: # nlu_t.append(nlu_t1) # hds.append(current_hds1) return nodes,relations, new_schema def prepare_input_gnn2(schema,tokenizer): nodes =", "-1: # print('primary') relations[1].append([tb_name[header],columns[col]]) else: relations[0].append([tb_name[header],columns[col]]) # for * # nodes += tables", "output. all_encoder_layer, pooled_output = model_bert(all_input_ids, all_segment_ids, all_input_mask) # 5. 
generate l_hpu from i_hds", "= [] tokens = [] segment_ids = [] input_mask = [] i_nlu =", "end = l_n[0] else: end = t_to_tt_idx[i+1] utterance_states.append(torch.mean(wemb_n[:,start:end,:], dim=[0,1])) assert len(utterance_states) == len(input_sequence)", "to columns with index in nodes as value # take redundancy for foreign", "'uL': 'uncased_L-24_H-1024_A-16', 'cS': 'cased_L-12_H-768_A-12', 'cL': 'cased_L-24_H-1024_A-16', 'mcS': 'multi_cased_L-12_H-768_A-12'} bert_type = map_bert_type_abb[params.bert_type_abb] if params.bert_type_abb", "len(i.split('.')) > 1: header,col = i.split('.') # if col.strip() != '*': # print(header,col)", "prepare_input(tokenizer, input_sequence, input_schema, max_seq_length) elif bert_input_version == 'v2': nlu_t, hds, max_seq_length = prepare_input_v2(tokenizer,", "model masks = [] new_schema = [] for i in range(len(nodes)): new_schema.append(nodes[i]) #", "# print('wemb_n: [bS, l_n_max, hS * num_out_layers_n] = ', bS, l_n_max, hS *", "* hS ed = (i_nolh + 1) * hS wemb_h[b_pu, 0:(i_hds11[1] - i_hds11[0]),", "# index of header in node - len(nodes) columns = {} nodes =", "- 1 - i_nolh st = i_nolh * hS ed = (i_nolh +", "i_layer = num_hidden_layers - 1 - i_noln st = i_noln * hS ed", "# pooled_output: output of [CLS] vec. 
# tokens: BERT intput tokens # i_nlu:", "len(nodes)-1 in foreign_idx: # print(foreign_idx[0]) foreign_key[foreign_idx.index(len(nodes)-1)] = tb_name[header] ## NOTE: foreign key relation", "range(len(t_to_tt_idx)): start = t_to_tt_idx[i] if i == len(t_to_tt_idx)-1: end = l_n[0] else: end", "new schema new_schema = input_schema.column_names_surface_form if len(new_schema) != len(nodes): new_schema = input_schema.column_names_surface_form +", "masks = torch.tensor(masks, dtype=torch.long).to(device) with torch.no_grad(): all_encoder_layer= torch.cat([torch.cat(model_bert(i,j)[0],1) for i,j in zip(input_nodes,masks)],0) all_encoder_layer", "i_nlu1, i_hds1, t_to_tt_idx_hds1 = generate_inputs(tokenizer, nlu_tt1, current_hds1) max_seq_length = max(max_seq_length, len(segment_ids1)) nlu_t.append(nlu_t1) hds.append(current_hds1)", "else: all_encoder_layer = torch.cat([torch.cat([embedder(token).unsqueeze(0) for token in i],0).mean(0).unsqueeze(0) for i in input_nodes],0) if", "i == len(t_to_tt_idx_hds11)-1: end = l_hpu[cnt] else: end = t_to_tt_idx_hds11[i+1] schema_token_states1.append(torch.mean(wemb_h[cnt,start:end,:], dim=0)) assert", "wemb_n, wemb_h, l_n, l_hpu, l_hs, \\ nlu_tt, t_to_tt_idx, tt_to_t_idx, t_to_tt_idx_hds def prepare_input(tokenizer, input_sequence,", "schema_token_states = [] # cnt = -1 # for t_to_tt_idx_hds1 in t_to_tt_idx_hds: #", "max_seq_length): nlu_t = [] hds = [] nlu_t1 = input_sequence # segmented question", "i == len(t_to_tt_idx)-1: # end = l_n[0] # else: # end = t_to_tt_idx[i+1]", "nodes = [] foreign_idx = [ i for i,j in input_schema.table_schema['foreign_keys']] primary_idx =", "all_encoder_layer = torch.cat([gnn_encoder1(i.unsqueeze(0))[0][1][0].unsqueeze(0) for i in all_encoder_layer],0) # all_encoder_layer = [gnn_encoder1(i.squeeze()) for i", "in range(len(t_to_tt_idx)): # start = t_to_tt_idx[i] # if i == len(t_to_tt_idx)-1: # end", "i == len(t_to_tt_idx)-1: end = l_n[0] else: end = t_to_tt_idx[i+1] 
utterance_states.append(torch.mean(wemb_n[:,start:end,:], dim=[0,1])) assert", "if i == len(t_to_tt_idx_hds11)-1: # end = l_hpu[cnt] # else: # end =", "num_out_layers_n]).to(device) for b in range(bS): # [B, max_len, dim] # Fill zero for", "tokenizer.tokenize(token) for sub_token in sub_tokens: tt_to_t_idx1.append(i) nlu_tt1.append(sub_token) # all_doc_tokens are further tokenized using", "i_nlu.append(i_nlu1) i_hds.append(i_hds1) # Convert to tensor all_input_ids = torch.tensor(input_ids, dtype=torch.long).to(device) all_input_mask = torch.tensor(input_mask,", "t_to_tt_idx_hds1 = [] tokens.append(\"[CLS]\") i_st_nlu = len(tokens) # to use it later segment_ids.append(0)", "['[PAD]'] * (pad_len-len(nodes[i])) nodes[i] = tokenizer.convert_tokens_to_ids(nodes[i]) return nodes, masks, new_schema def prepare_input_v2(tokenizer, input_sequence,", "nodes[i] = tokenizer.tokenize(nodes[i]) masks.append([1]*len(nodes[i]) + [0]*(pad_len-len(nodes[i]))) nodes[i] += ['[PAD]'] * (pad_len-len(nodes[i])) nodes[i] =", "len(new_schema) != len(nodes): # print(new_schema,nodes, len(nodes),len(new_schema)) assert len(new_schema) ==len(nodes) for i in range(len(nodes)):", "# 1. 2nd tokenization using WordPiece tt_to_t_idx1 = [] # number indicates where", "all_doc_tokens[ orig_to_tok_idx[i] ] returns first sub-token segement of i-th-1st-level-token for (i, token) in", "nlu_t, hds, max_seq_length = prepare_input_v2(tokenizer, input_sequence, input_schema) wemb_n, wemb_h, l_n, l_hpu, l_hs, nlu_tt,", "nlu_t: CoreNLP tokenized nlu. 
:param hds: Headers :param hs_t: None or 1st-level tokenized", "'uncased_L-12_H-768_A-12', 'uL': 'uncased_L-24_H-1024_A-16', 'cS': 'cased_L-12_H-768_A-12', 'cL': 'cased_L-24_H-1024_A-16', 'mcS': 'multi_cased_L-12_H-768_A-12'} bert_type = map_bert_type_abb[params.bert_type_abb] if", "nlu_tt, t_to_tt_idx, tt_to_t_idx, t_to_tt_idx_hds = get_bert_output(model_bert, tokenizer, nlu_t, hds, max_seq_length) # all_encoder_layer: BERT", "= get_bert_output(model_bert, tokenizer, nlu_t, hds, max_seq_length) # all_encoder_layer: BERT outputs from all layers.", "tok_to_orig_index: inverse map. \"\"\" l_n = [] l_hs = [] # The length", "1) * hS wemb_h[b_pu, 0:(i_hds11[1] - i_hds11[0]), st:ed] \\ = all_encoder_layer[i_layer][b, i_hds11[0]:i_hds11[1],:] return", "# nodes += tables # print(1111111,input_schema.column_names_surface_form,relations, len(nodes),foreign_key,foreign_idx) # exit(0) for i in relations:", "l_hpu, l_hs, \\ nlu_tt, t_to_tt_idx, tt_to_t_idx, t_to_tt_idx_hds = get_bert_output(model_bert, tokenizer, nlu_t, hds, max_seq_length)", "dtype=torch.long).to(device) for i in nodes] masks = torch.tensor(masks, dtype=torch.long).to(device) with torch.no_grad(): all_encoder_layer= torch.cat([torch.cat(model_bert(i,j)[0],1)", "i_st_nlu = len(tokens) # to use it later segment_ids.append(0) for token in nlu1_tok:", ":param nlu_t: CoreNLP tokenized nlu. :param hds: Headers :param hs_t: None or 1st-level", "padding tokens. Only real # tokens are attended to. input_mask1 = [1] *", "elif bert_input_version == 'v2': raise(\"not inplemented\") nlu_t, hds, max_seq_length = prepare_input_v2(tokenizer, input_sequence, input_schema)", "real tokens and 0 for padding tokens. 
Only real # tokens are attended", "[] hds = [] nlu_t1 = input_sequence # segmented question all_hds = input_schema.column_names_embedder_input", "+= tables for i in relations: for j in i: j[0] += base", "= len(nodes) -1 # assume primary key have \"id\" if col.find(\"id\") != -1:", "for b, nlu_t1 in enumerate(nlu_t): hds1 = hds[b] l_hs.append(len(hds1)) # 1. 2nd tokenization", "== len(input_schema.column_names_embedder_input[cnt].split()) # schema_token_states.append(schema_token_states1) # assert len(schema_token_states) == len(input_schema.column_names_embedder_input) return output,new_schema def get_bert_encoding(bert_config,", "max(l_hpu) num_of_all_hds = sum(l_hs) wemb_h = torch.zeros([num_of_all_hds, l_hpu_max, hS * num_out_layers_h]).to(device) # print('wemb_h:", "OUTPUT tokens: BERT input tokens nlu_tt: WP-tokenized input natural language questions orig_to_tok_index: map", ". column nlu_tt1 = [] # print(1111111,nlu_t1,all_hds) for (i, token) in enumerate(nlu_t1): nlu_tt1", "hds1.split('.')[0].strip() if hds1_table == current_table: current_hds1.append(hds1) else: tokens1, segment_ids1, i_nlu1, i_hds1, t_to_tt_idx_hds1 =", "segment_ids.append(0) i_ed_nlu = len(tokens) tokens.append(\"[SEP]\") segment_ids.append(0) i_hds = [] for i, hds11 in", "# take redundancy for foreign key if col in columns: # find('id') !=", "[hds1] # tokens1, segment_ids1, i_nlu1, i_hds1, t_to_tt_idx_hds1 = generate_inputs(tokenizer, nlu_tt1, new_hds1) # if", "= {} nodes = [] foreign_idx = [ i for i,j in input_schema.table_schema['foreign_keys']]", "== len(input_sequence) schema_token_states = [] cnt = -1 for t_to_tt_idx_hds1 in t_to_tt_idx_hds: for", "hds.append(current_hds1) return nodes,relations,masks, new_schema def prepare_input_gnn(tokenizer, input_sequence, input_schema, max_seq_length,pad_len=12): \"\"\" Return: Nodes(list of", "return tokens, segment_ids, i_nlu, i_hds, t_to_tt_idx_hds1 def gen_l_hpu(i_hds): \"\"\" # Treat columns as", "first add headers nodes.append(i) # if 
not col in columns: if not header", "inner list corresponds to edge type \"\"\" nlu_t = [] hds = []", "Return: Nodes(list of tokenized db items) Return: relations(lists of list of related columns),", "l_n, bert_config.hidden_size, bert_config.num_hidden_layers, all_encoder_layer, num_out_layers_n) wemb_h = get_wemb_h(i_hds, l_hpu, l_hs, bert_config.hidden_size, bert_config.num_hidden_layers, all_encoder_layer,", "l_n, l_hpu, l_hs, \\ nlu_tt, t_to_tt_idx, tt_to_t_idx, t_to_tt_idx_hds def prepare_input(tokenizer, input_sequence, input_schema, max_seq_length):", "relations = [torch.tensor(i, dtype=torch.long).to(device) for i in relations] # print(333333,relations, all_encoder_layer.size()) output =", "= generate_inputs(tokenizer, nlu_tt1, new_hds1) if len(segment_ids1) > max_seq_length: nlu_t.append(nlu_t1) hds.append(current_hds1) current_hds1 = [hds1]", "= t_to_tt_idx[0] # assert len(t_to_tt_idx) == len(input_sequence) # assert sum(len(t_to_tt_idx_hds1) for t_to_tt_idx_hds1 in", "[] # all_doc_tokens[ orig_to_tok_idx[i] ] returns first sub-token segement of i-th-1st-level-token for (i,", "\"*\" and len(i.split('.')) > 1: header,col = i.split('.') # if col.strip() != '*':", "b, i_hds1 in enumerate(i_hds): for b1, i_hds11 in enumerate(i_hds1): b_pu += 1 for", "!= len(nodes): # print(new_schema,nodes, len(nodes),len(new_schema)) assert len(new_schema) ==len(nodes) for i in range(len(nodes)): #", "assert len(schema_token_states) == len(input_schema.column_names_embedder_input) if use_gnn: return utterance_states, schema_token_states, relations else: return utterance_states,", "nodes to feed into model masks = [] ## update new schema new_schema", "\\ nlu_tt, t_to_tt_idx, tt_to_t_idx, t_to_tt_idx_hds def prepare_input(tokenizer, input_sequence, input_schema, max_seq_length): nlu_t = []", "schema table # print(11111111,input_schema.column_names_embedder_input,input_schema.column_names_surface_form) relations = None if use_gnn: if not 
(input_schema.table_schema['table_names'][0] in", "max_seq_length assert len(segment_ids1) == max_seq_length input_ids.append(input_ids1) tokens.append(tokens1) segment_ids.append(segment_ids1) input_mask.append(input_mask1) i_nlu.append(i_nlu1) i_hds.append(i_hds1) # Convert", "relation else: # column id columns[col] = len(nodes) -1 # assume primary key", "= [] for i_hds1 in i_hds: for i_hds11 in i_hds1: l_hpu.append(i_hds11[1] - i_hds11[0])", "[] l_hs = [] # The length of columns for each batch input_ids", "in hds11.split(): t_to_tt_idx_hds11.append(len(sub_tok)) sub_tok += tokenizer.tokenize(sub_tok1) t_to_tt_idx_hds1.append(t_to_tt_idx_hds11) tokens += sub_tok i_ed_hd = len(tokens)", "= l_hpu[cnt] # else: # end = t_to_tt_idx_hds11[i+1] # schema_token_states1.append(torch.mean(wemb_h[cnt,start:end,:], dim=0)) # assert", "len(new_schema) ==len(nodes) for i in range(len(nodes)): # new_schema.append(nodes[i]) # print(nodes[i]) nodes[i] = tokenizer.tokenize(nodes[i])", "if not (len(nodes) - 1 in foreign_idx or len(nodes)-1 in primary_idx): relations[0].append([tb_name[header],columns[col]]) #", "[] # for i in range(len(t_to_tt_idx)): # start = t_to_tt_idx[i] # if i", "0 nlu_t1 = input_sequence all_hds = input_schema.column_names_embedder_input nlu_tt1 = [] for (i, token)", "input_sequence, input_schema, max_seq_length) elif bert_input_version == 'v2': raise(\"not inplemented\") nlu_t, hds, max_seq_length =", "it is a batch of natural language utterance with batch-size = # of", "segement of i-th-1st-level-token for (i, token) in enumerate(nlu_t1): t_to_tt_idx1.append( len(nlu_tt1)) # all_doc_tokens[ indicate", "torch.tensor(segment_ids, dtype=torch.long).to(device) # 4. Generate BERT output. 
all_encoder_layer, pooled_output = model_bert(all_input_ids, all_segment_ids, all_input_mask)", "input_schema, bert_input_version='v1', gnn=None ,use_gnn=True, max_seq_length=512, num_out_layers_n=1, num_out_layers_h=1): # NOTE: add gnn above final", "bert all_encoder_layer, pooled_output, tokens, i_nlu, i_hds,\\ l_n, l_hpu, l_hs, \\ nlu_tt, t_to_tt_idx, tt_to_t_idx,", "feed into gnn and return embedding # print(relations) # print(2222222,type(input_nodes),input_nodes) masks = None", "i_nlu, i_hds, \\ l_n, l_hpu, l_hs, \\ nlu_tt, t_to_tt_idx, tt_to_t_idx, t_to_tt_idx_hds def get_wemb_n(i_nlu,", "[[i,j] for i,j in enumerate(input_schema.table_schema['primary_keys'])]#primary relations[2] = input_schema.table_schema['foreign_keys']#foriegn for i,item in enumerate(foreign_key): relations[2][i][0]", "table name correspond to foreign key if len(nodes)-1 in foreign_idx: # print(foreign_idx[0]) foreign_key[foreign_idx.index(len(nodes)-1)]", "if len(current_hds1) > 0: # nlu_t.append(nlu_t1) # hds.append(current_hds1) return nodes,relations, new_schema def prepare_input_gnn2(schema,tokenizer):", "BERT inputs & indices. tokens1, segment_ids1, i_nlu1, i_hds1, t_to_tt_idx_hds1 = generate_inputs(tokenizer, nlu_tt1, hds1)", "tokens # i_hds: start and end indices of headers # get the wemb", "# segmented question all_hds = input_schema.column_names_surface_form # table name.column tables = [] tb_name", "current_hds1) max_seq_length = max(max_seq_length, len(segment_ids1)) nlu_t.append(nlu_t1) hds.append(current_hds1) current_hds1 = [hds1] current_table = hds1_table", "cnt += 1 schema_token_states1 = [] for i in range(len(t_to_tt_idx_hds11)): start = t_to_tt_idx_hds11[i]", "nodes,relations,masks, new_schema def prepare_input_gnn(tokenizer, input_sequence, input_schema, max_seq_length,pad_len=12): \"\"\" Return: Nodes(list of tokenized db", "tokens = [] segment_ids = [] t_to_tt_idx_hds1 = [] tokens.append(\"[CLS]\") i_st_nlu = len(tokens)", "Generate BERT inputs & indices. 
tokens1, segment_ids1, i_nlu1, i_hds1, t_to_tt_idx_hds1 = generate_inputs(tokenizer, nlu_tt1,", "layer output as representation for each schema items relations = [torch.tensor(i, dtype=torch.long).to(device) for", "= t_to_tt_idx_hds11[i+1] schema_token_states1.append(torch.mean(wemb_h[cnt,start:end,:], dim=0)) assert len(schema_token_states1) == len(input_schema.column_names_embedder_input[cnt].split()) schema_token_states.append(schema_token_states1) assert len(schema_token_states) == len(input_schema.column_names_embedder_input)", "columns[col] = len(nodes) -1 # assume primary key have \"id\" if col.find(\"id\") !=", "l_n[0] else: end = t_to_tt_idx[i+1] utterance_states.append(torch.mean(wemb_n[:,start:end,:], dim=[0,1])) assert len(utterance_states) == len(input_sequence) schema_token_states =", "tokenize nodes to feed into model masks = [] ## update new schema", "i_hds l_hpu = gen_l_hpu(i_hds) assert len(set(l_n)) == 1 and len(set(i_nlu)) == 1 assert", "None input_nodes = nodes all_encoder_layer = None if not embedder: input_nodes =[ torch.tensor([i],", "to feed into model masks = [] new_schema = [] for i in", "input_ids.append(input_ids1) tokens.append(tokens1) segment_ids.append(segment_ids1) input_mask.append(input_mask1) i_nlu.append(i_nlu1) i_hds.append(i_hds1) # Convert to tensor all_input_ids = torch.tensor(input_ids,", "i != \"*\" and len(i.split('.')) > 1: header,col = i.split('.') # if col.strip()", "t_to_tt_idx_hds1 in t_to_tt_idx_hds: for t_to_tt_idx_hds11 in t_to_tt_idx_hds1: cnt += 1 schema_token_states1 = []", "- i_hds11[0]), st:ed] \\ = all_encoder_layer[i_layer][b, i_hds11[0]:i_hds11[1],:] return wemb_h def get_wemb_bert(bert_config, model_bert, tokenizer,", "# utterance_states.append(torch.mean(wemb_n[:,start:end,:], dim=[0,1])) # assert len(utterance_states) == len(input_sequence) # schema_token_states = [] #", "def get_bert_encoding(bert_config, model_bert, tokenizer, input_sequence, input_schema, bert_input_version='v1', gnn=None 
,use_gnn=True, max_seq_length=512, num_out_layers_n=1, num_out_layers_h=1): #", "[(17, 18), (19, 21), (22, 23), (24, 25), (26, 29), (30, 34)]) \"\"\"", "- i_nlu1[0]), st:ed] = all_encoder_layer[i_layer][b, i_nlu1[0]:i_nlu1[1], :] return wemb_n def get_wemb_h(i_hds, l_hpu, l_hs,", "-1 for t_to_tt_idx_hds1 in t_to_tt_idx_hds: for t_to_tt_idx_hds11 in t_to_tt_idx_hds1: cnt += 1 schema_token_states1", "input_sequence, input_schema, max_seq_length) elif bert_input_version == 'v2': nlu_t, hds, max_seq_length = prepare_input_v2(tokenizer, input_sequence,", "# t_to_tt_idx = t_to_tt_idx[0] # assert len(t_to_tt_idx) == len(input_sequence) # assert sum(len(t_to_tt_idx_hds1) for", "# assert len(schema_token_states1) == len(input_schema.column_names_embedder_input[cnt].split()) # schema_token_states.append(schema_token_states1) # assert len(schema_token_states) == len(input_schema.column_names_embedder_input) return", "t_to_tt_idx, tt_to_t_idx, t_to_tt_idx_hds = get_bert_output(model_bert, tokenizer, nlu_t, hds, max_seq_length) # all_encoder_layer: BERT outputs", "model_bert(all_input_ids, all_segment_ids, all_input_mask) # 5. generate l_hpu from i_hds l_hpu = gen_l_hpu(i_hds) assert", "Get the representation of each tokens. 
\"\"\" bS = len(l_n) l_n_max = max(l_n)", "token in i],0).mean(0).unsqueeze(0) for i in input_nodes],0) if len(nodes) <=1: print(input_schema.column_names_embedder_input) print(input_schema.num_col) print(input_sequence)", "1: header,col = i.split('.') # if col.strip() != '*': # print(header,col) # first", "= [ i for i in input_schema.table_schema['primary_keys']] foreign_key = [-1] * len(foreign_idx) #", "# all_encoder_layer = [gnn_encoder1(torch.cat(i,1))[0][1] for i in all_encoder_layer] # get hidden layer output", "in enumerate(i_hds): for b1, i_hds11 in enumerate(i_hds1): b_pu += 1 for i_nolh in", "t_to_tt_idx = t_to_tt_idx[0] assert len(t_to_tt_idx) == len(input_sequence) assert sum(len(t_to_tt_idx_hds1) for t_to_tt_idx_hds1 in t_to_tt_idx_hds)", "i_ed_hd)) segment_ids += [1] * len(sub_tok) if i < len(hds1)-1: tokens.append(\"[SEP]\") segment_ids.append(0) elif", "print(nodes[i]) nodes[i] = tokenizer.tokenize(nodes[i]) masks.append([1]*len(nodes[i]) + [0]*(pad_len-len(nodes[i]))) nodes[i] += ['[PAD]'] * (pad_len-len(nodes[i])) nodes[i]", "of header in node - len(nodes) columns = {} nodes = [] foreign_idx", "= input_sequence all_hds = input_schema.column_names_embedder_input nlu_tt1 = [] for (i, token) in enumerate(nlu_t1):", "l_n = [] l_hs = [] # The length of columns for each", "len(set(l_n)) == 1 and len(set(i_nlu)) == 1 assert l_n[0] == i_nlu[0][1] - i_nlu[0][0]", "nlu_t = [] hds = [] nlu_t1 = input_sequence # segmented question all_hds", "foreign_key[foreign_idx.index(len(nodes)-1)] = tb_name[header] ## NOTE: foreign key relation can be column to column", "hds, max_seq_length): \"\"\" Here, input is toknized further by WordPiece (WP) tokenizer and", "= tb_name[header] ## NOTE: foreign key relation can be column to column or", "utterance_states.append(torch.mean(wemb_n[:,start:end,:], dim=[0,1])) # assert len(utterance_states) == len(input_sequence) # schema_token_states = [] # cnt", "t_to_tt_idx1.append( len(nlu_tt1)) # all_doc_tokens[ indicate the start 
position of original 'white-space' tokens. sub_tokens", "hS * num_out_layers_n) wemb_n = torch.zeros([bS, l_n_max, hS * num_out_layers_n]).to(device) for b in", "i_nlu, i_hds, t_to_tt_idx_hds1 def gen_l_hpu(i_hds): \"\"\" # Treat columns as if it is", "> 1: header,col = i.split('.') # if col.strip() != '*': # print(header,col) #", "tt_to_t_idx, t_to_tt_idx_hds = get_wemb_bert(bert_config, model_bert, tokenizer, nlu_t, hds, max_seq_length, num_out_layers_n, num_out_layers_h) t_to_tt_idx =", "2nd tokenization using WordPiece tt_to_t_idx1 = [] # number indicates where sub-token belongs", "cnt = -1 for t_to_tt_idx_hds1 in t_to_tt_idx_hds: for t_to_tt_idx_hds11 in t_to_tt_idx_hds1: cnt +=", ":param nlu: Question :param nlu_t: CoreNLP tokenized nlu. :param hds: Headers :param hs_t:", "input schema table # print(11111111,input_schema.column_names_embedder_input,input_schema.column_names_surface_form) relations = None if use_gnn: if not (input_schema.table_schema['table_names'][0]", "toknized further by WordPiece (WP) tokenizer and fed into BERT. INPUT :param model_bert:", "inplemented\") nlu_t, hds, max_seq_length = prepare_input_v2(tokenizer, input_sequence, input_schema) # relations = input_schema.relations #", "name . 
column nlu_tt1 = [] # print(1111111,nlu_t1,all_hds) for (i, token) in enumerate(nlu_t1):", "==len(nodes) for i in range(len(nodes)): # new_schema.append(nodes[i]) # print(nodes[i]) nodes[i] = tokenizer.tokenize(nodes[i]) #", "as nn import torch.nn.functional as F from .gated_graph_conv import GatedGraphConv from .bert import", "range(len(nodes)): new_schema.append(nodes[i]) # print(nodes[i]) nodes[i] = tokenizer.tokenize(nodes[i]) masks.append([1]*len(nodes[i]) + [0]*(pad_len-len(nodes[i]))) nodes[i] += ['[PAD]']", "import torch.nn as nn import torch.nn.functional as F from .gated_graph_conv import GatedGraphConv from", "= [] hds = [] max_seq_length = 0 nlu_t1 = input_sequence all_hds =", "= torch.tensor(input_mask, dtype=torch.long).to(device) all_segment_ids = torch.tensor(segment_ids, dtype=torch.long).to(device) # 4. Generate BERT output. all_encoder_layer,", "nlu_t1 = input_sequence all_hds = input_schema.column_names_embedder_input nlu_tt1 = [] for (i, token) in", "= torch.device(\"cuda\" if torch.cuda.is_available() else \"cpu\") def get_bert(params): BERT_PT_PATH = './model/bert/data/annotated_wikisql_and_PyTorch_bert_param' map_bert_type_abb =", "= all_encoder_layer[i_layer][b, i_hds11[0]:i_hds11[1],:] return wemb_h def get_wemb_bert(bert_config, model_bert, tokenizer, nlu_t, hds, max_seq_length, num_out_layers_n=1,", "def prepare_input_gnn(tokenizer, input_sequence, input_schema, max_seq_length,pad_len=12): \"\"\" Return: Nodes(list of tokenized db items) Return:", "current_hds1 = [hds1] # else: # current_hds1 = new_hds1 # if len(current_hds1) >", "schema_token_states.append(schema_token_states1) # assert len(schema_token_states) == len(input_schema.column_names_embedder_input) return output,new_schema def get_bert_encoding(bert_config, model_bert, tokenizer, input_sequence,", "nodes, relations, new_schema = prepare_input_gnn( tokenizer, input_sequence, input_schema, max_seq_length) if bert_input_version == 'v1':", "get_wemb_bert(bert_config, model_bert, 
tokenizer, nlu_t, hds, max_seq_length, num_out_layers_n=1, num_out_layers_h=1): # get contextual output of", "The length of columns for each batch input_ids = [] tokens = []", "list of related columns), inner list corresponds to edge type \"\"\" nlu_t =", "wemb wemb_n = get_wemb_n(i_nlu, l_n, bert_config.hidden_size, bert_config.num_hidden_layers, all_encoder_layer, num_out_layers_n) wemb_h = get_wemb_h(i_hds, l_hpu,", "= None input_nodes = nodes all_encoder_layer = None if not embedder: input_nodes =[", "assert sum(len(t_to_tt_idx_hds1) for t_to_tt_idx_hds1 in t_to_tt_idx_hds) == len(input_schema.column_names_embedder_input) assert list(wemb_h.size())[0] == len(input_schema.column_names_embedder_input) #", "sub_tok i_ed_hd = len(tokens) i_hds.append((i_st_hd, i_ed_hd)) segment_ids += [1] * len(sub_tok) if i", "1st-level-token to the index of 2nd-level-token tok_to_orig_index: inverse map. \"\"\" l_n = []", "[ i for i in input_schema.table_schema['primary_keys']] foreign_key = [-1] * len(foreign_idx) # print(input_schema.table_schema['foreign_keys'])", "# print(2222222,type(input_nodes),input_nodes) masks = None input_nodes = nodes all_encoder_layer = None if not", "= torch.zeros([bS, l_n_max, hS * num_out_layers_n]).to(device) for b in range(bS): # [B, max_len,", "nlu_tt1.append(sub_token) # all_doc_tokens are further tokenized using WordPiece tokenizer nlu_tt.append(nlu_tt1) tt_to_t_idx.append(tt_to_t_idx1) t_to_tt_idx.append(t_to_tt_idx1) l_n.append(len(nlu_tt1))", "db items) Return: relations(lists of list of related columns), inner list corresponds to", "tokens. 
\"\"\" bS = len(l_n) l_n_max = max(l_n) # print('wemb_n: [bS, l_n_max, hS", "tokenizer.convert_tokens_to_ids(tokens1) # Input masks # The mask has 1 for real tokens and", "relations[2].append([tb_name[header],columns[col]]) # add foreign key relation else: # column id columns[col] = len(nodes)", "len(input_schema.column_names_embedder_input) # assert list(wemb_h.size())[0] == len(input_schema.column_names_embedder_input) # utterance_states = [] # for i", "hds, max_seq_length = prepare_input_v2(tokenizer, input_sequence, input_schema) wemb_n, wemb_h, l_n, l_hpu, l_hs, nlu_tt, t_to_tt_idx,", "len(set(i_nlu)) == 1 assert l_n[0] == i_nlu[0][1] - i_nlu[0][0] return all_encoder_layer, pooled_output, tokens,", "= prepare_input_gnn( tokenizer, input_sequence, input_schema, max_seq_length) if bert_input_version == 'v1': nlu_t, hds =", "# i_hds: start and end indices of headers # get the wemb wemb_n", "nodes.append(i) # if not col in columns: if not header in tables: tables.append(header)", "col1 [SEP] col2 [SEP] ...col-n [SEP] # 2. 
Generate BERT inputs & indices.", "tokenizer.tokenize(token) current_hds1 = [] for hds1 in all_hds: new_hds1 = current_hds1 + [hds1]", "i_hds = [(17, 18), (19, 21), (22, 23), (24, 25), (26, 29), (30,", "* len(foreign_idx) # print(input_schema.table_schema['foreign_keys']) relations = [[],[],[]] # three edge types, we use", "new_hds1 = current_hds1 + [hds1] # tokens1, segment_ids1, i_nlu1, i_hds1, t_to_tt_idx_hds1 = generate_inputs(tokenizer,", "hds.append(current_hds1) return nlu_t, hds, max_seq_length def get_gnn_encoding(tokenizer,model_bert,input_sequence,input_schema,gnn,gnn_encoder1,embedder=None,bert_input_version='v1',num_out_layers_h=1, max_seq_length=512,num_out_layers_n=1): # only get graph encoding", "gnn and return embedding # print(relations) # print(2222222,type(input_nodes),input_nodes) masks = None input_nodes =", "len(new_schema) != len(nodes): new_schema = input_schema.column_names_surface_form + tables # if len(new_schema) != len(nodes):", "# assert sum(len(t_to_tt_idx_hds1) for t_to_tt_idx_hds1 in t_to_tt_idx_hds) == len(input_schema.column_names_embedder_input) # assert list(wemb_h.size())[0] ==", "map_bert_type_abb = {'uS': 'uncased_L-12_H-768_A-12', 'uL': 'uncased_L-24_H-1024_A-16', 'cS': 'cased_L-12_H-768_A-12', 'cL': 'cased_L-24_H-1024_A-16', 'mcS': 'multi_cased_L-12_H-768_A-12'} bert_type", "'v1': nlu_t, hds = prepare_input(tokenizer, input_sequence, input_schema, max_seq_length) elif bert_input_version == 'v2': nlu_t,", "l_hpu, l_hs, \\ nlu_tt, t_to_tt_idx, tt_to_t_idx, t_to_tt_idx_hds def get_wemb_n(i_nlu, l_n, hS, num_hidden_layers, all_encoder_layer,", "https://github.com/naver/sqlova import os, json import random as rd from copy import deepcopy import", "tokenizer and fed into BERT. 
INPUT :param model_bert: :param tokenizer: WordPiece toknizer :param", "for sub_tok1 in hds11.split(): t_to_tt_idx_hds11.append(len(sub_tok)) sub_tok += tokenizer.tokenize(sub_tok1) t_to_tt_idx_hds1.append(t_to_tt_idx_hds11) tokens += sub_tok i_ed_hd", "1 assert l_n[0] == i_nlu[0][1] - i_nlu[0][0] return all_encoder_layer, pooled_output, tokens, i_nlu, i_hds,", "l_hs = [] # The length of columns for each batch input_ids =", "tokens.append(token) segment_ids.append(0) i_ed_nlu = len(tokens) tokens.append(\"[SEP]\") segment_ids.append(0) i_hds = [] for i, hds11", "-1 for b, i_hds1 in enumerate(i_hds): for b1, i_hds11 in enumerate(i_hds1): b_pu +=", "embedder: input_nodes =[ torch.tensor([i], dtype=torch.long).to(device) for i in nodes] masks = torch.tensor(masks, dtype=torch.long).to(device)", "CoreNLP tokenized nlu. :param hds: Headers :param hs_t: None or 1st-level tokenized headers", "[] # for i in range(len(t_to_tt_idx_hds11)): # start = t_to_tt_idx_hds11[i] # if i", "(len(nodes) - 1 in foreign_idx or len(nodes)-1 in primary_idx): relations[0].append([tb_name[header],columns[col]]) # find table", "elif i == len(hds1)-1: tokens.append(\"[SEP]\") segment_ids.append(1) else: raise EnvironmentError i_nlu = (i_st_nlu, i_ed_nlu)", "tables relations[0] = relations[0] #column relations[1] = [[i,j] for i,j in enumerate(input_schema.table_schema['primary_keys'])]#primary relations[2]", "for sub_token in sub_tokens: tt_to_t_idx1.append(i) nlu_tt1.append(sub_token) # all_doc_tokens are further tokenized using WordPiece", "in zip(input_nodes,masks)],0) all_encoder_layer = torch.cat([gnn_encoder1(i.unsqueeze(0))[0][1][0].unsqueeze(0) for i in all_encoder_layer],0) else: all_encoder_layer = torch.cat([torch.cat([embedder(token).unsqueeze(0)", "into gnn and return embedding # print(relations) # print(2222222,type(input_nodes),input_nodes) masks = None input_nodes", "(26, 29), (30, 34)]) \"\"\" l_hpu = [] for i_hds1 in i_hds: for", "[] new_schema = [] for i in range(len(nodes)): 
new_schema.append(nodes[i]) # print(nodes[i]) nodes[i] =", "if i < len(hds1)-1: tokens.append(\"[SEP]\") segment_ids.append(0) elif i == len(hds1)-1: tokens.append(\"[SEP]\") segment_ids.append(1) else:", "= len(l_hs) l_hpu_max = max(l_hpu) num_of_all_hds = sum(l_hs) wemb_h = torch.zeros([num_of_all_hds, l_hpu_max, hS", "while len(input_ids1) < max_seq_length: input_ids1.append(0) input_mask1.append(0) segment_ids1.append(0) assert len(input_ids1) == max_seq_length assert len(input_mask1)", "print(input_schema.table_schema['foreign_keys']) relations = [[],[],[]] # three edge types, we use tb_name.col as embedding", "or params.bert_type_abb == 'mcS': do_lower_case = False else: do_lower_case = True no_pretraining =", "prepare_input_v2(tokenizer, input_sequence, input_schema) # relations = input_schema.relations # TODO: feed into gnn and", "are attended to. input_mask1 = [1] * len(input_ids1) # 3. Zero-pad up to", "model_bert, tokenizer, input_sequence, input_schema, bert_input_version='v1', gnn=None ,use_gnn=True, max_seq_length=512, num_out_layers_n=1, num_out_layers_h=1): # NOTE: add", "len(t_to_tt_idx_hds11)-1: # end = l_hpu[cnt] # else: # end = t_to_tt_idx_hds11[i+1] # schema_token_states1.append(torch.mean(wemb_h[cnt,start:end,:],", "input_schema, max_seq_length) if bert_input_version == 'v1': nlu_t, hds = prepare_input(tokenizer, input_sequence, input_schema, max_seq_length)", "max_seq_length): \"\"\" Here, input is toknized further by WordPiece (WP) tokenizer and fed", "hds, max_seq_length, num_out_layers_n=1, num_out_layers_h=1): # get contextual output of all tokens from bert", "key have \"id\" if col.find(\"id\") != -1: # print('primary') relations[1].append([tb_name[header],columns[col]]) else: relations[0].append([tb_name[header],columns[col]]) #", "primary key have \"id\" if col.find(\"id\") != -1: # print('primary') relations[1].append([tb_name[header],columns[col]]) if not", "model masks = [] ## update new schema new_schema = 
input_schema.column_names_surface_form if len(new_schema)", "node - len(nodes) columns = {} nodes = [] relations = [[],[],[]] #", "all_encoder_layer] # all_encoder_layer = [gnn_encoder1(torch.cat(i,1))[0][1] for i in all_encoder_layer] # get hidden layer", "assert len(utterance_states) == len(input_sequence) # schema_token_states = [] # cnt = -1 #", "in node - len(nodes) columns = {} nodes = [] foreign_idx = [", "nodes as value # take redundancy for foreign key if col in columns:", "Return: relations(lists of list of related columns), inner list corresponds to edge type", "prepare_input_gnn( tokenizer, input_sequence, input_schema, max_seq_length) if bert_input_version == 'v1': nlu_t, hds = prepare_input(tokenizer,", "i_st_hd = len(tokens) t_to_tt_idx_hds11 = [] sub_tok = [] for sub_tok1 in hds11.split():", "else: current_hds1 = new_hds1 if len(current_hds1) > 0: nlu_t.append(nlu_t1) hds.append(current_hds1) return nlu_t, hds", "t_to_tt_idx_hds = [] for b, nlu_t1 in enumerate(nlu_t): hds1 = hds[b] l_hs.append(len(hds1)) #", "[] doc_tokens = [] nlu_tt = [] t_to_tt_idx = [] tt_to_t_idx = []", "# for t_to_tt_idx_hds1 in t_to_tt_idx_hds: # for t_to_tt_idx_hds11 in t_to_tt_idx_hds1: # cnt +=", "do_lower_case = False else: do_lower_case = True no_pretraining = False bert_config_file = os.path.join(BERT_PT_PATH,", "+= sub_tok i_ed_hd = len(tokens) i_hds.append((i_st_hd, i_ed_hd)) segment_ids += [1] * len(sub_tok) if", "nlu1_tok, hds1): tokens = [] segment_ids = [] t_to_tt_idx_hds1 = [] tokens.append(\"[CLS]\") i_st_nlu", "headers :param max_seq_length: max input token length OUTPUT tokens: BERT input tokens nlu_tt:", "[] for i_hds1 in i_hds: for i_hds11 in i_hds1: l_hpu.append(i_hds11[1] - i_hds11[0]) return", "using WordPiece tt_to_t_idx1 = [] # number indicates where sub-token belongs to in", "three edge types, we use tb_name.col as embedding # print(relations) all_columns = {}", "all_hds = input_schema.column_names_embedder_input nlu_tt1 = [] for (i, token) in 
enumerate(nlu_t1): nlu_tt1 +=", "print(123123123,all_encoder_layer[0][0].size(),len(all_encoder_layer[0]),len(all_encoder_layer),len(all_encoder_layer[3]),len(all_encoder_layer[10])) # print(123123, all_encoder_layer.size(),type(all_encoder_layer)) # all_encoder_layer = all_encoder_layer.permute(2,1,0) # print(all_encoder_layer.size()) # print([gnn_encoder1(i.unsqueeze(0))[0][1][0] for", "return output,new_schema def get_bert_encoding(bert_config, model_bert, tokenizer, input_sequence, input_schema, bert_input_version='v1', gnn=None ,use_gnn=True, max_seq_length=512, num_out_layers_n=1,", "'./model/bert/data/annotated_wikisql_and_PyTorch_bert_param' map_bert_type_abb = {'uS': 'uncased_L-12_H-768_A-12', 'uL': 'uncased_L-24_H-1024_A-16', 'cS': 'cased_L-12_H-768_A-12', 'cL': 'cased_L-24_H-1024_A-16', 'mcS': 'multi_cased_L-12_H-768_A-12'}", "sub_tokens: tt_to_t_idx1.append(i) nlu_tt1.append(sub_token) # all_doc_tokens are further tokenized using WordPiece tokenizer nlu_tt.append(nlu_tt1) tt_to_t_idx.append(tt_to_t_idx1)", "= input_schema.column_names_embedder_input # table name . column tables = [] tb_name = {}", "headers nodes.append(i) # if not col in columns: if not header in tables:", "i,item in enumerate(foreign_key): relations[2][i][0] = item # nodes += tables # print(1111111,input_schema.column_names_surface_form,relations, len(nodes),foreign_key,foreign_idx)", "if len(nodes)-1 in foreign_idx: # print(foreign_idx[0]) foreign_key[foreign_idx.index(len(nodes)-1)] = tb_name[header] ## NOTE: foreign key", "[] hds = [] nlu_t1 = input_sequence # segmented question all_hds = input_schema.column_names_surface_form", "map the index of 1st-level-token to the index of 2nd-level-token tok_to_orig_index: inverse map.", "2. Generate BERT inputs & indices. 
tokens1, segment_ids1, i_nlu1, i_hds1, t_to_tt_idx_hds1 = generate_inputs(tokenizer,", "\"\"\" l_hpu = [] for i_hds1 in i_hds: for i_hds11 in i_hds1: l_hpu.append(i_hds11[1]", "of 1st-level-token to the index of 2nd-level-token tok_to_orig_index: inverse map. \"\"\" l_n =", "nlu_t, hds = prepare_input(tokenizer, input_sequence, input_schema, max_seq_length) elif bert_input_version == 'v2': nlu_t, hds,", "in enumerate(foreign_key): relations[2][i][0] = item # nodes += tables # print(1111111,input_schema.column_names_surface_form,relations, len(nodes),foreign_key,foreign_idx) #", "start and end indices of headers # get the wemb wemb_n = get_wemb_n(i_nlu,", "prepare_input(tokenizer, input_sequence, input_schema, max_seq_length): nlu_t = [] hds = [] nlu_t1 = input_sequence", "# index to retreive the position of contextual vector later. i_hds = []", "if bert_input_version == 'v1': nodes, relations, new_schema = prepare_input_gnn( tokenizer, input_sequence, input_schema, max_seq_length)", "i in all_encoder_layer] # all_encoder_layer = [gnn_encoder1(torch.cat(i,1))[0][1] for i in all_encoder_layer] # get", "l_n_max, hS * num_out_layers_n] = ', bS, l_n_max, hS * num_out_layers_n) wemb_n =", "3. Zero-pad up to the sequence length. 
if len(nlu_t) == 1: max_seq_length =", "nlu_tt, t_to_tt_idx, tt_to_t_idx, t_to_tt_idx_hds def get_wemb_n(i_nlu, l_n, hS, num_hidden_layers, all_encoder_layer, num_out_layers_n): \"\"\" Get", "len(t_to_tt_idx_hds11)-1: end = l_hpu[cnt] else: end = t_to_tt_idx_hds11[i+1] schema_token_states1.append(torch.mean(wemb_h[cnt,start:end,:], dim=0)) assert len(schema_token_states1) ==", "column nlu_tt1 = [] # print(1111111,nlu_t1,all_hds) for (i, token) in enumerate(nlu_t1): nlu_tt1 +=", "from .bert import tokenization as tokenization from .bert.modeling import BertConfig, BertModel device =", "len(input_sequence) # assert sum(len(t_to_tt_idx_hds1) for t_to_tt_idx_hds1 in t_to_tt_idx_hds) == len(input_schema.column_names_embedder_input) # assert list(wemb_h.size())[0]", "for b, i_hds1 in enumerate(i_hds): for b1, i_hds11 in enumerate(i_hds1): b_pu += 1", "[CLS] nlu [SEP] col1 [SEP] col2 [SEP] ...col-n [SEP] # 2. Generate BERT", "if params.bert_type_abb == 'cS' or params.bert_type_abb == 'cL' or params.bert_type_abb == 'mcS': do_lower_case", "print(1111111,nlu_t1,all_hds) nodes.append('*') for i in all_hds: # print(i.split('.')) if i != \"*\" and", "max(l_n) # print('wemb_n: [bS, l_n_max, hS * num_out_layers_n] = ', bS, l_n_max, hS", "+ 1) * hS wemb_n[b, 0:(i_nlu1[1] - i_nlu1[0]), st:ed] = all_encoder_layer[i_layer][b, i_nlu1[0]:i_nlu1[1], :]", "assert len(t_to_tt_idx) == len(input_sequence) # assert sum(len(t_to_tt_idx_hds1) for t_to_tt_idx_hds1 in t_to_tt_idx_hds) == len(input_schema.column_names_embedder_input)", "tokenizer.tokenize(token) current_hds1 = [] current_table = '' for hds1 in all_hds: hds1_table =", "range(num_out_layers_h): i_layer = num_hidden_layers - 1 - i_nolh st = i_nolh * hS", "tokens, i_nlu, i_hds, \\ l_n, l_hpu, l_hs, \\ nlu_tt, t_to_tt_idx, tt_to_t_idx, t_to_tt_idx_hds def", "print(123123, all_encoder_layer.size(),type(all_encoder_layer)) # all_encoder_layer = all_encoder_layer.permute(2,1,0) # print(all_encoder_layer.size()) # 
print([gnn_encoder1(i.unsqueeze(0))[0][1][0] for i in", "hds1 in all_hds: hds1_table = hds1.split('.')[0].strip() if hds1_table == current_table: current_hds1.append(hds1) else: tokens1,", "types, we use tb_name.col as embedding # print(relations) all_columns = {} # print(1111111,nlu_t1,all_hds)", "(30, 34)]) \"\"\" l_hpu = [] for i_hds1 in i_hds: for i_hds11 in", "in all_encoder_layer],0) else: all_encoder_layer = torch.cat([torch.cat([embedder(token).unsqueeze(0) for token in i],0).mean(0).unsqueeze(0) for i in", "tokenization.FullTokenizer( vocab_file=vocab_file, do_lower_case=do_lower_case) bert_config.print_status() model_bert = BertModel(bert_config) if no_pretraining: pass else: model_bert.load_state_dict(torch.load(init_checkpoint, map_location='cpu'))", "\"\"\" l_n = [] l_hs = [] # The length of columns for", "= [[i,j] for i,j in enumerate(input_schema.table_schema['primary_keys'])]#primary relations[2] = input_schema.table_schema['foreign_keys']#foriegn for i,item in enumerate(foreign_key):", "def prepare_input_gnn0(tokenizer, input_sequence, input_schema, max_seq_length,pad_len=12): \"\"\" Return: Nodes(list of tokenized db items) Return:", "l_hs, \\ nlu_tt, t_to_tt_idx, tt_to_t_idx, t_to_tt_idx_hds def prepare_input(tokenizer, input_sequence, input_schema, max_seq_length): nlu_t =", "max_seq_length def get_gnn_encoding(tokenizer,model_bert,input_sequence,input_schema,gnn,gnn_encoder1,embedder=None,bert_input_version='v1',num_out_layers_h=1, max_seq_length=512,num_out_layers_n=1): # only get graph encoding without input_sequence dependency nodes=relations=new_schema=None", "[table-1-col-1-tok1, t1-c1-t2, ...], [t1-c2-t1, t1-c2-t2, ...]. ... [t2-c1-t1, ...,] ] \"\"\" bS =", "current_table = '' for hds1 in all_hds: hds1_table = hds1.split('.')[0].strip() if hds1_table ==", "WordPiece (WP) tokenizer and fed into BERT. 
INPUT :param model_bert: :param tokenizer: WordPiece", "= len(tokens) # to use it later segment_ids.append(0) for token in nlu1_tok: tokens.append(token)", "== len(input_schema.column_names_embedder_input) # print(22222222,len(input_schema.column_names_embedder_input),input_schema.column_names_embedder_input,input_schema.column_names_surface_form) utterance_states = [] for i in range(len(t_to_tt_idx)): start =", "= {} nodes = [] relations = [[],[],[]] # three edge types, we", "in enumerate(nlu_t): hds1 = hds[b] l_hs.append(len(hds1)) # 1. 2nd tokenization using WordPiece tt_to_t_idx1", "i in range(len(t_to_tt_idx)): start = t_to_tt_idx[i] if i == len(t_to_tt_idx)-1: end = l_n[0]", "a batch of natural language utterance with batch-size = # of columns *", "= [1] * len(input_ids1) # 3. Zero-pad up to the sequence length. if", "i_ed_hd = len(tokens) i_hds.append((i_st_hd, i_ed_hd)) segment_ids += [1] * len(sub_tok) if i <", "tokens from bert all_encoder_layer, pooled_output, tokens, i_nlu, i_hds,\\ l_n, l_hpu, l_hs, \\ nlu_tt,", "not embedder: input_nodes =[ torch.tensor([i], dtype=torch.long).to(device) for i in nodes] masks = torch.tensor(masks,", "== 'v1': nlu_t, hds = prepare_input(tokenizer, input_sequence, input_schema, max_seq_length) elif bert_input_version == 'v2':", "= {} # print(1111111,nlu_t1,all_hds) nodes.append('*') for i in all_hds: # print(i.split('.')) if i", "max_seq_length assert len(input_mask1) == max_seq_length assert len(segment_ids1) == max_seq_length input_ids.append(input_ids1) tokens.append(tokens1) segment_ids.append(segment_ids1) input_mask.append(input_mask1)", "= -1 # for t_to_tt_idx_hds1 in t_to_tt_idx_hds: # for t_to_tt_idx_hds11 in t_to_tt_idx_hds1: #", "len(input_schema.table_schema['table_names']) input_schema.column_names_surface_form += [i.lower() for i in input_schema.table_schema['table_names_original']] nodes, relations, new_schema = prepare_input_gnn(", 
"print(11111111,input_schema.column_names_embedder_input,input_schema.column_names_surface_form) relations = None if use_gnn: if not (input_schema.table_schema['table_names'][0] in input_schema.column_names_embedder_input): input_schema.column_names_embedder_input +=", "utterance_states.append(torch.mean(wemb_n[:,start:end,:], dim=[0,1])) assert len(utterance_states) == len(input_sequence) schema_token_states = [] cnt = -1 for", "torch.cat([gnn_encoder1(i.unsqueeze(0))[0][1][0].unsqueeze(0) for i in all_encoder_layer],0) else: all_encoder_layer = torch.cat([torch.cat([embedder(token).unsqueeze(0) for token in i],0).mean(0).unsqueeze(0)", "for hds1 in all_hds: # new_hds1 = current_hds1 + [hds1] # tokens1, segment_ids1,", "tokenize nodes to feed into model masks = [] new_schema = [] for", "nlu_t1 = input_sequence # segmented question all_hds = input_schema.column_names_embedder_input # table name .", "= new_hds1 # if len(current_hds1) > 0: # nlu_t.append(nlu_t1) # hds.append(current_hds1) return nodes,relations,", "== len(input_sequence) # schema_token_states = [] # cnt = -1 # for t_to_tt_idx_hds1", "def prepare_input_v2(tokenizer, input_sequence, input_schema): nlu_t = [] hds = [] max_seq_length = 0", "all_encoder_layer, num_out_layers_n): \"\"\" Get the representation of each tokens. \"\"\" bS = len(l_n)", "hds1_table = hds1.split('.')[0].strip() if hds1_table == current_table: current_hds1.append(hds1) else: tokens1, segment_ids1, i_nlu1, i_hds1,", "[SEP] # 2. Generate BERT inputs & indices. 
tokens1, segment_ids1, i_nlu1, i_hds1, t_to_tt_idx_hds1", "range(len(nodes)): # new_schema.append(nodes[i]) # print(nodes[i]) nodes[i] = tokenizer.tokenize(nodes[i]) # print(nodes[i]) # masks.append([1]*len(nodes[i]) +", "in enumerate(nlu_t1): nlu_tt1 += tokenizer.tokenize(token) current_hds1 = [] current_table = '' for hds1", "len(tokens) i_hds.append((i_st_hd, i_ed_hd)) segment_ids += [1] * len(sub_tok) if i < len(hds1)-1: tokens.append(\"[SEP]\")", "= ', bS, l_n_max, hS * num_out_layers_n) wemb_n = torch.zeros([bS, l_n_max, hS *", "# for (i, token) in enumerate(nlu_t1): # nlu_tt1 += tokenizer.tokenize(token) # current_hds1 =", "# table name . column nlu_tt1 = [] # print(1111111,nlu_t1,all_hds) for (i, token)", "'mcS': 'multi_cased_L-12_H-768_A-12'} bert_type = map_bert_type_abb[params.bert_type_abb] if params.bert_type_abb == 'cS' or params.bert_type_abb == 'cL'", "== 'v2': nlu_t, hds, max_seq_length = prepare_input_v2(tokenizer, input_sequence, input_schema) wemb_n, wemb_h, l_n, l_hpu,", "[SEP] ...col-n [SEP] # 2. Generate BERT inputs & indices. 
tokens1, segment_ids1, i_nlu1,", "we use tb_name.col as embedding # print(relations) all_columns = {} # print(1111111,nlu_t1,all_hds) nodes.append('*')", "st = i_nolh * hS ed = (i_nolh + 1) * hS wemb_h[b_pu,", "primary key have \"id\" if col.find(\"id\") != -1: # print('primary') relations[1].append([tb_name[header],columns[col]]) else: relations[0].append([tb_name[header],columns[col]])", "for (i, token) in enumerate(nlu_t1): t_to_tt_idx1.append( len(nlu_tt1)) # all_doc_tokens[ indicate the start position", "add column name to columns with index in nodes as value # take", "## update new schema new_schema = input_schema.column_names_surface_form if len(new_schema) != len(nodes): new_schema =", "tt_to_t_idx, t_to_tt_idx_hds = get_wemb_bert(bert_config, model_bert, tokenizer, nlu_t, hds, max_seq_length, num_out_layers_n, num_out_layers_h) # t_to_tt_idx", "len(hds1) t_to_tt_idx_hds.append(t_to_tt_idx_hds1) input_ids1 = tokenizer.convert_tokens_to_ids(tokens1) # Input masks # The mask has 1", "generate_inputs(tokenizer, nlu1_tok, hds1): tokens = [] segment_ids = [] t_to_tt_idx_hds1 = [] tokens.append(\"[CLS]\")", "# all_encoder_layer = torch.cat([gnn_encoder1(i.unsqueeze(0))[0][1][0].unsqueeze(0) for i in all_encoder_layer],0) # all_encoder_layer = [gnn_encoder1(i.squeeze()) for", "max_seq_length = prepare_input_v2(tokenizer, input_sequence, input_schema) wemb_n, wemb_h, l_n, l_hpu, l_hs, nlu_tt, t_to_tt_idx, tt_to_t_idx,", "bert_config = BertConfig.from_json_file(bert_config_file) tokenizer = tokenization.FullTokenizer( vocab_file=vocab_file, do_lower_case=do_lower_case) bert_config.print_status() model_bert = BertModel(bert_config) if", "gnn above final output layer #add input schema table # print(11111111,input_schema.column_names_embedder_input,input_schema.column_names_surface_form) relations =", "l_hs, \\ nlu_tt, t_to_tt_idx, tt_to_t_idx, t_to_tt_idx_hds = get_bert_output(model_bert, tokenizer, nlu_t, hds, max_seq_length) #", "nlu_tt1 += tokenizer.tokenize(token) 
current_hds1 = [] for hds1 in all_hds: new_hds1 = current_hds1", "= (i_noln + 1) * hS wemb_n[b, 0:(i_nlu1[1] - i_nlu1[0]), st:ed] = all_encoder_layer[i_layer][b,", "the wemb wemb_n = get_wemb_n(i_nlu, l_n, bert_config.hidden_size, bert_config.num_hidden_layers, all_encoder_layer, num_out_layers_n) wemb_h = get_wemb_h(i_hds,", "num_hidden_layers, all_encoder_layer, num_out_layers_h): \"\"\" As if [ [table-1-col-1-tok1, t1-c1-t2, ...], [t1-c2-t1, t1-c2-t2, ...].", "max_seq_length = len(input_ids1) while len(input_ids1) < max_seq_length: input_ids1.append(0) input_mask1.append(0) segment_ids1.append(0) assert len(input_ids1) ==", "input_sequence # segmented question all_hds = input_schema.column_names_surface_form # table name.column tables = []", "input_schema.column_names_embedder_input): input_schema.column_names_embedder_input += input_schema.table_schema['table_names'] input_schema.num_col += len(input_schema.table_schema['table_names']) input_schema.column_names_surface_form += [i.lower() for i in", "get_bert_output(model_bert, tokenizer, nlu_t, hds, max_seq_length): \"\"\" Here, input is toknized further by WordPiece", "len(hds1)-1: tokens.append(\"[SEP]\") segment_ids.append(0) elif i == len(hds1)-1: tokens.append(\"[SEP]\") segment_ids.append(1) else: raise EnvironmentError i_nlu", "tt_to_t_idx, t_to_tt_idx_hds def get_wemb_n(i_nlu, l_n, hS, num_hidden_layers, all_encoder_layer, num_out_layers_n): \"\"\" Get the representation", "input_schema.column_names_surface_form + tables # if len(new_schema) != len(nodes): # print(new_schema,nodes, len(nodes),len(new_schema)) assert len(new_schema)", "get_wemb_h(i_hds, l_hpu, l_hs, bert_config.hidden_size, bert_config.num_hidden_layers, all_encoder_layer, num_out_layers_h) return wemb_n, wemb_h, l_n, l_hpu, l_hs,", "input_nodes],0) if len(nodes) <=1: print(input_schema.column_names_embedder_input) print(input_schema.num_col) print(input_sequence) assert len(nodes) > 1 assert len(relations[0])", "of [CLS] vec. 
# tokens: BERT intput tokens # i_nlu: start and end", "t_to_tt_idx_hds1: cnt += 1 schema_token_states1 = [] for i in range(len(t_to_tt_idx_hds11)): start =", "tokens.append(\"[SEP]\") segment_ids.append(1) else: raise EnvironmentError i_nlu = (i_st_nlu, i_ed_nlu) return tokens, segment_ids, i_nlu,", "i,j in zip(input_nodes,masks)],0) all_encoder_layer = torch.cat([gnn_encoder1(i.unsqueeze(0))[0][1][0].unsqueeze(0) for i in all_encoder_layer],0) else: all_encoder_layer =", "not col in columns: if not header in tables: tables.append(header) tb_name[header] = len(tables)", "# print(1111111,nlu_t1,all_hds) for (i, token) in enumerate(nlu_t1): nlu_tt1 += tokenizer.tokenize(token) current_hds1 = []", "for i_hds11 in i_hds1: l_hpu.append(i_hds11[1] - i_hds11[0]) return l_hpu def get_bert_output(model_bert, tokenizer, nlu_t,", "masks = None input_nodes = nodes all_encoder_layer = None if not embedder: input_nodes", "for i in range(len(nodes)): new_schema.append(nodes[i]) # print(nodes[i]) nodes[i] = tokenizer.tokenize(nodes[i]) masks.append([1]*len(nodes[i]) + [0]*(pad_len-len(nodes[i])))", "34)]) \"\"\" l_hpu = [] for i_hds1 in i_hds: for i_hds11 in i_hds1:", "torch import torch.nn as nn import torch.nn.functional as F from .gated_graph_conv import GatedGraphConv", "bert_type = map_bert_type_abb[params.bert_type_abb] if params.bert_type_abb == 'cS' or params.bert_type_abb == 'cL' or params.bert_type_abb", "init_checkpoint) bert_config = BertConfig.from_json_file(bert_config_file) tokenizer = tokenization.FullTokenizer( vocab_file=vocab_file, do_lower_case=do_lower_case) bert_config.print_status() model_bert = BertModel(bert_config)", "# end = l_n[0] # else: # end = t_to_tt_idx[i+1] # utterance_states.append(torch.mean(wemb_n[:,start:end,:], dim=[0,1]))", "def gen_l_hpu(i_hds): \"\"\" # Treat columns as if it is a batch of", "# find table name correspond to foreign key if len(nodes)-1 in foreign_idx: #", "question all_hds = input_schema.column_names_embedder_input # table 
name . column tables = [] tb_name", "'cS' or params.bert_type_abb == 'cL' or params.bert_type_abb == 'mcS': do_lower_case = False else:", "max_seq_length) if bert_input_version == 'v1': nlu_t, hds = prepare_input(tokenizer, input_sequence, input_schema, max_seq_length) elif", "1 for real tokens and 0 for padding tokens. Only real # tokens", "the index of 2nd-level-token tok_to_orig_index: inverse map. \"\"\" l_n = [] l_hs =", "i_nlu1[0]), st:ed] = all_encoder_layer[i_layer][b, i_nlu1[0]:i_nlu1[1], :] return wemb_n def get_wemb_h(i_hds, l_hpu, l_hs, hS,", "+ 1) * hS wemb_h[b_pu, 0:(i_hds11[1] - i_hds11[0]), st:ed] \\ = all_encoder_layer[i_layer][b, i_hds11[0]:i_hds11[1],:]", "+= tables # print(1111111,input_schema.column_names_surface_form,relations, len(nodes),foreign_key,foreign_idx) # exit(0) for i in relations: for j", "key have \"id\" if col.find(\"id\") != -1: # print('primary') relations[1].append([tb_name[header],columns[col]]) if not (len(nodes)", "...,] ] \"\"\" bS = len(l_hs) l_hpu_max = max(l_hpu) num_of_all_hds = sum(l_hs) wemb_h", "None if not embedder: input_nodes =[ torch.tensor([i], dtype=torch.long).to(device) for i in nodes] masks", "tt_to_t_idx, t_to_tt_idx_hds = get_bert_output(model_bert, tokenizer, nlu_t, hds, max_seq_length) # all_encoder_layer: BERT outputs from", "* hS ed = (i_noln + 1) * hS wemb_n[b, 0:(i_nlu1[1] - i_nlu1[0]),", "for i in all_encoder_layer] # all_encoder_layer = [gnn_encoder1(torch.cat(i,1))[0][1] for i in all_encoder_layer] #", "== len(input_schema.column_names_embedder_input) # assert list(wemb_h.size())[0] == len(input_schema.column_names_embedder_input) # utterance_states = [] # for", "> 0 # print(123123123,all_encoder_layer[0][0].size(),len(all_encoder_layer[0]),len(all_encoder_layer),len(all_encoder_layer[3]),len(all_encoder_layer[10])) # print(123123, all_encoder_layer.size(),type(all_encoder_layer)) # all_encoder_layer = all_encoder_layer.permute(2,1,0) # print(all_encoder_layer.size())", "if len(current_hds1) > 0: 
# nlu_t.append(nlu_t1) # hds.append(current_hds1) return nodes,relations,masks, new_schema def prepare_input_gnn(tokenizer,", "= start index of i-th-1st-level-token in all_tokens. nlu_tt1 = [] # all_doc_tokens[ orig_to_tok_idx[i]", "if not col in columns: if not header in tables: tables.append(header) tb_name[header] =", "for hds1 in all_hds: new_hds1 = current_hds1 + [hds1] tokens1, segment_ids1, i_nlu1, i_hds1,", "num_out_layers_n) wemb_h = get_wemb_h(i_hds, l_hpu, l_hs, bert_config.hidden_size, bert_config.num_hidden_layers, all_encoder_layer, num_out_layers_h) return wemb_n, wemb_h,", "add foreign key relation else: # column id columns[col] = len(nodes) -1 #", "print('key') relations[2].append([tb_name[header],columns[col]]) # add foreign key relation else: # column id columns[col] =", "l_n[0] == i_nlu[0][1] - i_nlu[0][0] return all_encoder_layer, pooled_output, tokens, i_nlu, i_hds, \\ l_n,", "raise(\"not inplemented\") nlu_t, hds, max_seq_length = prepare_input_v2(tokenizer, input_sequence, input_schema) # relations = input_schema.relations", "l_n, l_hpu, l_hs, \\ nlu_tt, t_to_tt_idx, tt_to_t_idx, t_to_tt_idx_hds = get_bert_output(model_bert, tokenizer, nlu_t, hds,", "\\ = all_encoder_layer[i_layer][b, i_hds11[0]:i_hds11[1],:] return wemb_h def get_wemb_bert(bert_config, model_bert, tokenizer, nlu_t, hds, max_seq_length,", "l_hpu.append(i_hds11[1] - i_hds11[0]) return l_hpu def get_bert_output(model_bert, tokenizer, nlu_t, hds, max_seq_length): \"\"\" Here,", "t_to_tt_idx_hds: # for t_to_tt_idx_hds11 in t_to_tt_idx_hds1: # cnt += 1 # schema_token_states1 =", "i in all_encoder_layer][0]) # all_encoder_layer = torch.cat([gnn_encoder1(i.unsqueeze(0))[0][1][0].unsqueeze(0) for i in all_encoder_layer],0) # all_encoder_layer", "in i],0).mean(0).unsqueeze(0) for i in input_nodes],0) if len(nodes) <=1: print(input_schema.column_names_embedder_input) print(input_schema.num_col) print(input_sequence) assert", "get_bert_encoding(bert_config, model_bert, tokenizer, 
input_sequence, input_schema, bert_input_version='v1', gnn=None ,use_gnn=True, max_seq_length=512, num_out_layers_n=1, num_out_layers_h=1): # NOTE:", "[-1] * len(foreign_idx) # print(input_schema.table_schema['foreign_keys']) relations = [[],[],[]] # three edge types, we", "col.find(\"id\") != -1: # print('primary') relations[1].append([tb_name[header],columns[col]]) if not (len(nodes) - 1 in foreign_idx", "foreign key relation # relations[0].append([tb_name[header],columns[col]]) else: # column id columns[col] = len(nodes) -1", ":param max_seq_length: max input token length OUTPUT tokens: BERT input tokens nlu_tt: WP-tokenized", "num_out_layers_h) return wemb_n, wemb_h, l_n, l_hpu, l_hs, \\ nlu_tt, t_to_tt_idx, tt_to_t_idx, t_to_tt_idx_hds def", "if len(new_schema) != len(nodes): # print(new_schema,nodes, len(nodes),len(new_schema)) assert len(new_schema) ==len(nodes) for i in", "# Fill zero for non-exist part. l_n1 = l_n[b] i_nlu1 = i_nlu[b] for", "hds1): tokens = [] segment_ids = [] t_to_tt_idx_hds1 = [] tokens.append(\"[CLS]\") i_st_nlu =", "current_hds1 = [] for hds1 in all_hds: new_hds1 = current_hds1 + [hds1] tokens1,", "= True no_pretraining = False bert_config_file = os.path.join(BERT_PT_PATH, f'bert_config_{bert_type}.json') vocab_file = os.path.join(BERT_PT_PATH, f'vocab_{bert_type}.txt')", "i_hds, t_to_tt_idx_hds1 def gen_l_hpu(i_hds): \"\"\" # Treat columns as if it is a", "(input_schema.table_schema['table_names'][0] in input_schema.column_names_embedder_input): input_schema.column_names_embedder_input += input_schema.table_schema['table_names'] input_schema.num_col += len(input_schema.table_schema['table_names']) input_schema.column_names_surface_form += [i.lower() for", "wemb_h def get_wemb_bert(bert_config, model_bert, tokenizer, nlu_t, hds, max_seq_length, num_out_layers_n=1, num_out_layers_h=1): # get contextual", "choose latter one) base = len(nodes) nodes += tables relations[0] = relations[0] #column", "= [] # orig_to_tok_idx[i] = start index 
of i-th-1st-level-token in all_tokens. nlu_tt1 =", "len(utterance_states) == len(input_sequence) # schema_token_states = [] # cnt = -1 # for", "print(nodes,relations) # for (i, token) in enumerate(nlu_t1): # nlu_tt1 += tokenizer.tokenize(token) # current_hds1", "len(utterance_states) == len(input_sequence) schema_token_states = [] cnt = -1 for t_to_tt_idx_hds1 in t_to_tt_idx_hds:", "nodes.append('*') for i in all_hds: # print(i.split('.')) if i != \"*\" and len(i.split('.'))", "tokens = [] segment_ids = [] input_mask = [] i_nlu = [] #", "def get_wemb_bert(bert_config, model_bert, tokenizer, nlu_t, hds, max_seq_length, num_out_layers_n=1, num_out_layers_h=1): # get contextual output", "new_hds1) if len(segment_ids1) > max_seq_length: nlu_t.append(nlu_t1) hds.append(current_hds1) current_hds1 = [hds1] else: current_hds1 =", "get the wemb wemb_n = get_wemb_n(i_nlu, l_n, bert_config.hidden_size, bert_config.num_hidden_layers, all_encoder_layer, num_out_layers_n) wemb_h =", "nodes[i] += ['[PAD]'] * (pad_len-len(nodes[i])) nodes[i] = tokenizer.convert_tokens_to_ids(nodes[i]) # print(nodes[i],masks[i]) # print(relations) #", "= get_wemb_bert(bert_config, model_bert, tokenizer, nlu_t, hds, max_seq_length, num_out_layers_n, num_out_layers_h) t_to_tt_idx = t_to_tt_idx[0] assert", ". column tables = [] tb_name = {} # index of header in", "# print([gnn_encoder1(i.unsqueeze(0))[0][1][0] for i in all_encoder_layer][0]) # all_encoder_layer = torch.cat([gnn_encoder1(i.unsqueeze(0))[0][1][0].unsqueeze(0) for i in", "to column (we choose latter one) base = len(nodes) nodes += tables relations[0]", "= get_wemb_n(i_nlu, l_n, bert_config.hidden_size, bert_config.num_hidden_layers, all_encoder_layer, num_out_layers_n) wemb_h = get_wemb_h(i_hds, l_hpu, l_hs, bert_config.hidden_size,", "input_sequence # segmented question all_hds = input_schema.column_names_embedder_input # table name . 
column nlu_tt1", "= [] for (i, token) in enumerate(nlu_t1): nlu_tt1 += tokenizer.tokenize(token) current_hds1 = []", "= max(l_hpu) num_of_all_hds = sum(l_hs) wemb_h = torch.zeros([num_of_all_hds, l_hpu_max, hS * num_out_layers_h]).to(device) #", "'*': # print(header,col) # first add headers nodes.append(i) # if not col in", "len(nodes),foreign_key,foreign_idx) # exit(0) for i in relations: for j in i: j[0] +=", "column to column or table to column (we choose latter one) base =", "hds.append(current_hds1) return nlu_t, hds def prepare_input_gnn0(tokenizer, input_sequence, input_schema, max_seq_length,pad_len=12): \"\"\" Return: Nodes(list of", "hS, num_hidden_layers, all_encoder_layer, num_out_layers_n): \"\"\" Get the representation of each tokens. \"\"\" bS", "in enumerate(nlu_t1): nlu_tt1 += tokenizer.tokenize(token) current_hds1 = [] for hds1 in all_hds: new_hds1", "t_to_tt_idx, tt_to_t_idx, t_to_tt_idx_hds def get_wemb_n(i_nlu, l_n, hS, num_hidden_layers, all_encoder_layer, num_out_layers_n): \"\"\" Get the", "for i,item in enumerate(foreign_key): relations[2][i][0] = item # nodes += tables # print(1111111,input_schema.column_names_surface_form,relations,", "base # tokenize nodes to feed into model masks = [] ## update", "get_wemb_bert(bert_config, model_bert, tokenizer, nlu_t, hds, max_seq_length, num_out_layers_n, num_out_layers_h) # t_to_tt_idx = t_to_tt_idx[0] #", "= [] t_to_tt_idx_hds = [] for b, nlu_t1 in enumerate(nlu_t): hds1 = hds[b]", "relations[1] = [[i,j] for i,j in enumerate(input_schema.table_schema['primary_keys'])]#primary relations[2] = input_schema.table_schema['foreign_keys']#foriegn for i,item in", "tokenizer, nlu_t, hds, max_seq_length, num_out_layers_n=1, num_out_layers_h=1): # get contextual output of all tokens", "i_nlu[b] for i_noln in range(num_out_layers_n): i_layer = num_hidden_layers - 1 - i_noln st", "hds, max_seq_length = prepare_input_v2(tokenizer, input_sequence, input_schema) # relations = input_schema.relations # TODO: feed", 
"input_schema.table_schema['primary_keys']] foreign_key = [-1] * len(foreign_idx) # print(input_schema.table_schema['foreign_keys']) relations = [[],[],[]] # three", "# 4. Generate BERT output. all_encoder_layer, pooled_output = model_bert(all_input_ids, all_segment_ids, all_input_mask) # 5.", "WordPiece tokenizer nlu_tt.append(nlu_tt1) tt_to_t_idx.append(tt_to_t_idx1) t_to_tt_idx.append(t_to_tt_idx1) l_n.append(len(nlu_tt1)) # [CLS] nlu [SEP] col1 [SEP] col2", "Input masks # The mask has 1 for real tokens and 0 for", "= [] current_table = '' for hds1 in all_hds: hds1_table = hds1.split('.')[0].strip() if", "len(schema_token_states1) == len(input_schema.column_names_embedder_input[cnt].split()) # schema_token_states.append(schema_token_states1) # assert len(schema_token_states) == len(input_schema.column_names_embedder_input) return output,new_schema def", "segment_ids.append(0) for token in nlu1_tok: tokens.append(token) segment_ids.append(0) i_ed_nlu = len(tokens) tokens.append(\"[SEP]\") segment_ids.append(0) i_hds", "[] max_seq_length = 0 nlu_t1 = input_sequence all_hds = input_schema.column_names_embedder_input nlu_tt1 = []", "of contextual vector later. 
i_hds = [] doc_tokens = [] nlu_tt = []", "input_sequence, input_schema, max_seq_length,pad_len=12): \"\"\" Return: Nodes(list of tokenized db items) Return: relations(lists of", "nlu_tt1 = [] # print(1111111,nlu_t1,all_hds) for (i, token) in enumerate(nlu_t1): nlu_tt1 += tokenizer.tokenize(token)", "len(input_mask1) == max_seq_length assert len(segment_ids1) == max_seq_length input_ids.append(input_ids1) tokens.append(tokens1) segment_ids.append(segment_ids1) input_mask.append(input_mask1) i_nlu.append(i_nlu1) i_hds.append(i_hds1)", "for i, hds11 in enumerate(hds1): i_st_hd = len(tokens) t_to_tt_idx_hds11 = [] sub_tok =", "print(1111111,nlu_t1,all_hds) for (i, token) in enumerate(nlu_t1): nlu_tt1 += tokenizer.tokenize(token) current_hds1 = [] for", "foreign_idx: # print(foreign_idx[0]) foreign_key[foreign_idx.index(len(nodes)-1)] = tb_name[header] ## NOTE: foreign key relation can be", "all_encoder_layer= torch.cat([torch.cat(model_bert(i,j)[0],1) for i,j in zip(input_nodes,masks)],0) all_encoder_layer = torch.cat([gnn_encoder1(i.unsqueeze(0))[0][1][0].unsqueeze(0) for i in all_encoder_layer],0)", "= get_wemb_bert(bert_config, model_bert, tokenizer, nlu_t, hds, max_seq_length, num_out_layers_n, num_out_layers_h) # t_to_tt_idx = t_to_tt_idx[0]", "len(input_schema.column_names_embedder_input[cnt].split()) # schema_token_states.append(schema_token_states1) # assert len(schema_token_states) == len(input_schema.column_names_embedder_input) return output,new_schema def get_bert_encoding(bert_config, model_bert,", "0:(i_hds11[1] - i_hds11[0]), st:ed] \\ = all_encoder_layer[i_layer][b, i_hds11[0]:i_hds11[1],:] return wemb_h def get_wemb_bert(bert_config, model_bert,", "nlu_tt: WP-tokenized input natural language questions orig_to_tok_index: map the index of 1st-level-token to", "-1: # print('primary') relations[1].append([tb_name[header],columns[col]]) if not (len(nodes) - 1 in foreign_idx or len(nodes)-1", "t_to_tt_idx_hds1 = generate_inputs(tokenizer, nlu_tt1, 
current_hds1) max_seq_length = max(max_seq_length, len(segment_ids1)) nlu_t.append(nlu_t1) hds.append(current_hds1) current_hds1 =", "get_wemb_bert(bert_config, model_bert, tokenizer, nlu_t, hds, max_seq_length, num_out_layers_n, num_out_layers_h) t_to_tt_idx = t_to_tt_idx[0] assert len(t_to_tt_idx)", ":param hs_t: None or 1st-level tokenized headers :param max_seq_length: max input token length", "from .gated_graph_conv import GatedGraphConv from .bert import tokenization as tokenization from .bert.modeling import", "= l_n[0] else: end = t_to_tt_idx[i+1] utterance_states.append(torch.mean(wemb_n[:,start:end,:], dim=[0,1])) assert len(utterance_states) == len(input_sequence) schema_token_states", "0 for padding tokens. Only real # tokens are attended to. input_mask1 =", "all_encoder_layer = [gnn_encoder1(i.squeeze()) for i in all_encoder_layer] # all_encoder_layer = [gnn_encoder1(torch.cat(i,1))[0][1] for i", "# current_hds1 = [] # for hds1 in all_hds: # new_hds1 = current_hds1", "as F from .gated_graph_conv import GatedGraphConv from .bert import tokenization as tokenization from", "+= len(input_schema.table_schema['table_names']) input_schema.column_names_surface_form += [i.lower() for i in input_schema.table_schema['table_names_original']] nodes, relations, new_schema =", "for i in gnn(all_encoder_layer,relations)] # print(output) # wemb_n, wemb_h, l_n, l_hpu, l_hs, nlu_tt,", "to the index of 2nd-level-token tok_to_orig_index: inverse map. 
\"\"\" l_n = [] l_hs", "# if len(current_hds1) > 0: # nlu_t.append(nlu_t1) # hds.append(current_hds1) return nodes,relations, new_schema def", "if len(current_hds1) > 0: nlu_t.append(nlu_t1) hds.append(current_hds1) return nlu_t, hds def prepare_input_gnn0(tokenizer, input_sequence, input_schema,", "utterance_states = [] # for i in range(len(t_to_tt_idx)): # start = t_to_tt_idx[i] #", "input_nodes =[ torch.tensor([i], dtype=torch.long).to(device) for i in nodes] masks = torch.tensor(masks, dtype=torch.long).to(device) with", "len(input_sequence) # schema_token_states = [] # cnt = -1 # for t_to_tt_idx_hds1 in", "the start position of original 'white-space' tokens. sub_tokens = tokenizer.tokenize(token) for sub_token in", "tb_name = {} # index of header in node - len(nodes) columns =", "hS * num_out_layers_h]).to(device) # print('wemb_h: [num_of_all_hds, l_hpu_max, hS * num_out_layers_h] = ', wemb_h.size())", "st:ed] = all_encoder_layer[i_layer][b, i_nlu1[0]:i_nlu1[1], :] return wemb_n def get_wemb_h(i_hds, l_hpu, l_hs, hS, num_hidden_layers,", "\"\"\" # Treat columns as if it is a batch of natural language", "nn import torch.nn.functional as F from .gated_graph_conv import GatedGraphConv from .bert import tokenization", "tokenization as tokenization from .bert.modeling import BertConfig, BertModel device = torch.device(\"cuda\" if torch.cuda.is_available()", "# print('primary') relations[1].append([tb_name[header],columns[col]]) else: relations[0].append([tb_name[header],columns[col]]) # for * # nodes += tables base", "range(bS): # [B, max_len, dim] # Fill zero for non-exist part. 
l_n1 =", "return nodes,relations,masks, new_schema def prepare_input_gnn(tokenizer, input_sequence, input_schema, max_seq_length,pad_len=12): \"\"\" Return: Nodes(list of tokenized", "== 1 and len(set(i_nlu)) == 1 assert l_n[0] == i_nlu[0][1] - i_nlu[0][0] return", "+ [hds1] tokens1, segment_ids1, i_nlu1, i_hds1, t_to_tt_idx_hds1 = generate_inputs(tokenizer, nlu_tt1, new_hds1) if len(segment_ids1)", "for i_noln in range(num_out_layers_n): i_layer = num_hidden_layers - 1 - i_noln st =", "\"id\" if col.find(\"id\") != -1: # print('primary') relations[1].append([tb_name[header],columns[col]]) else: relations[0].append([tb_name[header],columns[col]]) # for *", "t_to_tt_idx_hds1 in t_to_tt_idx_hds) == len(input_schema.column_names_embedder_input) assert list(wemb_h.size())[0] == len(input_schema.column_names_embedder_input) # print(22222222,len(input_schema.column_names_embedder_input),input_schema.column_names_embedder_input,input_schema.column_names_surface_form) utterance_states =", "== len(input_sequence) # assert sum(len(t_to_tt_idx_hds1) for t_to_tt_idx_hds1 in t_to_tt_idx_hds) == len(input_schema.column_names_embedder_input) # assert", "= len(tokens) i_hds.append((i_st_hd, i_ed_hd)) segment_ids += [1] * len(sub_tok) if i < len(hds1)-1:", "0 # print(123123123,all_encoder_layer[0][0].size(),len(all_encoder_layer[0]),len(all_encoder_layer),len(all_encoder_layer[3]),len(all_encoder_layer[10])) # print(123123, all_encoder_layer.size(),type(all_encoder_layer)) # all_encoder_layer = all_encoder_layer.permute(2,1,0) # print(all_encoder_layer.size()) #", "+= tokenizer.tokenize(token) current_hds1 = [] for hds1 in all_hds: new_hds1 = current_hds1 +", "in input_schema.table_schema['foreign_keys']] primary_idx = [ i for i in input_schema.table_schema['primary_keys']] foreign_key = [-1]", ":param tokenizer: WordPiece toknizer :param nlu: Question :param nlu_t: CoreNLP tokenized nlu. 
:param", "in nlu1_tok: tokens.append(token) segment_ids.append(0) i_ed_nlu = len(tokens) tokens.append(\"[SEP]\") segment_ids.append(0) i_hds = [] for", "sequence length. if len(nlu_t) == 1: max_seq_length = len(input_ids1) while len(input_ids1) < max_seq_length:", "question all_hds = input_schema.column_names_embedder_input # table name . column nlu_tt1 = [] #", "j in i: j[0] += base # tokenize nodes to feed into model", "= [] hds = [] nlu_t1 = input_sequence # segmented question all_hds =", "print('bert_config_file', bert_config_file) print('vocab_file', vocab_file) print('init_checkpoint', init_checkpoint) bert_config = BertConfig.from_json_file(bert_config_file) tokenizer = tokenization.FullTokenizer( vocab_file=vocab_file,", "for token in i],0).mean(0).unsqueeze(0) for i in input_nodes],0) if len(nodes) <=1: print(input_schema.column_names_embedder_input) print(input_schema.num_col)", "assume primary key have \"id\" if col.find(\"id\") != -1: # print('primary') relations[1].append([tb_name[header],columns[col]]) if", "-1 # print('key') relations[2].append([tb_name[header],columns[col]]) # add foreign key relation # relations[0].append([tb_name[header],columns[col]]) else: #", "elif bert_input_version == 'v2': nlu_t, hds, max_seq_length = prepare_input_v2(tokenizer, input_sequence, input_schema) wemb_n, wemb_h,", "max_seq_length, num_out_layers_n, num_out_layers_h) t_to_tt_idx = t_to_tt_idx[0] assert len(t_to_tt_idx) == len(input_sequence) assert sum(len(t_to_tt_idx_hds1) for", "if len(segment_ids1) > max_seq_length: # nlu_t.append(nlu_t1) # hds.append(current_hds1) # current_hds1 = [hds1] #", "nodes += tables relations[0] = relations[0] #column relations[1] = [[i,j] for i,j in", "= [] relations = [[],[],[]] # three edge types, we use tb_name.col as", "of tokenized db items) Return: relations(lists of list of related columns), inner list", "t_to_tt_idx.append(t_to_tt_idx1) l_n.append(len(nlu_tt1)) # [CLS] nlu [SEP] col1 [SEP] col2 [SEP] ...col-n [SEP] #", 
"WordPiece toknizer :param nlu: Question :param nlu_t: CoreNLP tokenized nlu. :param hds: Headers", "> max_seq_length: # nlu_t.append(nlu_t1) # hds.append(current_hds1) # current_hds1 = [hds1] # else: #", "in range(num_out_layers_n): i_layer = num_hidden_layers - 1 - i_noln st = i_noln *", "model_bert, tokenizer, nlu_t, hds, max_seq_length, num_out_layers_n, num_out_layers_h) t_to_tt_idx = t_to_tt_idx[0] assert len(t_to_tt_idx) ==", "= nodes all_encoder_layer = None if not embedder: input_nodes =[ torch.tensor([i], dtype=torch.long).to(device) for", "tokens. sub_tokens = tokenizer.tokenize(token) for sub_token in sub_tokens: tt_to_t_idx1.append(i) nlu_tt1.append(sub_token) # all_doc_tokens are", "relations, new_schema = prepare_input_gnn( tokenizer, input_sequence, input_schema, max_seq_length) if bert_input_version == 'v1': nlu_t,", "', bS, l_n_max, hS * num_out_layers_n) wemb_n = torch.zeros([bS, l_n_max, hS * num_out_layers_n]).to(device)", "nlu_tt1, current_hds1) max_seq_length = max(max_seq_length, len(segment_ids1)) nlu_t.append(nlu_t1) hds.append(current_hds1) current_hds1 = [hds1] current_table =", "segmented question all_hds = input_schema.column_names_embedder_input # table name . column tables = []", "# relations[0].append([tb_name[header],columns[col]]) else: # column id columns[col] = len(nodes) -1 # assume primary", "] returns first sub-token segement of i-th-1st-level-token for (i, token) in enumerate(nlu_t1): t_to_tt_idx1.append(", "= [] t_to_tt_idx_hds1 = [] tokens.append(\"[CLS]\") i_st_nlu = len(tokens) # to use it", "print('primary') relations[1].append([tb_name[header],columns[col]]) else: relations[0].append([tb_name[header],columns[col]]) # for * # nodes += tables base =", "index to retreive the position of contextual vector later. 
i_hds = [] doc_tokens", "generate_inputs(tokenizer, nlu_tt1, current_hds1) max_seq_length = max(max_seq_length, len(segment_ids1)) nlu_t.append(nlu_t1) hds.append(current_hds1) current_hds1 = [hds1] current_table", "get_wemb_n(i_nlu, l_n, bert_config.hidden_size, bert_config.num_hidden_layers, all_encoder_layer, num_out_layers_n) wemb_h = get_wemb_h(i_hds, l_hpu, l_hs, bert_config.hidden_size, bert_config.num_hidden_layers,", "= torch.tensor(input_ids, dtype=torch.long).to(device) all_input_mask = torch.tensor(input_mask, dtype=torch.long).to(device) all_segment_ids = torch.tensor(segment_ids, dtype=torch.long).to(device) # 4.", "foreign key relation else: # column id columns[col] = len(nodes) -1 # assume", "GatedGraphConv from .bert import tokenization as tokenization from .bert.modeling import BertConfig, BertModel device", "['[PAD]'] * (pad_len-len(nodes[i])) nodes[i] = tokenizer.convert_tokens_to_ids(nodes[i]) # print(nodes[i],masks[i]) # print(relations) # print(nodes,relations) #", "in enumerate(i_hds1): b_pu += 1 for i_nolh in range(num_out_layers_h): i_layer = num_hidden_layers -", "get contextual output of all tokens from bert all_encoder_layer, pooled_output, tokens, i_nlu, i_hds,\\", "torch.tensor([i], dtype=torch.long).to(device) for i in nodes] masks = torch.tensor(masks, dtype=torch.long).to(device) with torch.no_grad(): all_encoder_layer=", "t_to_tt_idx_hds def prepare_input(tokenizer, input_sequence, input_schema, max_seq_length): nlu_t = [] hds = [] nlu_t1", "tokenizer, input_sequence, input_schema, max_seq_length) elif bert_input_version == 'v2': raise(\"not inplemented\") nlu_t, hds, max_seq_length", "segment_ids1, i_nlu1, i_hds1, t_to_tt_idx_hds1 = generate_inputs(tokenizer, nlu_tt1, current_hds1) max_seq_length = max(max_seq_length, len(segment_ids1)) nlu_t.append(nlu_t1)", "* num_out_layers_n] = ', bS, l_n_max, hS * num_out_layers_n) wemb_n = torch.zeros([bS, l_n_max,", "enumerate(nlu_t1): nlu_tt1 += tokenizer.tokenize(token) current_hds1 = [] 
for hds1 in all_hds: new_hds1 =", "(pad_len-len(nodes[i])) nodes[i] = tokenizer.convert_tokens_to_ids(nodes[i]) # print(nodes[i],masks[i]) # print(relations) # print(nodes,relations) # for (i,", "tables = [] tb_name = {} # index of header in node -", "num_out_layers_h] = ', wemb_h.size()) b_pu = -1 for b, i_hds1 in enumerate(i_hds): for", "[] hds = [] max_seq_length = 0 nlu_t1 = input_sequence all_hds = input_schema.column_names_embedder_input", "raise EnvironmentError i_nlu = (i_st_nlu, i_ed_nlu) return tokens, segment_ids, i_nlu, i_hds, t_to_tt_idx_hds1 def", "-1 # assume primary key have \"id\" if col.find(\"id\") != -1: # print('primary')", "in t_to_tt_idx_hds) == len(input_schema.column_names_embedder_input) assert list(wemb_h.size())[0] == len(input_schema.column_names_embedder_input) # print(22222222,len(input_schema.column_names_embedder_input),input_schema.column_names_embedder_input,input_schema.column_names_surface_form) utterance_states = []", "'cL' or params.bert_type_abb == 'mcS': do_lower_case = False else: do_lower_case = True no_pretraining", "ed = (i_nolh + 1) * hS wemb_h[b_pu, 0:(i_hds11[1] - i_hds11[0]), st:ed] \\", "table name . column nlu_tt1 = [] # print(1111111,nlu_t1,all_hds) for (i, token) in", "+ tables # if len(new_schema) != len(nodes): # print(new_schema,nodes, len(nodes),len(new_schema)) assert len(new_schema) ==len(nodes)", "representation of each tokens. 
\"\"\" bS = len(l_n) l_n_max = max(l_n) # print('wemb_n:", "= len(tokens) t_to_tt_idx_hds11 = [] sub_tok = [] for sub_tok1 in hds11.split(): t_to_tt_idx_hds11.append(len(sub_tok))", "nodes += tables base = len(nodes) nodes += tables for i in relations:", "current_hds1 = new_hds1 if len(current_hds1) > 0: nlu_t.append(nlu_t1) hds.append(current_hds1) return nlu_t, hds def", "batch of natural language utterance with batch-size = # of columns * #", "= [] nlu_t1 = input_sequence # segmented question all_hds = input_schema.column_names_surface_form # table", "- 1 in foreign_idx or len(nodes)-1 in primary_idx): relations[0].append([tb_name[header],columns[col]]) # find table name", "current_hds1 = [hds1] current_table = hds1_table if len(current_hds1) > 0: tokens1, segment_ids1, i_nlu1,", "= False bert_config_file = os.path.join(BERT_PT_PATH, f'bert_config_{bert_type}.json') vocab_file = os.path.join(BERT_PT_PATH, f'vocab_{bert_type}.txt') init_checkpoint = os.path.join(BERT_PT_PATH,", "input_sequence # segmented question all_hds = input_schema.column_names_embedder_input # table name . column tables", "all_hds = input_schema.column_names_embedder_input # table name . column nlu_tt1 = [] # print(1111111,nlu_t1,all_hds)", "l_hpu_max, hS * num_out_layers_h]).to(device) # print('wemb_h: [num_of_all_hds, l_hpu_max, hS * num_out_layers_h] = ',", "# find('id') != -1 # print('key') relations[2].append([tb_name[header],columns[col]]) # add foreign key relation else:", "# orig_to_tok_idx[i] = start index of i-th-1st-level-token in all_tokens. nlu_tt1 = [] #", "in i_hds1: l_hpu.append(i_hds11[1] - i_hds11[0]) return l_hpu def get_bert_output(model_bert, tokenizer, nlu_t, hds, max_seq_length):", "# add foreign key relation else: # column id columns[col] = len(nodes) -1", "nlu_t1 = input_sequence # segmented question all_hds = input_schema.column_names_surface_form # table name.column tables", "= model_bert(all_input_ids, all_segment_ids, all_input_mask) # 5. 
generate l_hpu from i_hds l_hpu = gen_l_hpu(i_hds)", "in all_encoder_layer] # get hidden layer output as representation for each schema items", "not (len(nodes) - 1 in foreign_idx or len(nodes)-1 in primary_idx): relations[0].append([tb_name[header],columns[col]]) # find", "# else: # end = t_to_tt_idx_hds11[i+1] # schema_token_states1.append(torch.mean(wemb_h[cnt,start:end,:], dim=0)) # assert len(schema_token_states1) ==", "i_hds1, t_to_tt_idx_hds1 = generate_inputs(tokenizer, nlu_tt1, new_hds1) # if len(segment_ids1) > max_seq_length: # nlu_t.append(nlu_t1)", "index of 2nd-level-token tok_to_orig_index: inverse map. \"\"\" l_n = [] l_hs = []", "relations[0] = relations[0] #column relations[1] = [[i,j] for i,j in enumerate(input_schema.table_schema['primary_keys'])]#primary relations[2] =", "all_encoder_layer, num_out_layers_n) wemb_h = get_wemb_h(i_hds, l_hpu, l_hs, bert_config.hidden_size, bert_config.num_hidden_layers, all_encoder_layer, num_out_layers_h) return wemb_n,", "[1] * len(input_ids1) # 3. Zero-pad up to the sequence length. if len(nlu_t)", "for b1, i_hds11 in enumerate(i_hds1): b_pu += 1 for i_nolh in range(num_out_layers_h): i_layer", "attended to. input_mask1 = [1] * len(input_ids1) # 3. 
Zero-pad up to the", "all_encoder_layer],0) else: all_encoder_layer = torch.cat([torch.cat([embedder(token).unsqueeze(0) for token in i],0).mean(0).unsqueeze(0) for i in input_nodes],0)", "return nlu_t, hds, max_seq_length def get_gnn_encoding(tokenizer,model_bert,input_sequence,input_schema,gnn,gnn_encoder1,embedder=None,bert_input_version='v1',num_out_layers_h=1, max_seq_length=512,num_out_layers_n=1): # only get graph encoding without", "= prepare_input_gnn( tokenizer, input_sequence, input_schema, max_seq_length) elif bert_input_version == 'v2': raise(\"not inplemented\") nlu_t,", "in t_to_tt_idx_hds1: # cnt += 1 # schema_token_states1 = [] # for i", "= new_hds1 # if len(current_hds1) > 0: # nlu_t.append(nlu_t1) # hds.append(current_hds1) return nodes,relations,masks,", "# of batch_size i_hds = [(17, 18), (19, 21), (22, 23), (24, 25),", "# tokens are attended to. input_mask1 = [1] * len(input_ids1) # 3. Zero-pad", "Convert to tensor all_input_ids = torch.tensor(input_ids, dtype=torch.long).to(device) all_input_mask = torch.tensor(input_mask, dtype=torch.long).to(device) all_segment_ids =", "tables # print(1111111,input_schema.column_names_surface_form,relations, len(nodes),foreign_key,foreign_idx) # exit(0) for i in relations: for j in", "in all_hds: hds1_table = hds1.split('.')[0].strip() if hds1_table == current_table: current_hds1.append(hds1) else: tokens1, segment_ids1,", "# for t_to_tt_idx_hds11 in t_to_tt_idx_hds1: # cnt += 1 # schema_token_states1 = []", "WordPiece tt_to_t_idx1 = [] # number indicates where sub-token belongs to in 1st-level-tokens", "i.split('.') # if col.strip() != '*': # print(header,col) # first add headers nodes.append(i)", "t_to_tt_idx_hds11[i+1] # schema_token_states1.append(torch.mean(wemb_h[cnt,start:end,:], dim=0)) # assert len(schema_token_states1) == len(input_schema.column_names_embedder_input[cnt].split()) # schema_token_states.append(schema_token_states1) # assert", "[] for hds1 in all_hds: new_hds1 = current_hds1 + [hds1] 
tokens1, segment_ids1, i_nlu1,", "segment_ids.append(1) else: raise EnvironmentError i_nlu = (i_st_nlu, i_ed_nlu) return tokens, segment_ids, i_nlu, i_hds,", "output as representation for each schema items relations = [torch.tensor(i, dtype=torch.long).to(device) for i", "for i,j in zip(input_nodes,masks)],0) all_encoder_layer = torch.cat([gnn_encoder1(i.unsqueeze(0))[0][1][0].unsqueeze(0) for i in all_encoder_layer],0) else: all_encoder_layer", "end = t_to_tt_idx_hds11[i+1] # schema_token_states1.append(torch.mean(wemb_h[cnt,start:end,:], dim=0)) # assert len(schema_token_states1) == len(input_schema.column_names_embedder_input[cnt].split()) # schema_token_states.append(schema_token_states1)", "(WP) tokenizer and fed into BERT. INPUT :param model_bert: :param tokenizer: WordPiece toknizer", "items) Return: relations(lists of list of related columns), inner list corresponds to edge", "sum(len(t_to_tt_idx_hds1) for t_to_tt_idx_hds1 in t_to_tt_idx_hds) == len(input_schema.column_names_embedder_input) # assert list(wemb_h.size())[0] == len(input_schema.column_names_embedder_input) #", "len(current_hds1) > 0: # nlu_t.append(nlu_t1) # hds.append(current_hds1) return nodes,relations,masks, new_schema def prepare_input_gnn(tokenizer, input_sequence,", "into model masks = [] ## update new schema new_schema = input_schema.column_names_surface_form if", "key relation can be column to column or table to column (we choose", "hS wemb_h[b_pu, 0:(i_hds11[1] - i_hds11[0]), st:ed] \\ = all_encoder_layer[i_layer][b, i_hds11[0]:i_hds11[1],:] return wemb_h def", "input_sequence, input_schema) # relations = input_schema.relations # TODO: feed into gnn and return", "relations(lists of list of related columns), inner list corresponds to edge type \"\"\"", "[] # cnt = -1 # for t_to_tt_idx_hds1 in t_to_tt_idx_hds: # for t_to_tt_idx_hds11", "key relation # relations[0].append([tb_name[header],columns[col]]) else: # column id columns[col] = len(nodes) -1 #", "segment_ids.append(segment_ids1) 
input_mask.append(input_mask1) i_nlu.append(i_nlu1) i_hds.append(i_hds1) # Convert to tensor all_input_ids = torch.tensor(input_ids, dtype=torch.long).to(device) all_input_mask", "map. \"\"\" l_n = [] l_hs = [] # The length of columns", "columns with index in nodes as value # take redundancy for foreign key", "= item # nodes += tables # print(1111111,input_schema.column_names_surface_form,relations, len(nodes),foreign_key,foreign_idx) # exit(0) for i", "[gnn_encoder1(torch.cat(i,1))[0][1] for i in all_encoder_layer] # get hidden layer output as representation for", "enumerate(hds1): i_st_hd = len(tokens) t_to_tt_idx_hds11 = [] sub_tok = [] for sub_tok1 in", "new_schema = prepare_input_gnn( tokenizer, input_sequence, input_schema, max_seq_length) if bert_input_version == 'v1': nlu_t, hds", "import GatedGraphConv from .bert import tokenization as tokenization from .bert.modeling import BertConfig, BertModel", "# print(all_encoder_layer.size()) # print([gnn_encoder1(i.unsqueeze(0))[0][1][0] for i in all_encoder_layer][0]) # all_encoder_layer = torch.cat([gnn_encoder1(i.unsqueeze(0))[0][1][0].unsqueeze(0) for", "assert len(schema_token_states1) == len(input_schema.column_names_embedder_input[cnt].split()) schema_token_states.append(schema_token_states1) assert len(schema_token_states) == len(input_schema.column_names_embedder_input) if use_gnn: return utterance_states,", "i_nlu1 = i_nlu[b] for i_noln in range(num_out_layers_n): i_layer = num_hidden_layers - 1 -", "first sub-token segement of i-th-1st-level-token for (i, token) in enumerate(nlu_t1): t_to_tt_idx1.append( len(nlu_tt1)) #", "torch.cat([torch.cat([embedder(token).unsqueeze(0) for token in i],0).mean(0).unsqueeze(0) for i in input_nodes],0) if len(nodes) <=1: print(input_schema.column_names_embedder_input)", "nodes += tables for i in relations: for j in i: j[0] +=", "torch.tensor(masks, dtype=torch.long).to(device) with torch.no_grad(): all_encoder_layer= torch.cat([torch.cat(model_bert(i,j)[0],1) for i,j in 
zip(input_nodes,masks)],0) all_encoder_layer = torch.cat([gnn_encoder1(i.unsqueeze(0))[0][1][0].unsqueeze(0)", "and len(i.split('.')) > 1: header,col = i.split('.') # if col.strip() != '*': #", "nlu_t1 in enumerate(nlu_t): hds1 = hds[b] l_hs.append(len(hds1)) # 1. 2nd tokenization using WordPiece", "tokenized headers :param max_seq_length: max input token length OUTPUT tokens: BERT input tokens", "!= -1 # print('key') relations[2].append([tb_name[header],columns[col]]) # add foreign key relation else: # column", "import BertConfig, BertModel device = torch.device(\"cuda\" if torch.cuda.is_available() else \"cpu\") def get_bert(params): BERT_PT_PATH", "# current_hds1 = new_hds1 # if len(current_hds1) > 0: # nlu_t.append(nlu_t1) # hds.append(current_hds1)", "schema items relations = [torch.tensor(i, dtype=torch.long).to(device) for i in relations] # print(333333,relations, all_encoder_layer.size())", "len(input_ids1) == max_seq_length assert len(input_mask1) == max_seq_length assert len(segment_ids1) == max_seq_length input_ids.append(input_ids1) tokens.append(tokens1)", "for i in range(len(t_to_tt_idx_hds11)): # start = t_to_tt_idx_hds11[i] # if i == len(t_to_tt_idx_hds11)-1:", "* num_out_layers_n) wemb_n = torch.zeros([bS, l_n_max, hS * num_out_layers_n]).to(device) for b in range(bS):", "torch.nn as nn import torch.nn.functional as F from .gated_graph_conv import GatedGraphConv from .bert", "where sub-token belongs to in 1st-level-tokens (here, CoreNLP). 
t_to_tt_idx1 = [] # orig_to_tok_idx[i]", "max_seq_length) elif bert_input_version == 'v2': nlu_t, hds, max_seq_length = prepare_input_v2(tokenizer, input_sequence, input_schema) wemb_n,", "columns: if not header in tables: tables.append(header) tb_name[header] = len(tables) -1 #columns[col]= len(nodes)-1", "= num_hidden_layers - 1 - i_nolh st = i_nolh * hS ed =", "tokens nlu_tt: WP-tokenized input natural language questions orig_to_tok_index: map the index of 1st-level-token", "hS ed = (i_nolh + 1) * hS wemb_h[b_pu, 0:(i_hds11[1] - i_hds11[0]), st:ed]", "all_encoder_layer = None if not embedder: input_nodes =[ torch.tensor([i], dtype=torch.long).to(device) for i in", "or 1st-level tokenized headers :param max_seq_length: max input token length OUTPUT tokens: BERT", "- len(nodes) columns = {} nodes = [] relations = [[],[],[]] # three", "assert len(schema_token_states1) == len(input_schema.column_names_embedder_input[cnt].split()) # schema_token_states.append(schema_token_states1) # assert len(schema_token_states) == len(input_schema.column_names_embedder_input) return output,new_schema", "'uncased_L-24_H-1024_A-16', 'cS': 'cased_L-12_H-768_A-12', 'cL': 'cased_L-24_H-1024_A-16', 'mcS': 'multi_cased_L-12_H-768_A-12'} bert_type = map_bert_type_abb[params.bert_type_abb] if params.bert_type_abb ==", "all tokens from bert all_encoder_layer, pooled_output, tokens, i_nlu, i_hds,\\ l_n, l_hpu, l_hs, \\", "as embedding # print(relations) all_columns = {} # print(1111111,nlu_t1,all_hds) nodes.append('*') for i in", "encoding without input_sequence dependency nodes=relations=new_schema=None if bert_input_version == 'v1': nodes, relations, new_schema =", "in all_encoder_layer][0]) # all_encoder_layer = torch.cat([gnn_encoder1(i.unsqueeze(0))[0][1][0].unsqueeze(0) for i in all_encoder_layer],0) # all_encoder_layer =", "hidden layer output as representation for each schema items relations = [torch.tensor(i, dtype=torch.long).to(device)", "have \"id\" if col.find(\"id\") != -1: # 
print('primary') relations[1].append([tb_name[header],columns[col]]) else: relations[0].append([tb_name[header],columns[col]]) # for", "== len(hds1)-1: tokens.append(\"[SEP]\") segment_ids.append(1) else: raise EnvironmentError i_nlu = (i_st_nlu, i_ed_nlu) return tokens,", "output = [i for i in gnn(all_encoder_layer,relations)] # print(output) # wemb_n, wemb_h, l_n,", "num_out_layers_n, num_out_layers_h) # t_to_tt_idx = t_to_tt_idx[0] # assert len(t_to_tt_idx) == len(input_sequence) # assert", "input_schema): nlu_t = [] hds = [] max_seq_length = 0 nlu_t1 = input_sequence", "gen_l_hpu(i_hds) assert len(set(l_n)) == 1 and len(set(i_nlu)) == 1 assert l_n[0] == i_nlu[0][1]", "relations[0].append([tb_name[header],columns[col]]) # for * # nodes += tables base = len(nodes) nodes +=", "torch.nn.functional as F from .gated_graph_conv import GatedGraphConv from .bert import tokenization as tokenization", "if no_pretraining: pass else: model_bert.load_state_dict(torch.load(init_checkpoint, map_location='cpu')) print(\"Load pre-trained parameters.\") model_bert.to(device) return model_bert, tokenizer,", "segment_ids += [1] * len(sub_tok) if i < len(hds1)-1: tokens.append(\"[SEP]\") segment_ids.append(0) elif i", "hS, num_hidden_layers, all_encoder_layer, num_out_layers_h): \"\"\" As if [ [table-1-col-1-tok1, t1-c1-t2, ...], [t1-c2-t1, t1-c2-t2,", "= [] foreign_idx = [ i for i,j in input_schema.table_schema['foreign_keys']] primary_idx = [", "table name . 
column tables = [] tb_name = {} # index of", "enumerate(i_hds): for b1, i_hds11 in enumerate(i_hds1): b_pu += 1 for i_nolh in range(num_out_layers_h):", "columns = {} nodes = [] relations = [[],[],[]] # three edge types,", "nlu_t, hds def prepare_input_gnn0(tokenizer, input_sequence, input_schema, max_seq_length,pad_len=12): \"\"\" Return: Nodes(list of tokenized db", "i_noln * hS ed = (i_noln + 1) * hS wemb_n[b, 0:(i_nlu1[1] -", "[] for i, hds11 in enumerate(hds1): i_st_hd = len(tokens) t_to_tt_idx_hds11 = [] sub_tok", "if len(nlu_t) == 1: max_seq_length = len(input_ids1) while len(input_ids1) < max_seq_length: input_ids1.append(0) input_mask1.append(0)", "end indices of headers # get the wemb wemb_n = get_wemb_n(i_nlu, l_n, bert_config.hidden_size,", "> 1 assert len(relations[0]) > 0 # print(123123123,all_encoder_layer[0][0].size(),len(all_encoder_layer[0]),len(all_encoder_layer),len(all_encoder_layer[3]),len(all_encoder_layer[10])) # print(123123, all_encoder_layer.size(),type(all_encoder_layer)) # all_encoder_layer", "nlu: Question :param nlu_t: CoreNLP tokenized nlu. :param hds: Headers :param hs_t: None", "to. input_mask1 = [1] * len(input_ids1) # 3. 
Zero-pad up to the sequence", "sub_tok = [] for sub_tok1 in hds11.split(): t_to_tt_idx_hds11.append(len(sub_tok)) sub_tok += tokenizer.tokenize(sub_tok1) t_to_tt_idx_hds1.append(t_to_tt_idx_hds11) tokens", "l_hpu, l_hs, \\ nlu_tt, t_to_tt_idx, tt_to_t_idx, t_to_tt_idx_hds def prepare_input(tokenizer, input_sequence, input_schema, max_seq_length): nlu_t", "in enumerate(hds1): i_st_hd = len(tokens) t_to_tt_idx_hds11 = [] sub_tok = [] for sub_tok1", "sub_tokens = tokenizer.tokenize(token) for sub_token in sub_tokens: tt_to_t_idx1.append(i) nlu_tt1.append(sub_token) # all_doc_tokens are further", "= get_wemb_h(i_hds, l_hpu, l_hs, bert_config.hidden_size, bert_config.num_hidden_layers, all_encoder_layer, num_out_layers_h) return wemb_n, wemb_h, l_n, l_hpu,", "= './model/bert/data/annotated_wikisql_and_PyTorch_bert_param' map_bert_type_abb = {'uS': 'uncased_L-12_H-768_A-12', 'uL': 'uncased_L-24_H-1024_A-16', 'cS': 'cased_L-12_H-768_A-12', 'cL': 'cased_L-24_H-1024_A-16', 'mcS':", "bert_config_file) print('vocab_file', vocab_file) print('init_checkpoint', init_checkpoint) bert_config = BertConfig.from_json_file(bert_config_file) tokenizer = tokenization.FullTokenizer( vocab_file=vocab_file, do_lower_case=do_lower_case)", "bS = len(l_hs) l_hpu_max = max(l_hpu) num_of_all_hds = sum(l_hs) wemb_h = torch.zeros([num_of_all_hds, l_hpu_max,", "t_to_tt_idx[i+1] utterance_states.append(torch.mean(wemb_n[:,start:end,:], dim=[0,1])) assert len(utterance_states) == len(input_sequence) schema_token_states = [] cnt = -1", "hds: Headers :param hs_t: None or 1st-level tokenized headers :param max_seq_length: max input", "i for i in input_schema.table_schema['primary_keys']] foreign_key = [-1] * len(foreign_idx) # print(input_schema.table_schema['foreign_keys']) relations", "relations, new_schema = prepare_input_gnn( tokenizer, input_sequence, input_schema, max_seq_length) elif bert_input_version == 'v2': raise(\"not", "bS = len(l_n) l_n_max = max(l_n) # print('wemb_n: [bS, l_n_max, hS * 
num_out_layers_n]", "t1-c2-t2, ...]. ... [t2-c1-t1, ...,] ] \"\"\" bS = len(l_hs) l_hpu_max = max(l_hpu)", "foreign_key = [-1] * len(foreign_idx) # print(input_schema.table_schema['foreign_keys']) relations = [[],[],[]] # three edge", "params.bert_type_abb == 'mcS': do_lower_case = False else: do_lower_case = True no_pretraining = False", "name to columns with index in nodes as value # take redundancy for", "'' for hds1 in all_hds: hds1_table = hds1.split('.')[0].strip() if hds1_table == current_table: current_hds1.append(hds1)", "= [i for i in gnn(all_encoder_layer,relations)] # print(output) # wemb_n, wemb_h, l_n, l_hpu,", "t_to_tt_idx[0] assert len(t_to_tt_idx) == len(input_sequence) assert sum(len(t_to_tt_idx_hds1) for t_to_tt_idx_hds1 in t_to_tt_idx_hds) == len(input_schema.column_names_embedder_input)", "can be column to column or table to column (we choose latter one)", "len(tokens) # to use it later segment_ids.append(0) for token in nlu1_tok: tokens.append(token) segment_ids.append(0)", "input_nodes = nodes all_encoder_layer = None if not embedder: input_nodes =[ torch.tensor([i], dtype=torch.long).to(device)", "...], [t1-c2-t1, t1-c2-t2, ...]. ... [t2-c1-t1, ...,] ] \"\"\" bS = len(l_hs) l_hpu_max", "in 1st-level-tokens (here, CoreNLP). 
t_to_tt_idx1 = [] # orig_to_tok_idx[i] = start index of", "len(schema_token_states) == len(input_schema.column_names_embedder_input) return output,new_schema def get_bert_encoding(bert_config, model_bert, tokenizer, input_sequence, input_schema, bert_input_version='v1', gnn=None", "# assume primary key have \"id\" if col.find(\"id\") != -1: # print('primary') relations[1].append([tb_name[header],columns[col]])", "key relation else: # column id columns[col] = len(nodes) -1 # assume primary", "if use_gnn: if not (input_schema.table_schema['table_names'][0] in input_schema.column_names_embedder_input): input_schema.column_names_embedder_input += input_schema.table_schema['table_names'] input_schema.num_col += len(input_schema.table_schema['table_names'])", "in input_schema.table_schema['table_names_original']] nodes, relations, new_schema = prepare_input_gnn( tokenizer, input_sequence, input_schema, max_seq_length) if bert_input_version", "language utterance with batch-size = # of columns * # of batch_size i_hds", "new_schema def prepare_input_gnn2(schema,tokenizer): nodes = schema.nodes masks = [] new_schema = [] for", "masks, new_schema def prepare_input_v2(tokenizer, input_sequence, input_schema): nlu_t = [] hds = [] max_seq_length", "1 schema_token_states1 = [] for i in range(len(t_to_tt_idx_hds11)): start = t_to_tt_idx_hds11[i] if i", "# all_doc_tokens are further tokenized using WordPiece tokenizer nlu_tt.append(nlu_tt1) tt_to_t_idx.append(tt_to_t_idx1) t_to_tt_idx.append(t_to_tt_idx1) l_n.append(len(nlu_tt1)) #", "t_to_tt_idx[0] # assert len(t_to_tt_idx) == len(input_sequence) # assert sum(len(t_to_tt_idx_hds1) for t_to_tt_idx_hds1 in t_to_tt_idx_hds)", "no_pretraining = False bert_config_file = os.path.join(BERT_PT_PATH, f'bert_config_{bert_type}.json') vocab_file = os.path.join(BERT_PT_PATH, f'vocab_{bert_type}.txt') init_checkpoint =", "# tokenize nodes to feed into model masks = [] new_schema = []", "print(new_schema,nodes, len(nodes),len(new_schema)) 
assert len(new_schema) ==len(nodes) for i in range(len(nodes)): # new_schema.append(nodes[i]) # print(nodes[i])", "=[ torch.tensor([i], dtype=torch.long).to(device) for i in nodes] masks = torch.tensor(masks, dtype=torch.long).to(device) with torch.no_grad():", "bert_config.hidden_size, bert_config.num_hidden_layers, all_encoder_layer, num_out_layers_n) wemb_h = get_wemb_h(i_hds, l_hpu, l_hs, bert_config.hidden_size, bert_config.num_hidden_layers, all_encoder_layer, num_out_layers_h)", "# segmented question all_hds = input_schema.column_names_embedder_input # table name . column nlu_tt1 =", "import tokenization as tokenization from .bert.modeling import BertConfig, BertModel device = torch.device(\"cuda\" if", "model_bert.to(device) return model_bert, tokenizer, bert_config def generate_inputs(tokenizer, nlu1_tok, hds1): tokens = [] segment_ids", "= num_hidden_layers - 1 - i_noln st = i_noln * hS ed =", "[] # index to retreive the position of contextual vector later. i_hds =", "get_bert(params): BERT_PT_PATH = './model/bert/data/annotated_wikisql_and_PyTorch_bert_param' map_bert_type_abb = {'uS': 'uncased_L-12_H-768_A-12', 'uL': 'uncased_L-24_H-1024_A-16', 'cS': 'cased_L-12_H-768_A-12', 'cL':", "of headers # get the wemb wemb_n = get_wemb_n(i_nlu, l_n, bert_config.hidden_size, bert_config.num_hidden_layers, all_encoder_layer,", "to column or table to column (we choose latter one) base = len(nodes)", "[] for i in range(len(t_to_tt_idx_hds11)): start = t_to_tt_idx_hds11[i] if i == len(t_to_tt_idx_hds11)-1: end", "further tokenized using WordPiece tokenizer nlu_tt.append(nlu_tt1) tt_to_t_idx.append(tt_to_t_idx1) t_to_tt_idx.append(t_to_tt_idx1) l_n.append(len(nlu_tt1)) # [CLS] nlu [SEP]", "input_schema.column_names_embedder_input += input_schema.table_schema['table_names'] input_schema.num_col += len(input_schema.table_schema['table_names']) input_schema.column_names_surface_form += [i.lower() for i in input_schema.table_schema['table_names_original']]", 
"'cased_L-12_H-768_A-12', 'cL': 'cased_L-24_H-1024_A-16', 'mcS': 'multi_cased_L-12_H-768_A-12'} bert_type = map_bert_type_abb[params.bert_type_abb] if params.bert_type_abb == 'cS' or", "len(input_schema.column_names_embedder_input) # utterance_states = [] # for i in range(len(t_to_tt_idx)): # start =", "nlu_t.append(nlu_t1) hds.append(current_hds1) return nlu_t, hds def prepare_input_gnn0(tokenizer, input_sequence, input_schema, max_seq_length,pad_len=12): \"\"\" Return: Nodes(list", "= [gnn_encoder1(i.squeeze()) for i in all_encoder_layer] # all_encoder_layer = [gnn_encoder1(torch.cat(i,1))[0][1] for i in", "input_schema.table_schema['table_names_original']] nodes, relations, new_schema = prepare_input_gnn( tokenizer, input_sequence, input_schema, max_seq_length) if bert_input_version ==", "i],0).mean(0).unsqueeze(0) for i in input_nodes],0) if len(nodes) <=1: print(input_schema.column_names_embedder_input) print(input_schema.num_col) print(input_sequence) assert len(nodes)", "position of contextual vector later. i_hds = [] doc_tokens = [] nlu_tt =", "segment_ids = [] input_mask = [] i_nlu = [] # index to retreive", "nlu_tt, t_to_tt_idx, tt_to_t_idx, t_to_tt_idx_hds def prepare_input(tokenizer, input_sequence, input_schema, max_seq_length): nlu_t = [] hds", "num_hidden_layers, all_encoder_layer, num_out_layers_n): \"\"\" Get the representation of each tokens. \"\"\" bS =", "all_doc_tokens[ indicate the start position of original 'white-space' tokens. sub_tokens = tokenizer.tokenize(token) for", "= [torch.tensor(i, dtype=torch.long).to(device) for i in relations] # print(333333,relations, all_encoder_layer.size()) output = [i", "of i-th-1st-level-token in all_tokens. nlu_tt1 = [] # all_doc_tokens[ orig_to_tok_idx[i] ] returns first", "[ [table-1-col-1-tok1, t1-c1-t2, ...], [t1-c2-t1, t1-c2-t2, ...]. ... 
[t2-c1-t1, ...,] ] \"\"\" bS", "orig_to_tok_idx[i] ] returns first sub-token segement of i-th-1st-level-token for (i, token) in enumerate(nlu_t1):", "new_schema = prepare_input_gnn( tokenizer, input_sequence, input_schema, max_seq_length) elif bert_input_version == 'v2': raise(\"not inplemented\")", "in all_encoder_layer] # all_encoder_layer = [gnn_encoder1(torch.cat(i,1))[0][1] for i in all_encoder_layer] # get hidden", "i_hds,\\ l_n, l_hpu, l_hs, \\ nlu_tt, t_to_tt_idx, tt_to_t_idx, t_to_tt_idx_hds = get_bert_output(model_bert, tokenizer, nlu_t,", "t_to_tt_idx_hds) == len(input_schema.column_names_embedder_input) # assert list(wemb_h.size())[0] == len(input_schema.column_names_embedder_input) # utterance_states = [] #", "to in 1st-level-tokens (here, CoreNLP). t_to_tt_idx1 = [] # orig_to_tok_idx[i] = start index", "schema new_schema = input_schema.column_names_surface_form if len(new_schema) != len(nodes): new_schema = input_schema.column_names_surface_form + tables", "embedding # print(relations) all_columns = {} # print(1111111,nlu_t1,all_hds) nodes.append('*') for i in all_hds:", "of 2nd-level-token tok_to_orig_index: inverse map. \"\"\" l_n = [] l_hs = [] #", "hds.append(current_hds1) # current_hds1 = [hds1] # else: # current_hds1 = new_hds1 # if", "= torch.cat([gnn_encoder1(i.unsqueeze(0))[0][1][0].unsqueeze(0) for i in all_encoder_layer],0) # all_encoder_layer = [gnn_encoder1(i.squeeze()) for i in", "in relations: for j in i: j[0] += base # tokenize nodes to", "relations[2] = input_schema.table_schema['foreign_keys']#foriegn for i,item in enumerate(foreign_key): relations[2][i][0] = item # nodes +=", "[] # number indicates where sub-token belongs to in 1st-level-tokens (here, CoreNLP). 
t_to_tt_idx1", "else: end = t_to_tt_idx_hds11[i+1] schema_token_states1.append(torch.mean(wemb_h[cnt,start:end,:], dim=0)) assert len(schema_token_states1) == len(input_schema.column_names_embedder_input[cnt].split()) schema_token_states.append(schema_token_states1) assert len(schema_token_states)", "t_to_tt_idx1 = [] # orig_to_tok_idx[i] = start index of i-th-1st-level-token in all_tokens. nlu_tt1", "ed = (i_noln + 1) * hS wemb_n[b, 0:(i_nlu1[1] - i_nlu1[0]), st:ed] =", "foreign key relation can be column to column or table to column (we", "# cnt = -1 # for t_to_tt_idx_hds1 in t_to_tt_idx_hds: # for t_to_tt_idx_hds11 in", "up to the sequence length. if len(nlu_t) == 1: max_seq_length = len(input_ids1) while", "nodes[i] = tokenizer.convert_tokens_to_ids(nodes[i]) # print(nodes[i],masks[i]) # print(relations) # print(nodes,relations) # for (i, token)", "input_schema.column_names_surface_form # table name.column tables = [] tb_name = {} # index of", "has 1 for real tokens and 0 for padding tokens. 
Only real #", "in foreign_idx or len(nodes)-1 in primary_idx): relations[0].append([tb_name[header],columns[col]]) # find table name correspond to", "doc_tokens = [] nlu_tt = [] t_to_tt_idx = [] tt_to_t_idx = [] t_to_tt_idx_hds", "len(segment_ids1) == max_seq_length input_ids.append(input_ids1) tokens.append(tokens1) segment_ids.append(segment_ids1) input_mask.append(input_mask1) i_nlu.append(i_nlu1) i_hds.append(i_hds1) # Convert to tensor", "len(segment_ids1)) nlu_t.append(nlu_t1) hds.append(current_hds1) current_hds1 = [hds1] current_table = hds1_table if len(current_hds1) > 0:", "= '' for hds1 in all_hds: hds1_table = hds1.split('.')[0].strip() if hds1_table == current_table:", "= len(nodes) nodes += tables for i in relations: for j in i:", "assert len(new_schema) ==len(nodes) for i in range(len(nodes)): # new_schema.append(nodes[i]) # print(nodes[i]) nodes[i] =", "i in range(len(nodes)): # new_schema.append(nodes[i]) # print(nodes[i]) nodes[i] = tokenizer.tokenize(nodes[i]) # print(nodes[i]) #", "range(len(t_to_tt_idx)): # start = t_to_tt_idx[i] # if i == len(t_to_tt_idx)-1: # end =", "tokens += sub_tok i_ed_hd = len(tokens) i_hds.append((i_st_hd, i_ed_hd)) segment_ids += [1] * len(sub_tok)", "None if use_gnn: if not (input_schema.table_schema['table_names'][0] in input_schema.column_names_embedder_input): input_schema.column_names_embedder_input += input_schema.table_schema['table_names'] input_schema.num_col +=", "col in columns: if not header in tables: tables.append(header) tb_name[header] = len(tables) -1", "= [hds1] else: current_hds1 = new_hds1 if len(current_hds1) > 0: nlu_t.append(nlu_t1) hds.append(current_hds1) return", "as tokenization from .bert.modeling import BertConfig, BertModel device = torch.device(\"cuda\" if torch.cuda.is_available() else", "in t_to_tt_idx_hds) == len(input_schema.column_names_embedder_input) # assert list(wemb_h.size())[0] == len(input_schema.column_names_embedder_input) # utterance_states = []", "else: do_lower_case = 
True no_pretraining = False bert_config_file = os.path.join(BERT_PT_PATH, f'bert_config_{bert_type}.json') vocab_file =", "# end = l_hpu[cnt] # else: # end = t_to_tt_idx_hds11[i+1] # schema_token_states1.append(torch.mean(wemb_h[cnt,start:end,:], dim=0))", "-1 # for t_to_tt_idx_hds1 in t_to_tt_idx_hds: # for t_to_tt_idx_hds11 in t_to_tt_idx_hds1: # cnt", "tokens.append(tokens1) segment_ids.append(segment_ids1) input_mask.append(input_mask1) i_nlu.append(i_nlu1) i_hds.append(i_hds1) # Convert to tensor all_input_ids = torch.tensor(input_ids, dtype=torch.long).to(device)", "in tables: tables.append(header) tb_name[header] = len(tables) -1 #columns[col]= len(nodes)-1 # add column name", "!= -1: # print('primary') relations[1].append([tb_name[header],columns[col]]) if not (len(nodes) - 1 in foreign_idx or", "# table name . column tables = [] tb_name = {} # index", "\"cpu\") def get_bert(params): BERT_PT_PATH = './model/bert/data/annotated_wikisql_and_PyTorch_bert_param' map_bert_type_abb = {'uS': 'uncased_L-12_H-768_A-12', 'uL': 'uncased_L-24_H-1024_A-16', 'cS':", "new_schema.append(nodes[i]) # print(nodes[i]) nodes[i] = tokenizer.tokenize(nodes[i]) masks.append([1]*len(nodes[i]) + [0]*(pad_len-len(nodes[i]))) nodes[i] += ['[PAD]'] *", "Here, input is toknized further by WordPiece (WP) tokenizer and fed into BERT.", "find('id') != -1 # print('key') relations[2].append([tb_name[header],columns[col]]) # add foreign key relation # relations[0].append([tb_name[header],columns[col]])", "i in range(len(nodes)): new_schema.append(nodes[i]) # print(nodes[i]) nodes[i] = tokenizer.tokenize(nodes[i]) masks.append([1]*len(nodes[i]) + [0]*(pad_len-len(nodes[i]))) nodes[i]", ".bert import tokenization as tokenization from .bert.modeling import BertConfig, BertModel device = torch.device(\"cuda\"", "[] input_mask = [] i_nlu = [] # index to retreive the position", "[] relations = [[],[],[]] # three edge types, we use tb_name.col as embedding", "into BERT. 
INPUT :param model_bert: :param tokenizer: WordPiece toknizer :param nlu: Question :param", "hds = [] nlu_t1 = input_sequence # segmented question all_hds = input_schema.column_names_embedder_input #", "# to use it later segment_ids.append(0) for token in nlu1_tok: tokens.append(token) segment_ids.append(0) i_ed_nlu", "input_schema, max_seq_length) elif bert_input_version == 'v2': raise(\"not inplemented\") nlu_t, hds, max_seq_length = prepare_input_v2(tokenizer,", "relations[2].append([tb_name[header],columns[col]]) # add foreign key relation # relations[0].append([tb_name[header],columns[col]]) else: # column id columns[col]", "latter one) base = len(nodes) nodes += tables relations[0] = relations[0] #column relations[1]", "t_to_tt_idx_hds1 in t_to_tt_idx_hds: # for t_to_tt_idx_hds11 in t_to_tt_idx_hds1: # cnt += 1 #", "sub-token belongs to in 1st-level-tokens (here, CoreNLP). t_to_tt_idx1 = [] # orig_to_tok_idx[i] =", "i_hds = [] for i, hds11 in enumerate(hds1): i_st_hd = len(tokens) t_to_tt_idx_hds11 =", "index of 1st-level-token to the index of 2nd-level-token tok_to_orig_index: inverse map. 
\"\"\" l_n", "for t_to_tt_idx_hds1 in t_to_tt_idx_hds: for t_to_tt_idx_hds11 in t_to_tt_idx_hds1: cnt += 1 schema_token_states1 =", "Nodes(list of tokenized db items) Return: relations(lists of list of related columns), inner", "'cS': 'cased_L-12_H-768_A-12', 'cL': 'cased_L-24_H-1024_A-16', 'mcS': 'multi_cased_L-12_H-768_A-12'} bert_type = map_bert_type_abb[params.bert_type_abb] if params.bert_type_abb == 'cS'", "# all_encoder_layer = [gnn_encoder1(i.squeeze()) for i in all_encoder_layer] # all_encoder_layer = [gnn_encoder1(torch.cat(i,1))[0][1] for", "= prepare_input(tokenizer, input_sequence, input_schema, max_seq_length) elif bert_input_version == 'v2': nlu_t, hds, max_seq_length =", "wemb_n, wemb_h, l_n, l_hpu, l_hs, nlu_tt, t_to_tt_idx, tt_to_t_idx, t_to_tt_idx_hds = get_wemb_bert(bert_config, model_bert, tokenizer,", "False else: do_lower_case = True no_pretraining = False bert_config_file = os.path.join(BERT_PT_PATH, f'bert_config_{bert_type}.json') vocab_file", "tokenizer, nlu_t, hds, max_seq_length, num_out_layers_n, num_out_layers_h) t_to_tt_idx = t_to_tt_idx[0] assert len(t_to_tt_idx) == len(input_sequence)", "in range(len(nodes)): # new_schema.append(nodes[i]) # print(nodes[i]) nodes[i] = tokenizer.tokenize(nodes[i]) # print(nodes[i]) # masks.append([1]*len(nodes[i])", "sub_tok += tokenizer.tokenize(sub_tok1) t_to_tt_idx_hds1.append(t_to_tt_idx_hds11) tokens += sub_tok i_ed_hd = len(tokens) i_hds.append((i_st_hd, i_ed_hd)) segment_ids", "# print(nodes[i]) nodes[i] = tokenizer.tokenize(nodes[i]) # print(nodes[i]) # masks.append([1]*len(nodes[i]) + [0]*(pad_len-len(nodes[i]))) # nodes[i]", "current_table: current_hds1.append(hds1) else: tokens1, segment_ids1, i_nlu1, i_hds1, t_to_tt_idx_hds1 = generate_inputs(tokenizer, nlu_tt1, current_hds1) max_seq_length", "no_pretraining: pass else: model_bert.load_state_dict(torch.load(init_checkpoint, map_location='cpu')) print(\"Load pre-trained parameters.\") model_bert.to(device) return model_bert, tokenizer, 
bert_config", "if col in columns: # find('id') != -1 # print('key') relations[2].append([tb_name[header],columns[col]]) # add", "wemb_h = get_wemb_h(i_hds, l_hpu, l_hs, bert_config.hidden_size, bert_config.num_hidden_layers, all_encoder_layer, num_out_layers_h) return wemb_n, wemb_h, l_n,", "i in all_encoder_layer] # get hidden layer output as representation for each schema", "all_segment_ids = torch.tensor(segment_ids, dtype=torch.long).to(device) # 4. Generate BERT output. all_encoder_layer, pooled_output = model_bert(all_input_ids,", "- i_noln st = i_noln * hS ed = (i_noln + 1) *", "hds[b] l_hs.append(len(hds1)) # 1. 2nd tokenization using WordPiece tt_to_t_idx1 = [] # number", "def generate_inputs(tokenizer, nlu1_tok, hds1): tokens = [] segment_ids = [] t_to_tt_idx_hds1 = []", "[CLS] vec. # tokens: BERT intput tokens # i_nlu: start and end indices", "masks # The mask has 1 for real tokens and 0 for padding", "tokens: BERT input tokens nlu_tt: WP-tokenized input natural language questions orig_to_tok_index: map the", "+= ['[PAD]'] * (pad_len-len(nodes[i])) nodes[i] = tokenizer.convert_tokens_to_ids(nodes[i]) # print(nodes[i],masks[i]) # print(relations) # print(nodes,relations)", "Treat columns as if it is a batch of natural language utterance with", "hds.append(current_hds1) return nodes,relations, new_schema def prepare_input_gnn2(schema,tokenizer): nodes = schema.nodes masks = [] new_schema", "sub_token in sub_tokens: tt_to_t_idx1.append(i) nlu_tt1.append(sub_token) # all_doc_tokens are further tokenized using WordPiece tokenizer", "print(input_schema.column_names_embedder_input) print(input_schema.num_col) print(input_sequence) assert len(nodes) > 1 assert len(relations[0]) > 0 # print(123123123,all_encoder_layer[0][0].size(),len(all_encoder_layer[0]),len(all_encoder_layer),len(all_encoder_layer[3]),len(all_encoder_layer[10]))", "= -1 for t_to_tt_idx_hds1 in t_to_tt_idx_hds: for t_to_tt_idx_hds11 in t_to_tt_idx_hds1: cnt += 1", "def 
prepare_input_gnn2(schema,tokenizer): nodes = schema.nodes masks = [] new_schema = [] for i", "all_encoder_layer, pooled_output = model_bert(all_input_ids, all_segment_ids, all_input_mask) # 5. generate l_hpu from i_hds l_hpu", "# nlu_t.append(nlu_t1) # hds.append(current_hds1) return nodes,relations,masks, new_schema def prepare_input_gnn(tokenizer, input_sequence, input_schema, max_seq_length,pad_len=12): \"\"\"", "l_hpu = [] for i_hds1 in i_hds: for i_hds11 in i_hds1: l_hpu.append(i_hds11[1] -", "= sum(l_hs) wemb_h = torch.zeros([num_of_all_hds, l_hpu_max, hS * num_out_layers_h]).to(device) # print('wemb_h: [num_of_all_hds, l_hpu_max,", "list corresponds to edge type \"\"\" nlu_t = [] hds = [] nlu_t1", "> 0: tokens1, segment_ids1, i_nlu1, i_hds1, t_to_tt_idx_hds1 = generate_inputs(tokenizer, nlu_tt1, current_hds1) max_seq_length =", "\"\"\" nlu_t = [] hds = [] nlu_t1 = input_sequence # segmented question", "+= tokenizer.tokenize(token) current_hds1 = [] current_table = '' for hds1 in all_hds: hds1_table", "i_hds1, t_to_tt_idx_hds1 = generate_inputs(tokenizer, nlu_tt1, current_hds1) max_seq_length = max(max_seq_length, len(segment_ids1)) nlu_t.append(nlu_t1) hds.append(current_hds1) current_hds1", "= hds[b] l_hs.append(len(hds1)) # 1. 
2nd tokenization using WordPiece tt_to_t_idx1 = [] #", "!= '*': # print(header,col) # first add headers nodes.append(i) # if not col", "i_hds = [] doc_tokens = [] nlu_tt = [] t_to_tt_idx = [] tt_to_t_idx", "os.path.join(BERT_PT_PATH, f'bert_config_{bert_type}.json') vocab_file = os.path.join(BERT_PT_PATH, f'vocab_{bert_type}.txt') init_checkpoint = os.path.join(BERT_PT_PATH, f'pytorch_model_{bert_type}.bin') print('bert_config_file', bert_config_file) print('vocab_file',", "foreign_idx or len(nodes)-1 in primary_idx): relations[0].append([tb_name[header],columns[col]]) # find table name correspond to foreign", "torch.device(\"cuda\" if torch.cuda.is_available() else \"cpu\") def get_bert(params): BERT_PT_PATH = './model/bert/data/annotated_wikisql_and_PyTorch_bert_param' map_bert_type_abb = {'uS':", "= [-1] * len(foreign_idx) # print(input_schema.table_schema['foreign_keys']) relations = [[],[],[]] # three edge types,", "\\ nlu_tt, t_to_tt_idx, tt_to_t_idx, t_to_tt_idx_hds def get_wemb_n(i_nlu, l_n, hS, num_hidden_layers, all_encoder_layer, num_out_layers_n): \"\"\"", "# hds.append(current_hds1) return nodes,relations, new_schema def prepare_input_gnn2(schema,tokenizer): nodes = schema.nodes masks = []", "> max_seq_length: nlu_t.append(nlu_t1) hds.append(current_hds1) current_hds1 = [hds1] else: current_hds1 = new_hds1 if len(current_hds1)", "tokens: BERT intput tokens # i_nlu: start and end indices of question in", "len(input_sequence) assert sum(len(t_to_tt_idx_hds1) for t_to_tt_idx_hds1 in t_to_tt_idx_hds) == len(input_schema.column_names_embedder_input) assert list(wemb_h.size())[0] == len(input_schema.column_names_embedder_input)", "assert l_n[0] == i_nlu[0][1] - i_nlu[0][0] return all_encoder_layer, pooled_output, tokens, i_nlu, i_hds, \\", "BERT_PT_PATH = './model/bert/data/annotated_wikisql_and_PyTorch_bert_param' map_bert_type_abb = {'uS': 'uncased_L-12_H-768_A-12', 'uL': 'uncased_L-24_H-1024_A-16', 'cS': 'cased_L-12_H-768_A-12', 'cL': 
'cased_L-24_H-1024_A-16',", "all_hds: # new_hds1 = current_hds1 + [hds1] # tokens1, segment_ids1, i_nlu1, i_hds1, t_to_tt_idx_hds1", "tokenizer.convert_tokens_to_ids(nodes[i]) return nodes, masks, new_schema def prepare_input_v2(tokenizer, input_sequence, input_schema): nlu_t = [] hds", "hds = prepare_input(tokenizer, input_sequence, input_schema, max_seq_length) elif bert_input_version == 'v2': nlu_t, hds, max_seq_length", "in all_hds: # new_hds1 = current_hds1 + [hds1] # tokens1, segment_ids1, i_nlu1, i_hds1,", "base = len(nodes) nodes += tables relations[0] = relations[0] #column relations[1] = [[i,j]", "return embedding # print(relations) # print(2222222,type(input_nodes),input_nodes) masks = None input_nodes = nodes all_encoder_layer", "each schema items relations = [torch.tensor(i, dtype=torch.long).to(device) for i in relations] # print(333333,relations,", "segment_ids, i_nlu, i_hds, t_to_tt_idx_hds1 def gen_l_hpu(i_hds): \"\"\" # Treat columns as if it", "# print(relations) # print(2222222,type(input_nodes),input_nodes) masks = None input_nodes = nodes all_encoder_layer = None", "bert_config def generate_inputs(tokenizer, nlu1_tok, hds1): tokens = [] segment_ids = [] t_to_tt_idx_hds1 =", "from .bert.modeling import BertConfig, BertModel device = torch.device(\"cuda\" if torch.cuda.is_available() else \"cpu\") def", "nlu_t.append(nlu_t1) hds.append(current_hds1) current_hds1 = [hds1] current_table = hds1_table if len(current_hds1) > 0: tokens1,", "num_out_layers_h): \"\"\" As if [ [table-1-col-1-tok1, t1-c1-t2, ...], [t1-c2-t1, t1-c2-t2, ...]. ... 
[t2-c1-t1,", "tokenizer, nlu_t, hds, max_seq_length, num_out_layers_n, num_out_layers_h) # t_to_tt_idx = t_to_tt_idx[0] # assert len(t_to_tt_idx)", "current_hds1 = [hds1] else: current_hds1 = new_hds1 if len(current_hds1) > 0: nlu_t.append(nlu_t1) hds.append(current_hds1)", "num_out_layers_h) # t_to_tt_idx = t_to_tt_idx[0] # assert len(t_to_tt_idx) == len(input_sequence) # assert sum(len(t_to_tt_idx_hds1)", "nodes[i] = tokenizer.tokenize(nodes[i]) # print(nodes[i]) # masks.append([1]*len(nodes[i]) + [0]*(pad_len-len(nodes[i]))) # nodes[i] += ['[PAD]']", "hds1_table if len(current_hds1) > 0: tokens1, segment_ids1, i_nlu1, i_hds1, t_to_tt_idx_hds1 = generate_inputs(tokenizer, nlu_tt1,", "in gnn(all_encoder_layer,relations)] # print(output) # wemb_n, wemb_h, l_n, l_hpu, l_hs, nlu_tt, t_to_tt_idx, tt_to_t_idx,", "print(relations) all_columns = {} # print(1111111,nlu_t1,all_hds) nodes.append('*') for i in all_hds: # print(i.split('.'))", "[] sub_tok = [] for sub_tok1 in hds11.split(): t_to_tt_idx_hds11.append(len(sub_tok)) sub_tok += tokenizer.tokenize(sub_tok1) t_to_tt_idx_hds1.append(t_to_tt_idx_hds11)", "get_wemb_n(i_nlu, l_n, hS, num_hidden_layers, all_encoder_layer, num_out_layers_n): \"\"\" Get the representation of each tokens.", "take redundancy for foreign key if col in columns: # find('id') != -1", "return wemb_h def get_wemb_bert(bert_config, model_bert, tokenizer, nlu_t, hds, max_seq_length, num_out_layers_n=1, num_out_layers_h=1): # get", "return nodes,relations, new_schema def prepare_input_gnn2(schema,tokenizer): nodes = schema.nodes masks = [] new_schema =", "t_to_tt_idx_hds = get_wemb_bert(bert_config, model_bert, tokenizer, nlu_t, hds, max_seq_length, num_out_layers_n, num_out_layers_h) t_to_tt_idx = t_to_tt_idx[0]", "of columns for each batch input_ids = [] tokens = [] segment_ids =", "schema_token_states.append(schema_token_states1) assert len(schema_token_states) == len(input_schema.column_names_embedder_input) if use_gnn: return utterance_states, 
schema_token_states, relations else: return", "# end = t_to_tt_idx[i+1] # utterance_states.append(torch.mean(wemb_n[:,start:end,:], dim=[0,1])) # assert len(utterance_states) == len(input_sequence) #", "t_to_tt_idx_hds1 def gen_l_hpu(i_hds): \"\"\" # Treat columns as if it is a batch", "len(tables) -1 #columns[col]= len(nodes)-1 # add column name to columns with index in", "== max_seq_length assert len(segment_ids1) == max_seq_length input_ids.append(input_ids1) tokens.append(tokens1) segment_ids.append(segment_ids1) input_mask.append(input_mask1) i_nlu.append(i_nlu1) i_hds.append(i_hds1) #", "\\ l_n, l_hpu, l_hs, \\ nlu_tt, t_to_tt_idx, tt_to_t_idx, t_to_tt_idx_hds def get_wemb_n(i_nlu, l_n, hS,", "in range(len(t_to_tt_idx_hds11)): start = t_to_tt_idx_hds11[i] if i == len(t_to_tt_idx_hds11)-1: end = l_hpu[cnt] else:", "print(\"Load pre-trained parameters.\") model_bert.to(device) return model_bert, tokenizer, bert_config def generate_inputs(tokenizer, nlu1_tok, hds1): tokens", "segmented question all_hds = input_schema.column_names_embedder_input # table name . column nlu_tt1 = []", "new_hds1 # if len(current_hds1) > 0: # nlu_t.append(nlu_t1) # hds.append(current_hds1) return nodes,relations,masks, new_schema", "output of [CLS] vec. # tokens: BERT intput tokens # i_nlu: start and", "indicates where sub-token belongs to in 1st-level-tokens (here, CoreNLP). 
t_to_tt_idx1 = [] #", "hs_t: None or 1st-level tokenized headers :param max_seq_length: max input token length OUTPUT", "hds1 in all_hds: new_hds1 = current_hds1 + [hds1] tokens1, segment_ids1, i_nlu1, i_hds1, t_to_tt_idx_hds1", "relation can be column to column or table to column (we choose latter", "torch.cat([torch.cat(model_bert(i,j)[0],1) for i,j in zip(input_nodes,masks)],0) all_encoder_layer = torch.cat([gnn_encoder1(i.unsqueeze(0))[0][1][0].unsqueeze(0) for i in all_encoder_layer],0) else:", "= torch.cat([torch.cat([embedder(token).unsqueeze(0) for token in i],0).mean(0).unsqueeze(0) for i in input_nodes],0) if len(nodes) <=1:", "t_to_tt_idx_hds) == len(input_schema.column_names_embedder_input) assert list(wemb_h.size())[0] == len(input_schema.column_names_embedder_input) # print(22222222,len(input_schema.column_names_embedder_input),input_schema.column_names_embedder_input,input_schema.column_names_surface_form) utterance_states = [] for", "= len(tokens) tokens.append(\"[SEP]\") segment_ids.append(0) i_hds = [] for i, hds11 in enumerate(hds1): i_st_hd", "[[],[],[]] # three edge types, we use tb_name.col as embedding # print(relations) all_columns", "st:ed] \\ = all_encoder_layer[i_layer][b, i_hds11[0]:i_hds11[1],:] return wemb_h def get_wemb_bert(bert_config, model_bert, tokenizer, nlu_t, hds,", "segmented question all_hds = input_schema.column_names_surface_form # table name.column tables = [] tb_name =", "= input_sequence # segmented question all_hds = input_schema.column_names_surface_form # table name.column tables =", "nlu_tt1 = [] for (i, token) in enumerate(nlu_t1): nlu_tt1 += tokenizer.tokenize(token) current_hds1 =", "tokenizer = tokenization.FullTokenizer( vocab_file=vocab_file, do_lower_case=do_lower_case) bert_config.print_status() model_bert = BertModel(bert_config) if no_pretraining: pass else:", "nlu_tt1 += tokenizer.tokenize(token) # current_hds1 = [] # for hds1 in all_hds: #", "indices of headers # get the wemb wemb_n = get_wemb_n(i_nlu, 
l_n, bert_config.hidden_size, bert_config.num_hidden_layers,", "for t_to_tt_idx_hds1 in t_to_tt_idx_hds) == len(input_schema.column_names_embedder_input) # assert list(wemb_h.size())[0] == len(input_schema.column_names_embedder_input) # utterance_states", "for i in all_encoder_layer] # get hidden layer output as representation for each", "in nodes] masks = torch.tensor(masks, dtype=torch.long).to(device) with torch.no_grad(): all_encoder_layer= torch.cat([torch.cat(model_bert(i,j)[0],1) for i,j in", "and return embedding # print(relations) # print(2222222,type(input_nodes),input_nodes) masks = None input_nodes = nodes", "tokens1, segment_ids1, i_nlu1, i_hds1, t_to_tt_idx_hds1 = generate_inputs(tokenizer, nlu_tt1, new_hds1) # if len(segment_ids1) >", "# of columns * # of batch_size i_hds = [(17, 18), (19, 21),", "t_to_tt_idx_hds11 = [] sub_tok = [] for sub_tok1 in hds11.split(): t_to_tt_idx_hds11.append(len(sub_tok)) sub_tok +=", "tables: tables.append(header) tb_name[header] = len(tables) -1 #columns[col]= len(nodes)-1 # add column name to", "= len(tables) -1 #columns[col]= len(nodes)-1 # add column name to columns with index", "tokenizer.tokenize(token) # current_hds1 = [] # for hds1 in all_hds: # new_hds1 =", "relations[0].append([tb_name[header],columns[col]]) else: # column id columns[col] = len(nodes) -1 # assume primary key", "to feed into model masks = [] ## update new schema new_schema =", "for (i, token) in enumerate(nlu_t1): nlu_tt1 += tokenizer.tokenize(token) current_hds1 = [] current_table =", "if hds1_table == current_table: current_hds1.append(hds1) else: tokens1, segment_ids1, i_nlu1, i_hds1, t_to_tt_idx_hds1 = generate_inputs(tokenizer,", "i_hds11 in i_hds1: l_hpu.append(i_hds11[1] - i_hds11[0]) return l_hpu def get_bert_output(model_bert, tokenizer, nlu_t, hds,", "# schema_token_states = [] # cnt = -1 # for t_to_tt_idx_hds1 in t_to_tt_idx_hds:", "as if it is a batch of natural language utterance with batch-size =", "max_len, dim] # Fill zero for 
non-exist part. l_n1 = l_n[b] i_nlu1 =", "nodes all_encoder_layer = None if not embedder: input_nodes =[ torch.tensor([i], dtype=torch.long).to(device) for i", "= [] segment_ids = [] input_mask = [] i_nlu = [] # index", "foreign key if col in columns: # find('id') != -1 # print('key') relations[2].append([tb_name[header],columns[col]])", "len(input_schema.column_names_embedder_input) # print(22222222,len(input_schema.column_names_embedder_input),input_schema.column_names_embedder_input,input_schema.column_names_surface_form) utterance_states = [] for i in range(len(t_to_tt_idx)): start = t_to_tt_idx[i]", "[] i_nlu = [] # index to retreive the position of contextual vector", "model_bert, tokenizer, nlu_t, hds, max_seq_length, num_out_layers_n=1, num_out_layers_h=1): # get contextual output of all", "if i == len(t_to_tt_idx)-1: # end = l_n[0] # else: # end =", "belongs to in 1st-level-tokens (here, CoreNLP). t_to_tt_idx1 = [] # orig_to_tok_idx[i] = start", "# find('id') != -1 # print('key') relations[2].append([tb_name[header],columns[col]]) # add foreign key relation #", "tokenized nlu. 
:param hds: Headers :param hs_t: None or 1st-level tokenized headers :param", "cnt = -1 # for t_to_tt_idx_hds1 in t_to_tt_idx_hds: # for t_to_tt_idx_hds11 in t_to_tt_idx_hds1:", "# nlu_t.append(nlu_t1) # hds.append(current_hds1) return nodes,relations, new_schema def prepare_input_gnn2(schema,tokenizer): nodes = schema.nodes masks", "end = l_hpu[cnt] # else: # end = t_to_tt_idx_hds11[i+1] # schema_token_states1.append(torch.mean(wemb_h[cnt,start:end,:], dim=0)) #", "== len(t_to_tt_idx)-1: end = l_n[0] else: end = t_to_tt_idx[i+1] utterance_states.append(torch.mean(wemb_n[:,start:end,:], dim=[0,1])) assert len(utterance_states)", "# i_nlu: start and end indices of question in tokens # i_hds: start", "bert_config.num_hidden_layers, all_encoder_layer, num_out_layers_n) wemb_h = get_wemb_h(i_hds, l_hpu, l_hs, bert_config.hidden_size, bert_config.num_hidden_layers, all_encoder_layer, num_out_layers_h) return", "or len(nodes)-1 in primary_idx): relations[0].append([tb_name[header],columns[col]]) # find table name correspond to foreign key", "= os.path.join(BERT_PT_PATH, f'vocab_{bert_type}.txt') init_checkpoint = os.path.join(BERT_PT_PATH, f'pytorch_model_{bert_type}.bin') print('bert_config_file', bert_config_file) print('vocab_file', vocab_file) print('init_checkpoint', init_checkpoint)", "token in nlu1_tok: tokens.append(token) segment_ids.append(0) i_ed_nlu = len(tokens) tokens.append(\"[SEP]\") segment_ids.append(0) i_hds = []", "tables.append(header) tb_name[header] = len(tables) -1 #columns[col]= len(nodes)-1 # add column name to columns", "i_hds11 in enumerate(i_hds1): b_pu += 1 for i_nolh in range(num_out_layers_h): i_layer = num_hidden_layers", "for non-exist part. 
l_n1 = l_n[b] i_nlu1 = i_nlu[b] for i_noln in range(num_out_layers_n):", "columns for each batch input_ids = [] tokens = [] segment_ids = []", "columns), inner list corresponds to edge type \"\"\" nlu_t = [] hds =", "# Treat columns as if it is a batch of natural language utterance", "= generate_inputs(tokenizer, nlu_tt1, current_hds1) max_seq_length = max(max_seq_length, len(segment_ids1)) nlu_t.append(nlu_t1) hds.append(current_hds1) current_hds1 = [hds1]", "[num_of_all_hds, l_hpu_max, hS * num_out_layers_h] = ', wemb_h.size()) b_pu = -1 for b,", "# utterance_states = [] # for i in range(len(t_to_tt_idx)): # start = t_to_tt_idx[i]", "= os.path.join(BERT_PT_PATH, f'pytorch_model_{bert_type}.bin') print('bert_config_file', bert_config_file) print('vocab_file', vocab_file) print('init_checkpoint', init_checkpoint) bert_config = BertConfig.from_json_file(bert_config_file) tokenizer", "toknizer :param nlu: Question :param nlu_t: CoreNLP tokenized nlu. :param hds: Headers :param", "print(333333,relations, all_encoder_layer.size()) output = [i for i in gnn(all_encoder_layer,relations)] # print(output) # wemb_n,", "i,j in enumerate(input_schema.table_schema['primary_keys'])]#primary relations[2] = input_schema.table_schema['foreign_keys']#foriegn for i,item in enumerate(foreign_key): relations[2][i][0] = item", "inverse map. \"\"\" l_n = [] l_hs = [] # The length of", "hds.append(current_hds1) current_hds1 = [hds1] else: current_hds1 = new_hds1 if len(current_hds1) > 0: nlu_t.append(nlu_t1)", "original 'white-space' tokens. 
sub_tokens = tokenizer.tokenize(token) for sub_token in sub_tokens: tt_to_t_idx1.append(i) nlu_tt1.append(sub_token) #", "<filename>model/utils_bert.py # modified from https://github.com/naver/sqlova import os, json import random as rd from", "1 in foreign_idx or len(nodes)-1 in primary_idx): relations[0].append([tb_name[header],columns[col]]) # find table name correspond", "print(relations) # print(2222222,type(input_nodes),input_nodes) masks = None input_nodes = nodes all_encoder_layer = None if", "assert len(nodes) > 1 assert len(relations[0]) > 0 # print(123123123,all_encoder_layer[0][0].size(),len(all_encoder_layer[0]),len(all_encoder_layer),len(all_encoder_layer[3]),len(all_encoder_layer[10])) # print(123123, all_encoder_layer.size(),type(all_encoder_layer))", "i_layer = num_hidden_layers - 1 - i_nolh st = i_nolh * hS ed", "natural language questions orig_to_tok_index: map the index of 1st-level-token to the index of", "= hds1.split('.')[0].strip() if hds1_table == current_table: current_hds1.append(hds1) else: tokens1, segment_ids1, i_nlu1, i_hds1, t_to_tt_idx_hds1", "As if [ [table-1-col-1-tok1, t1-c1-t2, ...], [t1-c2-t1, t1-c2-t2, ...]. ... 
[t2-c1-t1, ...,] ]", "tb_name.col as embedding # print(relations) all_columns = {} # print(1111111,nlu_t1,all_hds) nodes.append('*') for i", "token) in enumerate(nlu_t1): nlu_tt1 += tokenizer.tokenize(token) current_hds1 = [] for hds1 in all_hds:", "nlu_t.append(nlu_t1) hds.append(current_hds1) current_hds1 = [hds1] else: current_hds1 = new_hds1 if len(current_hds1) > 0:", "tokenizer.tokenize(sub_tok1) t_to_tt_idx_hds1.append(t_to_tt_idx_hds11) tokens += sub_tok i_ed_hd = len(tokens) i_hds.append((i_st_hd, i_ed_hd)) segment_ids += [1]", "b1, i_hds11 in enumerate(i_hds1): b_pu += 1 for i_nolh in range(num_out_layers_h): i_layer =", "start = t_to_tt_idx[i] # if i == len(t_to_tt_idx)-1: # end = l_n[0] #", "with torch.no_grad(): all_encoder_layer= torch.cat([torch.cat(model_bert(i,j)[0],1) for i,j in zip(input_nodes,masks)],0) all_encoder_layer = torch.cat([gnn_encoder1(i.unsqueeze(0))[0][1][0].unsqueeze(0) for i", "i_hds1, t_to_tt_idx_hds1 = generate_inputs(tokenizer, nlu_tt1, hds1) assert len(t_to_tt_idx_hds1) == len(hds1) t_to_tt_idx_hds.append(t_to_tt_idx_hds1) input_ids1 =", "assert len(input_mask1) == max_seq_length assert len(segment_ids1) == max_seq_length input_ids.append(input_ids1) tokens.append(tokens1) segment_ids.append(segment_ids1) input_mask.append(input_mask1) i_nlu.append(i_nlu1)", "num_out_layers_n=1, num_out_layers_h=1): # NOTE: add gnn above final output layer #add input schema", "= [] tt_to_t_idx = [] t_to_tt_idx_hds = [] for b, nlu_t1 in enumerate(nlu_t):", "l_hpu[cnt] # else: # end = t_to_tt_idx_hds11[i+1] # schema_token_states1.append(torch.mean(wemb_h[cnt,start:end,:], dim=0)) # assert len(schema_token_states1)", "batch_size i_hds = [(17, 18), (19, 21), (22, 23), (24, 25), (26, 29),", "[] # The length of columns for each batch input_ids = [] tokens", "len(segment_ids1) > max_seq_length: # nlu_t.append(nlu_t1) # hds.append(current_hds1) # current_hds1 = [hds1] # else:", "tokens # i_nlu: start and end indices of question in tokens # i_hds:", 
"[] t_to_tt_idx_hds = [] for b, nlu_t1 in enumerate(nlu_t): hds1 = hds[b] l_hs.append(len(hds1))", "indices. tokens1, segment_ids1, i_nlu1, i_hds1, t_to_tt_idx_hds1 = generate_inputs(tokenizer, nlu_tt1, hds1) assert len(t_to_tt_idx_hds1) ==", "import deepcopy import torch import torch.nn as nn import torch.nn.functional as F from", "len(input_ids1) while len(input_ids1) < max_seq_length: input_ids1.append(0) input_mask1.append(0) segment_ids1.append(0) assert len(input_ids1) == max_seq_length assert", "params.bert_type_abb == 'cL' or params.bert_type_abb == 'mcS': do_lower_case = False else: do_lower_case =", "-1 #columns[col]= len(nodes)-1 # add column name to columns with index in nodes", "relations[1].append([tb_name[header],columns[col]]) else: relations[0].append([tb_name[header],columns[col]]) # for * # nodes += tables base = len(nodes)", "print(nodes[i]) nodes[i] = tokenizer.tokenize(nodes[i]) # print(nodes[i]) # masks.append([1]*len(nodes[i]) + [0]*(pad_len-len(nodes[i]))) # nodes[i] +=", "t_to_tt_idx_hds11[i] if i == len(t_to_tt_idx_hds11)-1: end = l_hpu[cnt] else: end = t_to_tt_idx_hds11[i+1] schema_token_states1.append(torch.mean(wemb_h[cnt,start:end,:],", "= (i_st_nlu, i_ed_nlu) return tokens, segment_ids, i_nlu, i_hds, t_to_tt_idx_hds1 def gen_l_hpu(i_hds): \"\"\" #", "= new_hds1 if len(current_hds1) > 0: nlu_t.append(nlu_t1) hds.append(current_hds1) return nlu_t, hds def prepare_input_gnn0(tokenizer,", "0: nlu_t.append(nlu_t1) hds.append(current_hds1) return nlu_t, hds def prepare_input_gnn0(tokenizer, input_sequence, input_schema, max_seq_length,pad_len=12): \"\"\" Return:", "(i, token) in enumerate(nlu_t1): nlu_tt1 += tokenizer.tokenize(token) current_hds1 = [] current_table = ''", "len(nodes) columns = {} nodes = [] relations = [[],[],[]] # three edge", "schema_token_states1.append(torch.mean(wemb_h[cnt,start:end,:], dim=0)) assert len(schema_token_states1) == len(input_schema.column_names_embedder_input[cnt].split()) 
schema_token_states.append(schema_token_states1) assert len(schema_token_states) == len(input_schema.column_names_embedder_input) if use_gnn:", "input_schema.column_names_surface_form if len(new_schema) != len(nodes): new_schema = input_schema.column_names_surface_form + tables # if len(new_schema)", "list(wemb_h.size())[0] == len(input_schema.column_names_embedder_input) # print(22222222,len(input_schema.column_names_embedder_input),input_schema.column_names_embedder_input,input_schema.column_names_surface_form) utterance_states = [] for i in range(len(t_to_tt_idx)): start", "Fill zero for non-exist part. l_n1 = l_n[b] i_nlu1 = i_nlu[b] for i_noln", "masks.append([1]*len(nodes[i]) + [0]*(pad_len-len(nodes[i]))) nodes[i] += ['[PAD]'] * (pad_len-len(nodes[i])) nodes[i] = tokenizer.convert_tokens_to_ids(nodes[i]) # print(nodes[i],masks[i])", "bS, l_n_max, hS * num_out_layers_n) wemb_n = torch.zeros([bS, l_n_max, hS * num_out_layers_n]).to(device) for", "len(nodes) columns = {} nodes = [] foreign_idx = [ i for i,j", "hds def prepare_input_gnn0(tokenizer, input_sequence, input_schema, max_seq_length,pad_len=12): \"\"\" Return: Nodes(list of tokenized db items)", ":param hds: Headers :param hs_t: None or 1st-level tokenized headers :param max_seq_length: max", "rd from copy import deepcopy import torch import torch.nn as nn import torch.nn.functional", "# schema_token_states.append(schema_token_states1) # assert len(schema_token_states) == len(input_schema.column_names_embedder_input) return output,new_schema def get_bert_encoding(bert_config, model_bert, tokenizer,", "# print(1111111,input_schema.column_names_surface_form,relations, len(nodes),foreign_key,foreign_idx) # exit(0) for i in relations: for j in i:", "the representation of each tokens. 
\"\"\" bS = len(l_n) l_n_max = max(l_n) #", "tokenizer, bert_config def generate_inputs(tokenizer, nlu1_tok, hds1): tokens = [] segment_ids = [] t_to_tt_idx_hds1", "columns * # of batch_size i_hds = [(17, 18), (19, 21), (22, 23),", "== len(t_to_tt_idx)-1: # end = l_n[0] # else: # end = t_to_tt_idx[i+1] #", "range(num_out_layers_n): i_layer = num_hidden_layers - 1 - i_noln st = i_noln * hS", "input token length OUTPUT tokens: BERT input tokens nlu_tt: WP-tokenized input natural language", "enumerate(input_schema.table_schema['primary_keys'])]#primary relations[2] = input_schema.table_schema['foreign_keys']#foriegn for i,item in enumerate(foreign_key): relations[2][i][0] = item # nodes", "of header in node - len(nodes) columns = {} nodes = [] relations", "i in range(len(t_to_tt_idx)): # start = t_to_tt_idx[i] # if i == len(t_to_tt_idx)-1: #", "num_hidden_layers - 1 - i_nolh st = i_nolh * hS ed = (i_nolh", "len(nodes): # print(new_schema,nodes, len(nodes),len(new_schema)) assert len(new_schema) ==len(nodes) for i in range(len(nodes)): # new_schema.append(nodes[i])", "[B, max_len, dim] # Fill zero for non-exist part. 
l_n1 = l_n[b] i_nlu1", "t_to_tt_idx_hds11.append(len(sub_tok)) sub_tok += tokenizer.tokenize(sub_tok1) t_to_tt_idx_hds1.append(t_to_tt_idx_hds11) tokens += sub_tok i_ed_hd = len(tokens) i_hds.append((i_st_hd, i_ed_hd))", "i in relations: for j in i: j[0] += base # tokenize nodes", "in nodes as value # take redundancy for foreign key if col in", "input_schema.table_schema['table_names'] input_schema.num_col += len(input_schema.table_schema['table_names']) input_schema.column_names_surface_form += [i.lower() for i in input_schema.table_schema['table_names_original']] nodes, relations,", "# all_encoder_layer = all_encoder_layer.permute(2,1,0) # print(all_encoder_layer.size()) # print([gnn_encoder1(i.unsqueeze(0))[0][1][0] for i in all_encoder_layer][0]) #", "[bS, l_n_max, hS * num_out_layers_n] = ', bS, l_n_max, hS * num_out_layers_n) wemb_n", "max_seq_length: max input token length OUTPUT tokens: BERT input tokens nlu_tt: WP-tokenized input", "new_schema = input_schema.column_names_surface_form if len(new_schema) != len(nodes): new_schema = input_schema.column_names_surface_form + tables #", "[torch.tensor(i, dtype=torch.long).to(device) for i in relations] # print(333333,relations, all_encoder_layer.size()) output = [i for", "len(t_to_tt_idx_hds1) == len(hds1) t_to_tt_idx_hds.append(t_to_tt_idx_hds1) input_ids1 = tokenizer.convert_tokens_to_ids(tokens1) # Input masks # The mask", "{} nodes = [] relations = [[],[],[]] # three edge types, we use", "bert_config_file = os.path.join(BERT_PT_PATH, f'bert_config_{bert_type}.json') vocab_file = os.path.join(BERT_PT_PATH, f'vocab_{bert_type}.txt') init_checkpoint = os.path.join(BERT_PT_PATH, f'pytorch_model_{bert_type}.bin') print('bert_config_file',", "def get_gnn_encoding(tokenizer,model_bert,input_sequence,input_schema,gnn,gnn_encoder1,embedder=None,bert_input_version='v1',num_out_layers_h=1, max_seq_length=512,num_out_layers_n=1): # only get graph encoding without input_sequence dependency nodes=relations=new_schema=None 
if", "max_seq_length = max(max_seq_length, len(segment_ids1)) nlu_t.append(nlu_t1) hds.append(current_hds1) current_hds1 = [hds1] current_table = hds1_table if", "input_schema, max_seq_length): nlu_t = [] hds = [] nlu_t1 = input_sequence # segmented", "t_to_tt_idx = [] tt_to_t_idx = [] t_to_tt_idx_hds = [] for b, nlu_t1 in", "(24, 25), (26, 29), (30, 34)]) \"\"\" l_hpu = [] for i_hds1 in", "def get_wemb_n(i_nlu, l_n, hS, num_hidden_layers, all_encoder_layer, num_out_layers_n): \"\"\" Get the representation of each", "hS * num_out_layers_n]).to(device) for b in range(bS): # [B, max_len, dim] # Fill", "nodes,relations, new_schema def prepare_input_gnn2(schema,tokenizer): nodes = schema.nodes masks = [] new_schema = []", "in all_encoder_layer],0) # all_encoder_layer = [gnn_encoder1(i.squeeze()) for i in all_encoder_layer] # all_encoder_layer =", "!= -1 # print('key') relations[2].append([tb_name[header],columns[col]]) # add foreign key relation # relations[0].append([tb_name[header],columns[col]]) else:", "add gnn above final output layer #add input schema table # print(11111111,input_schema.column_names_embedder_input,input_schema.column_names_surface_form) relations", "without input_sequence dependency nodes=relations=new_schema=None if bert_input_version == 'v1': nodes, relations, new_schema = prepare_input_gnn(", "column (we choose latter one) base = len(nodes) nodes += tables relations[0] =", "len(l_hs) l_hpu_max = max(l_hpu) num_of_all_hds = sum(l_hs) wemb_h = torch.zeros([num_of_all_hds, l_hpu_max, hS *", "from https://github.com/naver/sqlova import os, json import random as rd from copy import deepcopy", "= [hds1] # else: # current_hds1 = new_hds1 # if len(current_hds1) > 0:", "from copy import deepcopy import torch import torch.nn as nn import torch.nn.functional as", "prepare_input_gnn0(tokenizer, input_sequence, input_schema, max_seq_length,pad_len=12): \"\"\" Return: Nodes(list of tokenized db items) Return: relations(lists", "i in all_hds: # 
print(i.split('.')) if i != \"*\" and len(i.split('.')) > 1:", "# print('key') relations[2].append([tb_name[header],columns[col]]) # add foreign key relation # relations[0].append([tb_name[header],columns[col]]) else: # column", "tokens, i_nlu, i_hds,\\ l_n, l_hpu, l_hs, \\ nlu_tt, t_to_tt_idx, tt_to_t_idx, t_to_tt_idx_hds = get_bert_output(model_bert,", "\\ nlu_tt, t_to_tt_idx, tt_to_t_idx, t_to_tt_idx_hds = get_bert_output(model_bert, tokenizer, nlu_t, hds, max_seq_length) # all_encoder_layer:", "one) base = len(nodes) nodes += tables relations[0] = relations[0] #column relations[1] =", "t_to_tt_idx_hds1.append(t_to_tt_idx_hds11) tokens += sub_tok i_ed_hd = len(tokens) i_hds.append((i_st_hd, i_ed_hd)) segment_ids += [1] *", "natural language utterance with batch-size = # of columns * # of batch_size", "input natural language questions orig_to_tok_index: map the index of 1st-level-token to the index", "start position of original 'white-space' tokens. sub_tokens = tokenizer.tokenize(token) for sub_token in sub_tokens:", "0: # nlu_t.append(nlu_t1) # hds.append(current_hds1) return nodes,relations,masks, new_schema def prepare_input_gnn(tokenizer, input_sequence, input_schema, max_seq_length,pad_len=12):", "True no_pretraining = False bert_config_file = os.path.join(BERT_PT_PATH, f'bert_config_{bert_type}.json') vocab_file = os.path.join(BERT_PT_PATH, f'vocab_{bert_type}.txt') init_checkpoint", "all_encoder_layer],0) # all_encoder_layer = [gnn_encoder1(i.squeeze()) for i in all_encoder_layer] # all_encoder_layer = [gnn_encoder1(torch.cat(i,1))[0][1]", "vocab_file) print('init_checkpoint', init_checkpoint) bert_config = BertConfig.from_json_file(bert_config_file) tokenizer = tokenization.FullTokenizer( vocab_file=vocab_file, do_lower_case=do_lower_case) bert_config.print_status() model_bert", "== len(input_schema.column_names_embedder_input[cnt].split()) schema_token_states.append(schema_token_states1) assert len(schema_token_states) == 
len(input_schema.column_names_embedder_input) if use_gnn: return utterance_states, schema_token_states, relations", "tt_to_t_idx1 = [] # number indicates where sub-token belongs to in 1st-level-tokens (here,", "1st-level tokenized headers :param max_seq_length: max input token length OUTPUT tokens: BERT input", "indicate the start position of original 'white-space' tokens. sub_tokens = tokenizer.tokenize(token) for sub_token", "= i_noln * hS ed = (i_noln + 1) * hS wemb_n[b, 0:(i_nlu1[1]", "of all tokens from bert all_encoder_layer, pooled_output, tokens, i_nlu, i_hds,\\ l_n, l_hpu, l_hs,", "assert list(wemb_h.size())[0] == len(input_schema.column_names_embedder_input) # print(22222222,len(input_schema.column_names_embedder_input),input_schema.column_names_embedder_input,input_schema.column_names_surface_form) utterance_states = [] for i in range(len(t_to_tt_idx)):", "in i_hds: for i_hds11 in i_hds1: l_hpu.append(i_hds11[1] - i_hds11[0]) return l_hpu def get_bert_output(model_bert,", "i_hds.append((i_st_hd, i_ed_hd)) segment_ids += [1] * len(sub_tok) if i < len(hds1)-1: tokens.append(\"[SEP]\") segment_ids.append(0)", "= torch.tensor(masks, dtype=torch.long).to(device) with torch.no_grad(): all_encoder_layer= torch.cat([torch.cat(model_bert(i,j)[0],1) for i,j in zip(input_nodes,masks)],0) all_encoder_layer =", "== len(input_schema.column_names_embedder_input) assert list(wemb_h.size())[0] == len(input_schema.column_names_embedder_input) # print(22222222,len(input_schema.column_names_embedder_input),input_schema.column_names_embedder_input,input_schema.column_names_surface_form) utterance_states = [] for i", "# add foreign key relation # relations[0].append([tb_name[header],columns[col]]) else: # column id columns[col] =", "# start = t_to_tt_idx_hds11[i] # if i == len(t_to_tt_idx_hds11)-1: # end = l_hpu[cnt]", "print(i.split('.')) if i != \"*\" and len(i.split('.')) > 1: header,col = i.split('.') #", "nodes=relations=new_schema=None if bert_input_version == 'v1': 
nodes, relations, new_schema = prepare_input_gnn( tokenizer, input_sequence, input_schema,", "prepare_input_v2(tokenizer, input_sequence, input_schema) wemb_n, wemb_h, l_n, l_hpu, l_hs, nlu_tt, t_to_tt_idx, tt_to_t_idx, t_to_tt_idx_hds =", "real # tokens are attended to. input_mask1 = [1] * len(input_ids1) # 3.", "wemb_n[b, 0:(i_nlu1[1] - i_nlu1[0]), st:ed] = all_encoder_layer[i_layer][b, i_nlu1[0]:i_nlu1[1], :] return wemb_n def get_wemb_h(i_hds,", "# new_schema.append(nodes[i]) # print(nodes[i]) nodes[i] = tokenizer.tokenize(nodes[i]) # print(nodes[i]) # masks.append([1]*len(nodes[i]) + [0]*(pad_len-len(nodes[i])))", "zero for non-exist part. l_n1 = l_n[b] i_nlu1 = i_nlu[b] for i_noln in", "t_to_tt_idx_hds11 in t_to_tt_idx_hds1: cnt += 1 schema_token_states1 = [] for i in range(len(t_to_tt_idx_hds11)):", "assert len(set(l_n)) == 1 and len(set(i_nlu)) == 1 assert l_n[0] == i_nlu[0][1] -", "[] # for hds1 in all_hds: # new_hds1 = current_hds1 + [hds1] #", "# assert len(schema_token_states) == len(input_schema.column_names_embedder_input) return output,new_schema def get_bert_encoding(bert_config, model_bert, tokenizer, input_sequence, input_schema,", "# print(foreign_idx[0]) foreign_key[foreign_idx.index(len(nodes)-1)] = tb_name[header] ## NOTE: foreign key relation can be column", "else: # end = t_to_tt_idx_hds11[i+1] # schema_token_states1.append(torch.mean(wemb_h[cnt,start:end,:], dim=0)) # assert len(schema_token_states1) == len(input_schema.column_names_embedder_input[cnt].split())", "& indices. 
tokens1, segment_ids1, i_nlu1, i_hds1, t_to_tt_idx_hds1 = generate_inputs(tokenizer, nlu_tt1, hds1) assert len(t_to_tt_idx_hds1)", ".gated_graph_conv import GatedGraphConv from .bert import tokenization as tokenization from .bert.modeling import BertConfig,", "# tokenize nodes to feed into model masks = [] ## update new", "tokenizer, input_sequence, input_schema, bert_input_version='v1', gnn=None ,use_gnn=True, max_seq_length=512, num_out_layers_n=1, num_out_layers_h=1): # NOTE: add gnn", "if len(segment_ids1) > max_seq_length: nlu_t.append(nlu_t1) hds.append(current_hds1) current_hds1 = [hds1] else: current_hds1 = new_hds1", "# print(nodes[i]) # masks.append([1]*len(nodes[i]) + [0]*(pad_len-len(nodes[i]))) # nodes[i] += ['[PAD]'] * (pad_len-len(nodes[i])) #", "[] foreign_idx = [ i for i,j in input_schema.table_schema['foreign_keys']] primary_idx = [ i", "tokens1, segment_ids1, i_nlu1, i_hds1, t_to_tt_idx_hds1 = generate_inputs(tokenizer, nlu_tt1, new_hds1) if len(segment_ids1) > max_seq_length:", "= gen_l_hpu(i_hds) assert len(set(l_n)) == 1 and len(set(i_nlu)) == 1 assert l_n[0] ==", "= [] input_mask = [] i_nlu = [] # index to retreive the", "current_hds1.append(hds1) else: tokens1, segment_ids1, i_nlu1, i_hds1, t_to_tt_idx_hds1 = generate_inputs(tokenizer, nlu_tt1, current_hds1) max_seq_length =", "[] tokens.append(\"[CLS]\") i_st_nlu = len(tokens) # to use it later segment_ids.append(0) for token", "print('key') relations[2].append([tb_name[header],columns[col]]) # add foreign key relation # relations[0].append([tb_name[header],columns[col]]) else: # column id", "= os.path.join(BERT_PT_PATH, f'bert_config_{bert_type}.json') vocab_file = os.path.join(BERT_PT_PATH, f'vocab_{bert_type}.txt') init_checkpoint = os.path.join(BERT_PT_PATH, f'pytorch_model_{bert_type}.bin') print('bert_config_file', bert_config_file)", "nlu_t.append(nlu_t1) # hds.append(current_hds1) # current_hds1 = [hds1] # else: # current_hds1 = new_hds1", "# print(nodes[i]) nodes[i] = 
tokenizer.tokenize(nodes[i]) masks.append([1]*len(nodes[i]) + [0]*(pad_len-len(nodes[i]))) nodes[i] += ['[PAD]'] * (pad_len-len(nodes[i]))", "tables # if len(new_schema) != len(nodes): # print(new_schema,nodes, len(nodes),len(new_schema)) assert len(new_schema) ==len(nodes) for", "for (i, token) in enumerate(nlu_t1): # nlu_tt1 += tokenizer.tokenize(token) # current_hds1 = []", "all_hds: new_hds1 = current_hds1 + [hds1] tokens1, segment_ids1, i_nlu1, i_hds1, t_to_tt_idx_hds1 = generate_inputs(tokenizer,", "len(nlu_t) == 1: max_seq_length = len(input_ids1) while len(input_ids1) < max_seq_length: input_ids1.append(0) input_mask1.append(0) segment_ids1.append(0)", "# TODO: feed into gnn and return embedding # print(relations) # print(2222222,type(input_nodes),input_nodes) masks", "all_encoder_layer, num_out_layers_h): \"\"\" As if [ [table-1-col-1-tok1, t1-c1-t2, ...], [t1-c2-t1, t1-c2-t2, ...]. ...", "json import random as rd from copy import deepcopy import torch import torch.nn", "end indices of question in tokens # i_hds: start and end indices of", "'v2': raise(\"not inplemented\") nlu_t, hds, max_seq_length = prepare_input_v2(tokenizer, input_sequence, input_schema) # relations =", "#columns[col]= len(nodes)-1 # add column name to columns with index in nodes as", "find('id') != -1 # print('key') relations[2].append([tb_name[header],columns[col]]) # add foreign key relation else: #", "i_nlu: start and end indices of question in tokens # i_hds: start and", "# assert len(t_to_tt_idx) == len(input_sequence) # assert sum(len(t_to_tt_idx_hds1) for t_to_tt_idx_hds1 in t_to_tt_idx_hds) ==", "[] # orig_to_tok_idx[i] = start index of i-th-1st-level-token in all_tokens. nlu_tt1 = []", "# number indicates where sub-token belongs to in 1st-level-tokens (here, CoreNLP). 
t_to_tt_idx1 =", "input_sequence, input_schema, max_seq_length) if bert_input_version == 'v1': nlu_t, hds = prepare_input(tokenizer, input_sequence, input_schema,", "nlu_tt1, new_hds1) # if len(segment_ids1) > max_seq_length: # nlu_t.append(nlu_t1) # hds.append(current_hds1) # current_hds1", "bert_config.num_hidden_layers, all_encoder_layer, num_out_layers_h) return wemb_n, wemb_h, l_n, l_hpu, l_hs, \\ nlu_tt, t_to_tt_idx, tt_to_t_idx,", "BertConfig.from_json_file(bert_config_file) tokenizer = tokenization.FullTokenizer( vocab_file=vocab_file, do_lower_case=do_lower_case) bert_config.print_status() model_bert = BertModel(bert_config) if no_pretraining: pass", "25), (26, 29), (30, 34)]) \"\"\" l_hpu = [] for i_hds1 in i_hds:", "i_nolh st = i_nolh * hS ed = (i_nolh + 1) * hS", "(pad_len-len(nodes[i])) # nodes[i] = tokenizer.convert_tokens_to_ids(nodes[i]) # print(nodes[i],masks[i]) # print(relations) # print(nodes,relations) # for", "foreign_idx = [ i for i,j in input_schema.table_schema['foreign_keys']] primary_idx = [ i for", "print(input_sequence) assert len(nodes) > 1 assert len(relations[0]) > 0 # print(123123123,all_encoder_layer[0][0].size(),len(all_encoder_layer[0]),len(all_encoder_layer),len(all_encoder_layer[3]),len(all_encoder_layer[10])) # print(123123,", "relations: for j in i: j[0] += base # tokenize nodes to feed", "in primary_idx): relations[0].append([tb_name[header],columns[col]]) # find table name correspond to foreign key if len(nodes)-1", "if len(current_hds1) > 0: tokens1, segment_ids1, i_nlu1, i_hds1, t_to_tt_idx_hds1 = generate_inputs(tokenizer, nlu_tt1, current_hds1)", "nodes, relations, new_schema = prepare_input_gnn( tokenizer, input_sequence, input_schema, max_seq_length) elif bert_input_version == 'v2':", "redundancy for foreign key if col in columns: # find('id') != -1 #", "== current_table: current_hds1.append(hds1) else: tokens1, segment_ids1, i_nlu1, i_hds1, t_to_tt_idx_hds1 = generate_inputs(tokenizer, nlu_tt1, current_hds1)", "NOTE: 
foreign key relation can be column to column or table to column", "len(nlu_tt1)) # all_doc_tokens[ indicate the start position of original 'white-space' tokens. sub_tokens =", "print(input_schema.num_col) print(input_sequence) assert len(nodes) > 1 assert len(relations[0]) > 0 # print(123123123,all_encoder_layer[0][0].size(),len(all_encoder_layer[0]),len(all_encoder_layer),len(all_encoder_layer[3]),len(all_encoder_layer[10])) #", "!= len(nodes): new_schema = input_schema.column_names_surface_form + tables # if len(new_schema) != len(nodes): #", "in range(len(nodes)): new_schema.append(nodes[i]) # print(nodes[i]) nodes[i] = tokenizer.tokenize(nodes[i]) masks.append([1]*len(nodes[i]) + [0]*(pad_len-len(nodes[i]))) nodes[i] +=", "columns = {} nodes = [] foreign_idx = [ i for i,j in", "col.strip() != '*': # print(header,col) # first add headers nodes.append(i) # if not", "= None if use_gnn: if not (input_schema.table_schema['table_names'][0] in input_schema.column_names_embedder_input): input_schema.column_names_embedder_input += input_schema.table_schema['table_names'] input_schema.num_col", "= 0 nlu_t1 = input_sequence all_hds = input_schema.column_names_embedder_input nlu_tt1 = [] for (i,", "!= \"*\" and len(i.split('.')) > 1: header,col = i.split('.') # if col.strip() !=", "BertConfig, BertModel device = torch.device(\"cuda\" if torch.cuda.is_available() else \"cpu\") def get_bert(params): BERT_PT_PATH =", "= torch.zeros([num_of_all_hds, l_hpu_max, hS * num_out_layers_h]).to(device) # print('wemb_h: [num_of_all_hds, l_hpu_max, hS * num_out_layers_h]", "# for hds1 in all_hds: # new_hds1 = current_hds1 + [hds1] # tokens1,", "\"\"\" bS = len(l_hs) l_hpu_max = max(l_hpu) num_of_all_hds = sum(l_hs) wemb_h = torch.zeros([num_of_all_hds,", "hds, max_seq_length, num_out_layers_n, num_out_layers_h) # t_to_tt_idx = t_to_tt_idx[0] # assert len(t_to_tt_idx) == len(input_sequence)", "generate_inputs(tokenizer, nlu_tt1, hds1) assert len(t_to_tt_idx_hds1) == len(hds1) 
t_to_tt_idx_hds.append(t_to_tt_idx_hds1) input_ids1 = tokenizer.convert_tokens_to_ids(tokens1) # Input", "# if not col in columns: if not header in tables: tables.append(header) tb_name[header]", "current_hds1 = new_hds1 # if len(current_hds1) > 0: # nlu_t.append(nlu_t1) # hds.append(current_hds1) return", "input_schema.column_names_surface_form += [i.lower() for i in input_schema.table_schema['table_names_original']] nodes, relations, new_schema = prepare_input_gnn( tokenizer,", "import random as rd from copy import deepcopy import torch import torch.nn as", "# 5. generate l_hpu from i_hds l_hpu = gen_l_hpu(i_hds) assert len(set(l_n)) == 1", "enumerate(nlu_t1): nlu_tt1 += tokenizer.tokenize(token) current_hds1 = [] current_table = '' for hds1 in", "[t2-c1-t1, ...,] ] \"\"\" bS = len(l_hs) l_hpu_max = max(l_hpu) num_of_all_hds = sum(l_hs)", "[] for (i, token) in enumerate(nlu_t1): nlu_tt1 += tokenizer.tokenize(token) current_hds1 = [] current_table", "embedding # print(relations) # print(2222222,type(input_nodes),input_nodes) masks = None input_nodes = nodes all_encoder_layer =", "* hS wemb_n[b, 0:(i_nlu1[1] - i_nlu1[0]), st:ed] = all_encoder_layer[i_layer][b, i_nlu1[0]:i_nlu1[1], :] return wemb_n", "[0]*(pad_len-len(nodes[i]))) # nodes[i] += ['[PAD]'] * (pad_len-len(nodes[i])) # nodes[i] = tokenizer.convert_tokens_to_ids(nodes[i]) # print(nodes[i],masks[i])", "nodes[i] = tokenizer.convert_tokens_to_ids(nodes[i]) return nodes, masks, new_schema def prepare_input_v2(tokenizer, input_sequence, input_schema): nlu_t =", "find table name correspond to foreign key if len(nodes)-1 in foreign_idx: # print(foreign_idx[0])", "# all_encoder_layer: BERT outputs from all layers. 
# pooled_output: output of [CLS] vec.", "i_ed_nlu = len(tokens) tokens.append(\"[SEP]\") segment_ids.append(0) i_hds = [] for i, hds11 in enumerate(hds1):", "= [] # for i in range(len(t_to_tt_idx_hds11)): # start = t_to_tt_idx_hds11[i] # if", "i in gnn(all_encoder_layer,relations)] # print(output) # wemb_n, wemb_h, l_n, l_hpu, l_hs, nlu_tt, t_to_tt_idx,", "relations = None if use_gnn: if not (input_schema.table_schema['table_names'][0] in input_schema.column_names_embedder_input): input_schema.column_names_embedder_input += input_schema.table_schema['table_names']", "tables base = len(nodes) nodes += tables for i in relations: for j", "BERT input tokens nlu_tt: WP-tokenized input natural language questions orig_to_tok_index: map the index", "1 and len(set(i_nlu)) == 1 assert l_n[0] == i_nlu[0][1] - i_nlu[0][0] return all_encoder_layer,", "(i, token) in enumerate(nlu_t1): nlu_tt1 += tokenizer.tokenize(token) current_hds1 = [] for hds1 in", "segment_ids.append(0) elif i == len(hds1)-1: tokens.append(\"[SEP]\") segment_ids.append(1) else: raise EnvironmentError i_nlu = (i_st_nlu,", "len(tokens) t_to_tt_idx_hds11 = [] sub_tok = [] for sub_tok1 in hds11.split(): t_to_tt_idx_hds11.append(len(sub_tok)) sub_tok", "name.column tables = [] tb_name = {} # index of header in node", "# print(output) # wemb_n, wemb_h, l_n, l_hpu, l_hs, nlu_tt, t_to_tt_idx, tt_to_t_idx, t_to_tt_idx_hds =", "new_schema def prepare_input_v2(tokenizer, input_sequence, input_schema): nlu_t = [] hds = [] max_seq_length =", "to foreign key if len(nodes)-1 in foreign_idx: # print(foreign_idx[0]) foreign_key[foreign_idx.index(len(nodes)-1)] = tb_name[header] ##", "input_mask.append(input_mask1) i_nlu.append(i_nlu1) i_hds.append(i_hds1) # Convert to tensor all_input_ids = torch.tensor(input_ids, dtype=torch.long).to(device) all_input_mask =", "= len(l_n) l_n_max = max(l_n) # print('wemb_n: [bS, l_n_max, hS * num_out_layers_n] =" ]
[ "= 4\" def test_basic_format_language(): template = fstr(\"{x!r} + {y!r} = {x + y!r}\",", "= [ (10, \"{value:#{1}0x}\"), (10, \"{value:{'#'}1{0}{'x'}}\"), (-10, \"{value:-{'#'}1{0}x}\"), (-10, \"{value:{'-'}#{1}0{'x'}}\"), (10, \"{value:#{3 !=", "template.format() == \"{}\" def test_odd_double_brace_replacement(): template = fstr(\"{{{x}}}\") assert template.format(x=1) == \"{1}\" def", "\"1 2\" _bad_missing_expressions = [ \"{}\", \"{ '\" \" {} \", \"{!r}\", \"{", "def test_empty_format_specifier(template, expected): assert fstr(template).format(x=\"test\") == expected _bad_mismatched_braces = [ \"{{}\", \"{{}}}\", \"}\",", "_bad_missing_expressions = [ \"{}\", \"{ '\" \" {} \", \"{!r}\", \"{ !r}\", \"{10:{", "'xeric\"sy'), ] @pytest.mark.parametrize(\"template, expected\", _triple_quoted_strings) def test_expressions_with_triple_quoted_strings(template, expected): assert fstr(template).format() == expected def", "a special # case in the f-string parser to look for != as", "\"{3!s:3\", \"x{\", \"x{x\", \"{x\", \"{3:s\", \"{{{\", \"{{}}{\", \"{\", ] @pytest.mark.parametrize(\"template\", _bad_mismatched_braces) def test_bad_mismatched_braces(template):", "== \"1 2\" def test_format_language_with_inner_fstr(): template = fstr(\"{x:{width}}\") assert template.format(x=10, width=3) == \"", "1 + 2 }\").format() == \"3\" def dict_inside_braces_with_padding(): template = fstr(\"{ {x: y}", "fstr(template).format() def test_side_effect_order(): class X: def __init__(self): self.i = 0 def __format__(self, spec):", "= {\"x\": \"X\", \"width\": 1} def make_template(n, extra=\"\"): return fstr((\"{x} \" + extra)", "template\", _format_hex_specifier_templates) def test_format_hex_specifier_expressions(value, template): expected = \" -0xa\" if value < 0", "fstr(template).format(x=\"test\") == expected _bad_mismatched_braces = [ \"{{}\", \"{{}}}\", \"}\", \"x}\", \"x}x\", \"{3:}>10}\", \"{3:}}>10}\",", "\"#\" assert fstr(\"{d['#']}\").format(d=d) == \"hash\" 
@pytest.mark.parametrize(\"brace\", \"])}\") def test_unclosed_braces(brace): with pytest.raises(SyntaxError): fstr(\"{%s}\" %", "12.35\", \"result: 12.35\", \"result: 12.35\", \" 0xa\", \" 0xa\", \" -0xa\", \" -0xa\",", "template): expected = \" -0xa\" if value < 0 else \" 0xa\" assert", "@pytest.mark.parametrize(\"brace\", \"])}\") def test_unclosed_braces(brace): with pytest.raises(SyntaxError): fstr(\"{%s}\" % brace).format() def test_many_expressions(): context =", "n in range(250, 260): make_template(n).format(**context) # Test around 256. for i in range(250,", "@pytest.mark.parametrize(\"invalid\", _invalid_expressions) def test_invalid_expressions(invalid): with pytest.raises(SyntaxError): fstr(invalid).format() if version_info < (3, 0): _causes_errors", "] _format_specifier_expression_expecteds = [ \"result: 12.35\", \"result: 12.35\", \"result: 12.35\", \"result: 12.35\", \"result:", "test_side_effect_order(): class X: def __init__(self): self.i = 0 def __format__(self, spec): self.i +=", "def test_backslashes_in_string_part(template, expected): assert fstr(template).format() == expected _backslashes_in_expression = [r\"{\\}\", r\"{\\'a\\'}\", r\"{\\t3}\", \"{\\n}\"]", "== \"1 x y \" * 1000 _format_specifier_width_precision_templates = [ \"result: {value:{width}.{precision}}\", \"result:", "the following # whitespace characters in additional to a space. 
\"{\\t\\f\\r\\n}\", # Catch", "test_no_backslashes_in_expression_part(template): with pytest.raises(SyntaxError): fstr(template).format() def test_newlines_in_expressions(): assert fstr(\"{0}\").format() == \"0\" assert ( fstr(", "\"{x:{width}} \").format(**context) expected = (context[\"x\"] + \" \") * 500 assert actual ==", "== \"bar\" assert fstr(\"{d['foo']}\").format(d=d) == \"bar\" def test_format_with_function(): def add(x, y): return x", "== \" 1.2\" def test_dict(): d = {'\"': \"double-quote\", \"'\": \"single-quote\", \"foo\": \"bar\"}", "== \"{}\" def test_odd_double_brace_replacement(): template = fstr(\"{{{x}}}\") assert template.format(x=1) == \"{1}\" def test_trailing_and_leading_space():", "a conversion, but show that ! is allowed in a format spec. assert", "template = fstr(\"{x} + {y} = {x + y}\", x=1) assert template.format(y=2) ==", "< 0 else \" 0xa\" assert fstr(template).format(value=value, width=10) == expected _invalid_format_specifier_templates = [\"{'s'!r{':10'}}\",", "\"\"\"{3+ 4}\"\"\" ).format() == \"7\" # noqa: W503 ) _empty_format_specifiers = [ (\"{x}\",", "12.35\", \"result: 12.35\", \"result: 12.35\", \"result: 12.35\", \"result: 12.35\", \" 0xa\", \" 0xa\",", "The Python parser ignores also the following # whitespace characters in additional to", "to look for != as not ending an # expression. Normally it would,", "\"2\\u0394\"), (\"{2}\\N{GREEK CAPITAL LETTER DELTA}{3}\", \"2\\u03943\"), (\"\\N{GREEK CAPITAL LETTER DELTA}{3}\", \"\\u03943\"), (\"2\\N{GREEK CAPITAL", "range(250, 260): actual = make_template(i).format(**context) expected = (context[\"x\"] + \" \") * i", "expected): assert fstr(template).format() == expected _backslashes_in_expression = [r\"{\\}\", r\"{\\'a\\'}\", r\"{\\t3}\", \"{\\n}\"] @pytest.mark.parametrize(\"template\", _backslashes_in_expression)", "\"{!x}\", \"{ !xr}\", \"{!x:}\", \"{!x:a}\", \"{ !xr:}\", \"{ !xr:a}\", \"{!}\", \"{:}\", # We", "a format spec. 
assert fstr(\"{3.14:!<10.10}\").format() == \"3.14!!!!!!\" bad_conversions = [ \"{3!g}\" \"{3!A}\" \"{3!3}\"", "{'y'} \" * 1000).format() == \"1 x y \" * 1000 _format_specifier_width_precision_templates =", "return fstr((\"{x} \" + extra) * n) for n in range(250, 260): make_template(n).format(**context)", "because there's a special # case in the f-string parser to look for", "for x, y in [(1, 2), ]}}\").format() == \"expr={1: 2}\" def test_not_equal(): #", "_bad_mismatched_braces = [ \"{{}\", \"{{}}}\", \"}\", \"x}\", \"x}x\", \"{3:}>10}\", \"{3:}}>10}\", \"{3:{{>10}\", \"{3\", \"{3!\",", "10\" assert template.format(x=3, width=4) == \" 3\" template = fstr(\"{x:{width}.{precision}}\") assert template.format(x=1.2345, width=4,", "== \" 3.14\" assert fstr(\"{3.14!s:10.10}\").format() == \"3.14 \" assert fstr(\"{3.14!r:10.10}\").format() == \"3.14 \"", "\"y\"}', 'xeric\"sy'), ('{\"\"\"x\"\"\" \"\"\"eric\"s\"\"\" \"\"\"y\"\"\"}', 'xeric\"sy'), ('{r\"\"\"x\"\"\" \"\"\"eric\"s\"\"\" \"\"\"y\"\"\"}', 'xeric\"sy'), ] @pytest.mark.parametrize(\"template, expected\",", "fstr(\"{3:}\").format() == \"3\" assert fstr(\"{3!s:}\").format() == \"3\" def test_call(): def foo(x): return \"x=\"", "\"\\u03943\"), (\"\\x20\", \" \"), (r\"\\x20\", \"\\\\x20\"), (r\"\\x20\", \"\\\\x20\"), (\"{2}\\x20\", \"2 \"), (\"{2}\\x20{3}\", \"2", "\" 10\" assert template.format(x=3, width=4) == \" 3\" template = fstr(\"{x:{width}.{precision}}\") assert template.format(x=1.2345,", "== \"3\" assert fstr(\"{1 + 2 }\").format() == \"3\" assert fstr(\"{ 1 +", "\"{3:\", \"{3!s\", \"{3!s:\", \"{3!s:3\", \"x{\", \"x{x\", \"{x\", \"{3:s\", \"{{{\", \"{{}}{\", \"{\", ] @pytest.mark.parametrize(\"template\",", "\"single-quote\", \"foo\": \"bar\"} assert fstr(\"\"\"{d[\"'\"]}\"\"\").format(d=d) == \"single-quote\" assert fstr(\"\"\"{d['\"']}\"\"\").format(d=d) == \"double-quote\" assert fstr('{d[\"foo\"]}').format(d=d)", "template.format(y=2) == \"1 + 2 = 3\" assert template.format(y=3) == \"1 + 3", "[ \"{}\", 
\"{ '\" \" {} \", \"{!r}\", \"{ !r}\", \"{10:{ }}\", \"", "y for x, y in [(1, 2), ]} }\").format() == \"expr={1: 2}\" assert", "* 1000 _format_specifier_width_precision_templates = [ \"result: {value:{width}.{precision}}\", \"result: {value:{width!r}.{precision}}\", \"result: {value:{width:0}.{precision:1}}\", \"result: {value:{1}{0:0}.{precision:1}}\",", "[ (\"{x}\", \"test\"), (\"{x:}\", \"test\"), (\"{x!s:}\", \"test\"), (\"{x!r:}\", \"'test'\"), ] @pytest.mark.parametrize(\"template, expected\", _empty_format_specifiers)", "def test_many_expressions(): context = {\"x\": \"X\", \"width\": 1} def make_template(n, extra=\"\"): return fstr((\"{x}", "% brace).format() def test_many_expressions(): context = {\"x\": \"X\", \"width\": 1} def make_template(n, extra=\"\"):", "\", # The Python parser ignores also the following # whitespace characters in", "fstr(\"{%s}\" % brace).format() def test_many_expressions(): context = {\"x\": \"X\", \"width\": 1} def make_template(n,", "make_template(250, \"{x:{width}} \").format(**context) expected = (context[\"x\"] + \" \") * 500 assert actual", "format spec. assert fstr(\"{3.14:!<10.10}\").format() == \"3.14!!!!!!\" bad_conversions = [ \"{3!g}\" \"{3!A}\" \"{3!3}\" \"{3!G}\"", "(10, \"{value:#{3 != {4:5} and width}x}\"), ] @pytest.mark.parametrize(\"value, template\", _format_hex_specifier_templates) def test_format_hex_specifier_expressions(value, template):", "\"{3!:}\" \"{3! 
s}\" # no space before conversion char \"{x!s{y}}\", \"{3!ss}\", \"{3!ss:}\", \"{3!ss:s}\",", "@pytest.mark.parametrize(\"template\", _format_specifier_width_precision_templates) def test_format_width_precision_specifier_expressions(template): context = {\"width\": 10, \"precision\": 4, \"value\": 12.34567} assert", "(3, 0): _causes_errors = [(\"{1000:j}\", SyntaxError)] elif version_info < (3, 6): _causes_errors =", "!xr:}\", \"{ !xr:a}\", \"{!}\", \"{:}\", # We find the empty expression before the", "== \"'a'\" if version_info >= (3, 0): assert fstr('{\"a\"!a}').format() == \"'a'\" # Not", "pytest.raises(NameError): fstr(\"v:{value}\").format() def test_missing_format_spec(): class Obj: def __format__(self, spec): if not spec: return", "_format_specifier_width_precision_templates) def test_format_width_precision_specifier_expressions(template): context = {\"width\": 10, \"precision\": 4, \"value\": 12.34567} assert fstr(template).format(**context)", "def test_newlines_in_expressions(): assert fstr(\"{0}\").format() == \"0\" assert ( fstr( \"\"\"{3+ 4}\"\"\" ).format() ==", "{value:{width:0}.{precision:1}}\", \"result: {value:{1}{0:0}.{precision:1}}\", \"result: {value:{ 1}{ 0:0}.{ precision:1}}\", ] _format_specifier_expression_expecteds = [ \"result:", "assert fstr(template).format(x=\"test\") == expected _bad_mismatched_braces = [ \"{{}\", \"{{}}}\", \"}\", \"x}\", \"x}x\", \"{3:}>10}\",", "def test_call(): def foo(x): return \"x=\" + str(x) assert fstr(\"{foo(10)}\").format(foo=foo) == \"x=10\" def", "def add(x, y): return x + y template = fstr(\"{add(x, y)}\", add=add) assert", "4, \"value\": 12.34567} assert fstr(template).format(**context) == \"result: 12.35\" _format_hex_specifier_templates = [ (10, \"{value:#{1}0x}\"),", "\"expr={1: 2}\" assert fstr(\"expr={ {x: y for x, y in [(1, 2), ]}}\").format()", "assert template.format(y=2) == \"1 + 2 = 3\" assert template.format(y=3) == \"1 +", "== \"expr={1: 2}\" assert fstr(\"expr={ {x: y for x, y in [(1, 2),", 
"== \"3\" def test_even_double_brace_replacement(): template = fstr(\"{{}}\") assert template.format() == \"{}\" def test_odd_double_brace_replacement():", "\"])}\") def test_unclosed_braces(brace): with pytest.raises(SyntaxError): fstr(\"{%s}\" % brace).format() def test_many_expressions(): context = {\"x\":", "== \"'a' + 'c' = 'ac'\" _A_GLOBAL = 1 def test_simple_fstr_evaluate(): a_local =", "fstr(\"{x} {x}\").format(x=X()) == \"1 2\" _bad_missing_expressions = [ \"{}\", \"{ '\" \" {}", "the # invalid conversion. \"{!x}\", \"{ !xr}\", \"{!x:}\", \"{!x:a}\", \"{ !xr:}\", \"{ !xr:a}\",", "0 def __format__(self, spec): self.i += 1 return str(self.i) fstr(\"{x} {x}\").format(x=X()) == \"1", "] @pytest.mark.parametrize(\"template, expected\", _empty_format_specifiers) def test_empty_format_specifier(template, expected): assert fstr(template).format(x=\"test\") == expected _bad_mismatched_braces =", "char \"{x!s{y}}\", \"{3!ss}\", \"{3!ss:}\", \"{3!ss:s}\", ] for bad in bad_conversions: with pytest.raises(SyntaxError): fstr(bad).format()", "context = {\"width\": 10, \"precision\": 4, \"value\": 12.34567} assert fstr(template).format(**context) == \"result: 12.35\"", "# Not a conversion, but show that ! 
is allowed in a format", "expected): assert fstr(template).format() == expected _ok_lambdas = [ (\"{(lambda y:x*y)('8')!r}\", \"'88888'\"), (\"{(lambda y:x*y)('8')!r:10}\",", "assert fstr(\"{3.14:!<10.10}\").format() == \"3.14!!!!!!\" bad_conversions = [ \"{3!g}\" \"{3!A}\" \"{3!3}\" \"{3!G}\" \"{3!!}\" \"{3!:}\"", "1 def test_simple_fstr_evaluate(): a_local = 2 # noqa: F841 assert fstr(\"{_A_GLOBAL} {a_local}\").evaluate() ==", "_ok_mismatched_braces = [(\"{'{'}\", \"{\"), (\"{'}'}\", \"}\")] @pytest.mark.parametrize(\"template, expected\", _ok_mismatched_braces) def test_ok_mistmatched_braces(template, expected): assert", "fstr(\"expr={ {x: y for x, y in [(1, 2), ]}}\").format() == \"expr={1: 2}\"", "fstr(\"{0}\").format() == \"0\" assert ( fstr( \"\"\"{3+ 4}\"\"\" ).format() == \"7\" # noqa:", "fstr(\"{ 3}\").format() == \"3\" assert fstr(\"{ 3}\").format() == \"3\" assert fstr(\"{3 }\").format() ==", "(10, \"{value:{'#'}1{0}{'x'}}\"), (-10, \"{value:-{'#'}1{0}x}\"), (-10, \"{value:{'-'}#{1}0{'x'}}\"), (10, \"{value:#{3 != {4:5} and width}x}\"), ]", "= make_template(i).format(**context) expected = (context[\"x\"] + \" \") * i assert actual ==", "W503 ) _empty_format_specifiers = [ (\"{x}\", \"test\"), (\"{x:}\", \"test\"), (\"{x!s:}\", \"test\"), (\"{x!r:}\", \"'test'\"),", "fstr(\"{add(x, y)}\", add=add) assert template.format(x=1, y=2) == \"3\" def test_even_double_brace_replacement(): template = fstr(\"{{}}\")", "in [(1, 2), ]}}\").format() == \"expr={1: 2}\" def test_not_equal(): # There's a special", "4\" def test_basic_format_language(): template = fstr(\"{x!r} + {y!r} = {x + y!r}\", x=\"a\")", "= [ \"{}\", \"{ '\" \" {} \", \"{!r}\", \"{ !r}\", \"{10:{ }}\",", "in range(250, 260): actual = make_template(i).format(**context) expected = (context[\"x\"] + \" \") *", "(r\"\\U00000394\", \"\\\\U00000394\"), (r\"\\U00000394\", \"\\\\U00000394\"), (\"{2}\\U00000394\", \"2\\u0394\"), (\"{2}\\U00000394{3}\", \"2\\u03943\"), (\"\\U00000394{3}\", \"\\u03943\"), 
(\"\\N{GREEK CAPITAL LETTER", "\"\"\"eric\"s\"\"\" \"\"\"y\"\"\"}', 'xeric\"sy'), ('{r\"\"\"x\"\"\" \"\"\"eric\"s\"\"\" \"\"\"y\"\"\"}', 'xeric\"sy'), ] @pytest.mark.parametrize(\"template, expected\", _triple_quoted_strings) def test_expressions_with_triple_quoted_strings(template,", "self.i += 1 return str(self.i) fstr(\"{x} {x}\").format(x=X()) == \"1 2\" _bad_missing_expressions = [", "whitespace characters in additional to a space. \"{\\t\\f\\r\\n}\", # Catch the empty expression", "# invalid conversion. \"{!x}\", \"{ !xr}\", \"{!x:}\", \"{!x:a}\", \"{ !xr:}\", \"{ !xr:a}\", \"{!}\",", "CAPITAL LETTER DELTA}{3}\", \"\\u03943\"), (\"2\\N{GREEK CAPITAL LETTER DELTA}\", \"2\\u0394\"), (\"2\\N{GREEK CAPITAL LETTER DELTA}3\",", "\"88888 \"), ] @pytest.mark.parametrize(\"template, expected\", _ok_lambdas) def test_lambda(template, expected): assert fstr(template, x=5).format() ==", "fstr('{\"a\"!a}').format() == \"'a'\" # Not a conversion. assert fstr('{\"a!r\"}').format() == \"a!r\" # Not", "= fstr(\"{x!r} + {y!r} = {x + y!r}\", x=\"a\") assert template.format(y=\"b\") == \"'a'", "{value:{width}.{precision}}\", \"result: {value:{width!r}.{precision}}\", \"result: {value:{width:0}.{precision:1}}\", \"result: {value:{1}{0:0}.{precision:1}}\", \"result: {value:{ 1}{ 0:0}.{ precision:1}}\", ]", "== \" 10\" assert template.format(x=3, width=4) == \" 3\" template = fstr(\"{x:{width}.{precision}}\") assert", "brace).format() def test_many_expressions(): context = {\"x\": \"X\", \"width\": 1} def make_template(n, extra=\"\"): return", "def test_bad_parens_in_expressions(template): with pytest.raises(SyntaxError): fstr(template).format() _backlashes_in_string_part = [ (\"\\t\", \"\\t\"), (r\"\\t\", \"\\\\t\"), (\"{2}\\t\",", "assert fstr(template).format() == expected _backslashes_in_expression = [r\"{\\}\", r\"{\\'a\\'}\", r\"{\\t3}\", \"{\\n}\"] @pytest.mark.parametrize(\"template\", _backslashes_in_expression) def", "\"{ !r}\", \"{10:{ }}\", \" { } \", # The Python 
parser ignores", "\"bar\" assert fstr(\"{d['foo']}\").format(d=d) == \"bar\" def test_format_with_function(): def add(x, y): return x +", "import fstr def test_basic(): template = fstr(\"{x} + {y} = {x + y}\",", "fstr(template).format() _backlashes_in_string_part = [ (\"\\t\", \"\\t\"), (r\"\\t\", \"\\\\t\"), (\"{2}\\t\", \"2\\t\"), (\"{2}\\t{3}\", \"2\\t3\"), (\"\\t{3}\",", "_ok_mismatched_braces) def test_ok_mistmatched_braces(template, expected): assert fstr(template).format() == expected _ok_lambdas = [ (\"{(lambda y:x*y)('8')!r}\",", "assert template.format(y=3) == \"1 + 3 = 4\" def test_basic_format_language(): template = fstr(\"{x!r}", "(r\"\\u0394\", \"\\\\u0394\"), (r\"\\u0394\", \"\\\\u0394\"), (\"{2}\\u0394\", \"2\\u0394\"), (\"{2}\\u0394{3}\", \"2\\u03943\"), (\"\\u0394{3}\", \"\\u03943\"), (\"\\x20\", \" \"),", "\"{!\", \"{!s:\", \"{:\", \"{:x\", \"{\\xa0}\", ] @pytest.mark.parametrize(\"template\", _bad_missing_expressions) def test_missing_expression(template): with pytest.raises(SyntaxError): fstr(template).format()", "assert fstr(template).format(**context) == \"result: 12.35\" _format_hex_specifier_templates = [ (10, \"{value:#{1}0x}\"), (10, \"{value:{'#'}1{0}{'x'}}\"), (-10,", "template = fstr(\"{{}}\") assert template.format() == \"{}\" def test_odd_double_brace_replacement(): template = fstr(\"{{{x}}}\") assert", "pytest.raises(SyntaxError): fstr(\"{%s}\" % brace).format() def test_many_expressions(): context = {\"x\": \"X\", \"width\": 1} def", "\"2\\t3\"), (\"\\t{3}\", \"\\t3\"), (\"\\u0394\", \"\\u0394\"), (r\"\\u0394\", \"\\\\u0394\"), (r\"\\u0394\", \"\\\\u0394\"), (\"{2}\\u0394\", \"2\\u0394\"), (\"{2}\\u0394{3}\", \"2\\u03943\"),", "1000).format() == \"1 x y \" * 1000 _format_specifier_width_precision_templates = [ \"result: {value:{width}.{precision}}\",", "'xeric\"s'), ('{\"\"\"eric\"s\"\"\" \"y\"}', 'eric\"sy'), ('{\"\"\"x\"\"\" \"\"\"eric\"s\"\"\" \"y\"}', 'xeric\"sy'), ('{\"\"\"x\"\"\" \"\"\"eric\"s\"\"\" \"\"\"y\"\"\"}', 
'xeric\"sy'), ('{r\"\"\"x\"\"\"", "precision:1}}\", ] _format_specifier_expression_expecteds = [ \"result: 12.35\", \"result: 12.35\", \"result: 12.35\", \"result: 12.35\",", "('{\"\"\"eric\"s\"\"\" \"y\"}', 'eric\"sy'), ('{\"\"\"x\"\"\" \"\"\"eric\"s\"\"\" \"y\"}', 'xeric\"sy'), ('{\"\"\"x\"\"\" \"\"\"eric\"s\"\"\" \"\"\"y\"\"\"}', 'xeric\"sy'), ('{r\"\"\"x\"\"\" \"\"\"eric\"s\"\"\"", "conversion char \"{x!s{y}}\", \"{3!ss}\", \"{3!ss:}\", \"{3!ss:s}\", ] for bad in bad_conversions: with pytest.raises(SyntaxError):", "}\").format() == \"3\" assert fstr(\"{3 }\").format() == \"3\" assert fstr(\"expr={ {x: y for", "] else: _causes_errors = [ (\"{(lambda: 0):x}\", TypeError), (\"{(0,):x}\", TypeError), (\"{1000:j}\", ValueError), ]", "= fstr(\"{x} + {y} = {x + y}\", x=1) assert template.format(y=2) == \"1", "== \"1 + 3 = 4\" def test_basic_format_language(): template = fstr(\"{x!r} + {y!r}", "y in [(1, 2), ]} }\").format() == \"expr={1: 2}\" assert fstr(\"expr={ {x: y", "\"'a'\" if version_info >= (3, 0): assert fstr('{\"a\"!a}').format() == \"'a'\" # Not a", "_backlashes_in_string_part.extend( [ (\"\\U00000394\", \"\\u0394\"), (r\"\\U00000394\", \"\\\\U00000394\"), (r\"\\U00000394\", \"\\\\U00000394\"), (\"{2}\\U00000394\", \"2\\u0394\"), (\"{2}\\U00000394{3}\", \"2\\u03943\"), (\"\\U00000394{3}\",", "before conversion char \"{x!s{y}}\", \"{3!ss}\", \"{3!ss:}\", \"{3!ss:s}\", ] for bad in bad_conversions: with", "== \"result: 12.35\" _format_hex_specifier_templates = [ (10, \"{value:#{1}0x}\"), (10, \"{value:{'#'}1{0}{'x'}}\"), (-10, \"{value:-{'#'}1{0}x}\"), (-10,", "= [\"{,}\", \"{,}\", \"{3)+(4}\", \"{\\n}\"] @pytest.mark.parametrize(\"template\", _bad_parens_in_expressions) def test_bad_parens_in_expressions(template): with pytest.raises(SyntaxError): fstr(template).format() _backlashes_in_string_part", "\" \"), (r\"\\x20\", \"\\\\x20\"), (r\"\\x20\", \"\\\\x20\"), (\"{2}\\x20\", \"2 \"), (\"{2}\\x20{3}\", \"2 3\"), (\"\\x20{3}\",", "6): _causes_errors = [ 
(\"{(lambda: 0):x}\", TypeError), (\"{(0,):x}\", TypeError), (\"{1000:j}\", SyntaxError), ] else:", "{y} = {x + y}\", x=1) assert template.format(y=2) == \"1 + 2 =", "assert fstr(\"\"\"{d[\"'\"]}\"\"\").format(d=d) == \"single-quote\" assert fstr(\"\"\"{d['\"']}\"\"\").format(d=d) == \"double-quote\" assert fstr('{d[\"foo\"]}').format(d=d) == \"bar\" assert", "\"{\\n}\"] @pytest.mark.parametrize(\"template\", _bad_parens_in_expressions) def test_bad_parens_in_expressions(template): with pytest.raises(SyntaxError): fstr(template).format() _backlashes_in_string_part = [ (\"\\t\", \"\\t\"),", "] if version_info >= (3, 0): _backlashes_in_string_part.extend( [ (\"\\U00000394\", \"\\u0394\"), (r\"\\U00000394\", \"\\\\U00000394\"), (r\"\\U00000394\",", "assert ( fstr( \"\"\"{3+ 4}\"\"\" ).format() == \"7\" # noqa: W503 ) _empty_format_specifiers", "not spec: return \"*\" return spec assert fstr(\"{Obj():x}\").format(Obj=Obj) == \"x\" assert fstr(\"{Obj()}\").format(Obj=Obj) ==", "-0xa\", \" 0xa\", ] @pytest.mark.parametrize(\"template\", _format_specifier_width_precision_templates) def test_format_width_precision_specifier_expressions(template): context = {\"width\": 10, \"precision\":", "(\"\\t\", \"\\t\"), (r\"\\t\", \"\\\\t\"), (\"{2}\\t\", \"2\\t\"), (\"{2}\\t{3}\", \"2\\t3\"), (\"\\t{3}\", \"\\t3\"), (\"\\u0394\", \"\\u0394\"), (r\"\\u0394\",", "(\"{2}\\N{GREEK CAPITAL LETTER DELTA}\", \"2\\u0394\"), (\"{2}\\N{GREEK CAPITAL LETTER DELTA}{3}\", \"2\\u03943\"), (\"\\N{GREEK CAPITAL LETTER", "= [ (\"{(lambda: 0):x}\", TypeError), (\"{(0,):x}\", TypeError), (\"{1000:j}\", SyntaxError), ] else: _causes_errors =", "\"result: {value:{width:0}.{precision:1}}\", \"result: {value:{1}{0:0}.{precision:1}}\", \"result: {value:{ 1}{ 0:0}.{ precision:1}}\", ] _format_specifier_expression_expecteds = [", "\"0\" assert ( fstr( \"\"\"{3+ 4}\"\"\" ).format() == \"7\" # noqa: W503 )", "== \"a\" assert fstr('{\"a\"!r}').format() == \"'a'\" if version_info >= (3, 0): assert 
fstr('{\"a\"!a}').format()", "and width}x}\"), ] @pytest.mark.parametrize(\"value, template\", _format_hex_specifier_templates) def test_format_hex_specifier_expressions(value, template): expected = \" -0xa\"", "\"{3:}}>10}\", \"{3:{{>10}\", \"{3\", \"{3!\", \"{3:\", \"{3!s\", \"{3!s:\", \"{3!s:3\", \"x{\", \"x{x\", \"{x\", \"{3:s\", \"{{{\",", "\"2\\u03943\"), (\"\\u0394{3}\", \"\\u03943\"), (\"\\x20\", \" \"), (r\"\\x20\", \"\\\\x20\"), (r\"\\x20\", \"\\\\x20\"), (\"{2}\\x20\", \"2 \"),", "* n) for n in range(250, 260): make_template(n).format(**context) # Test around 256. for", "[ \"{{}\", \"{{}}}\", \"}\", \"x}\", \"x}x\", \"{3:}>10}\", \"{3:}}>10}\", \"{3:{{>10}\", \"{3\", \"{3!\", \"{3:\", \"{3!s\",", "= [(\"{1000:j}\", SyntaxError)] elif version_info < (3, 6): _causes_errors = [ (\"{(lambda: 0):x}\",", "expected _triple_quoted_strings = [ (\"{'''x'''}\", \"x\"), (\"{'''eric's'''}\", \"eric's\"), ('{\"x\" \"\"\"eric\"s\"\"\" \"y\"}', 'xeric\"sy'), ('{\"x\"", "def test_format_hex_specifier_expressions(value, template): expected = \" -0xa\" if value < 0 else \"", "\"x=10\" def test_leading_trailing_spaces(): assert fstr(\"{ 3}\").format() == \"3\" assert fstr(\"{ 3}\").format() == \"3\"", "assert fstr(template, x=5).format() == expected _triple_quoted_strings = [ (\"{'''x'''}\", \"x\"), (\"{'''eric's'''}\", \"eric's\"), ('{\"x\"", "assert fstr(\"{0}\").format() == \"0\" assert ( fstr( \"\"\"{3+ 4}\"\"\" ).format() == \"7\" #", "LETTER DELTA}{3}\", \"2\\u03943\"), (\"\\N{GREEK CAPITAL LETTER DELTA}{3}\", \"\\u03943\"), (\"2\\N{GREEK CAPITAL LETTER DELTA}\", \"2\\u0394\"),", "LETTER DELTA}3\", \"2\\u03943\"), (\"\\N{GREEK CAPITAL LETTER DELTA}3\", \"\\u03943\"), ] ) @pytest.mark.parametrize(\"template, expected\", _backlashes_in_string_part)", "x=\"a\") assert template.format(y=1) == \"{'a': 1}\" def test_hash_in_string(): # These aren't comments, since", "-0xa\" if value < 0 else \" 0xa\" assert fstr(template).format(value=value, width=10) == expected", "+ 
2}\").format() == \"3\" assert fstr(\"{1 + 2 }\").format() == \"3\" assert fstr(\"{", "for != as not ending an # expression. Normally it would, while looking", "_causes_errors = [ (\"{(lambda: 0):x}\", TypeError), (\"{(0,):x}\", TypeError), (\"{1000:j}\", SyntaxError), ] else: _causes_errors", "\"\\\\x20\"), (r\"\\x20\", \"\\\\x20\"), (\"{2}\\x20\", \"2 \"), (\"{2}\\x20{3}\", \"2 3\"), (\"\\x20{3}\", \" 3\"), (\"2\\x20\",", "_invalid_expressions) def test_invalid_expressions(invalid): with pytest.raises(SyntaxError): fstr(invalid).format() if version_info < (3, 0): _causes_errors =", "\"{x!s{y}}\", \"{3!ss}\", \"{3!ss:}\", \"{3!ss:s}\", ] for bad in bad_conversions: with pytest.raises(SyntaxError): fstr(bad).format() _invalid_expressions", "pytest.raises(SyntaxError): fstr(invalid).format() if version_info < (3, 0): _causes_errors = [(\"{1000:j}\", SyntaxError)] elif version_info", "template = fstr(\"{ {x: y} }\", x=\"a\") assert template.format(y=1) == \"{'a': 1}\" def", "fstr(\"{Obj():x}\").format(Obj=Obj) == \"x\" assert fstr(\"{Obj()}\").format(Obj=Obj) == \"*\" assert fstr(\"{Obj():}\").format(Obj=Obj) == \"*\" assert fstr(\"{3:}\").format()", "fstr(bad).format() _invalid_expressions = [\"{a[4)}\", \"{a(4]}\"] @pytest.mark.parametrize(\"invalid\", _invalid_expressions) def test_invalid_expressions(invalid): with pytest.raises(SyntaxError): fstr(invalid).format() if", "test_basic(): template = fstr(\"{x} + {y} = {x + y}\", x=1) assert template.format(y=2)", "LETTER DELTA}\", \"2\\u0394\"), (\"{2}\\N{GREEK CAPITAL LETTER DELTA}{3}\", \"2\\u03943\"), (\"\\N{GREEK CAPITAL LETTER DELTA}{3}\", \"\\u03943\"),", "\" 0xa\", \" 0xa\", \" -0xa\", \" -0xa\", \" 0xa\", ] @pytest.mark.parametrize(\"template\", _format_specifier_width_precision_templates)", "assert fstr(\"{3!=4:}\").format() == \"True\" assert fstr(\"{3!=4!s}\").format() == \"True\" assert fstr(\"{3!=4!s:.3}\").format() == \"Tru\" def", "@pytest.mark.parametrize(\"template, expected\", 
_backlashes_in_string_part) def test_backslashes_in_string_part(template, expected): assert fstr(template).format() == expected _backslashes_in_expression = [r\"{\\}\",", "expected = \" -0xa\" if value < 0 else \" 0xa\" assert fstr(template).format(value=value,", "10, \"precision\": 4, \"value\": 12.34567} assert fstr(template).format(**context) == \"result: 12.35\" _format_hex_specifier_templates = [", "\"'88888'\"), (\"{(lambda y:x*y)('8')!r:10}\", \"'88888' \"), (\"{(lambda y:x*y)('8'):10}\", \"88888 \"), ] @pytest.mark.parametrize(\"template, expected\", _ok_lambdas)", "assert fstr('{d[\"foo\"]}').format(d=d) == \"bar\" assert fstr(\"{d['foo']}\").format(d=d) == \"bar\" def test_format_with_function(): def add(x, y):", "\"3\" assert fstr(\"{ 3}\").format() == \"3\" assert fstr(\"{3 }\").format() == \"3\" assert fstr(\"{3", "(r\"\\{6*7}\", \"\\\\42\"), ] if version_info >= (3, 0): _backlashes_in_string_part.extend( [ (\"\\U00000394\", \"\\u0394\"), (r\"\\U00000394\",", "\"2\\u0394\"), (\"{2}\\U00000394{3}\", \"2\\u03943\"), (\"\\U00000394{3}\", \"\\u03943\"), (\"\\N{GREEK CAPITAL LETTER DELTA}\", \"\\u0394\"), (\"{2}\\N{GREEK CAPITAL LETTER", "\"3\" assert fstr(\"expr={ {x: y for x, y in [(1, 2), ]} }\").format()", "spec: return \"*\" return spec assert fstr(\"{Obj():x}\").format(Obj=Obj) == \"x\" assert fstr(\"{Obj()}\").format(Obj=Obj) == \"*\"", "expected _bad_mismatched_braces = [ \"{{}\", \"{{}}}\", \"}\", \"x}\", \"x}x\", \"{3:}>10}\", \"{3:}}>10}\", \"{3:{{>10}\", \"{3\",", "('{\"x\" \"\"\"eric\"s\"\"\"}', 'xeric\"s'), ('{\"\"\"eric\"s\"\"\" \"y\"}', 'eric\"sy'), ('{\"\"\"x\"\"\" \"\"\"eric\"s\"\"\" \"y\"}', 'xeric\"sy'), ('{\"\"\"x\"\"\" \"\"\"eric\"s\"\"\" \"\"\"y\"\"\"}',", "\" 3\"), (\"\\\\{6*7}\", \"\\\\42\"), (r\"\\{6*7}\", \"\\\\42\"), ] if version_info >= (3, 0): _backlashes_in_string_part.extend(", "\" 0xa\" assert fstr(template).format(value=value, width=10) == expected _invalid_format_specifier_templates = [\"{'s'!r{':10'}}\", \"{4:{/5}}\", 
\"{'s'!{'r'}}\"] @pytest.mark.parametrize(\"template\",", "def __format__(self, spec): if not spec: return \"*\" return spec assert fstr(\"{Obj():x}\").format(Obj=Obj) ==", "] @pytest.mark.parametrize(\"template, expected\", _triple_quoted_strings) def test_expressions_with_triple_quoted_strings(template, expected): assert fstr(template).format() == expected def test_missing_variable():", "expected\", _ok_lambdas) def test_lambda(template, expected): assert fstr(template, x=5).format() == expected _triple_quoted_strings = [", "= [ \"{{}\", \"{{}}}\", \"}\", \"x}\", \"x}x\", \"{3:}>10}\", \"{3:}}>10}\", \"{3:{{>10}\", \"{3\", \"{3!\", \"{3:\",", "\"3.14!!!!!!\" bad_conversions = [ \"{3!g}\" \"{3!A}\" \"{3!3}\" \"{3!G}\" \"{3!!}\" \"{3!:}\" \"{3! s}\" #", "version_info < (3, 6): _causes_errors = [ (\"{(lambda: 0):x}\", TypeError), (\"{(0,):x}\", TypeError), (\"{1000:j}\",", "\"{}\", \"{ '\" \" {} \", \"{!r}\", \"{ !r}\", \"{10:{ }}\", \" {", "expected _ok_lambdas = [ (\"{(lambda y:x*y)('8')!r}\", \"'88888'\"), (\"{(lambda y:x*y)('8')!r:10}\", \"'88888' \"), (\"{(lambda y:x*y)('8'):10}\",", "width=4) == \" 3\" template = fstr(\"{x:{width}.{precision}}\") assert template.format(x=1.2345, width=4, precision=2) == \"", "test_lambda(template, expected): assert fstr(template, x=5).format() == expected _triple_quoted_strings = [ (\"{'''x'''}\", \"x\"), (\"{'''eric's'''}\",", "'xeric\"sy'), ('{r\"\"\"x\"\"\" \"\"\"eric\"s\"\"\" \"\"\"y\"\"\"}', 'xeric\"sy'), ] @pytest.mark.parametrize(\"template, expected\", _triple_quoted_strings) def test_expressions_with_triple_quoted_strings(template, expected): assert", "_ok_lambdas) def test_lambda(template, expected): assert fstr(template, x=5).format() == expected _triple_quoted_strings = [ (\"{'''x'''}\",", "expected _invalid_format_specifier_templates = [\"{'s'!r{':10'}}\", \"{4:{/5}}\", \"{'s'!{'r'}}\"] @pytest.mark.parametrize(\"template\", _invalid_format_specifier_templates) def 
test_invalid_format_specifier_expressions(template): with pytest.raises(SyntaxError): fstr(template).format()", "\"3\" def test_even_double_brace_replacement(): template = fstr(\"{{}}\") assert template.format() == \"{}\" def test_odd_double_brace_replacement(): template", "parser ignores also the following # whitespace characters in additional to a space.", "with pytest.raises(NameError): fstr(\"v:{value}\").format() def test_missing_format_spec(): class Obj: def __format__(self, spec): if not spec:", "assert fstr(\"{3:}\").format() == \"3\" assert fstr(\"{3!s:}\").format() == \"3\" def test_call(): def foo(x): return", "\"{1}\" def test_trailing_and_leading_space(): assert fstr(\"{ 1 + 2}\").format() == \"3\" assert fstr(\"{1 +", "[ \"result: {value:{width}.{precision}}\", \"result: {value:{width!r}.{precision}}\", \"result: {value:{width:0}.{precision:1}}\", \"result: {value:{1}{0:0}.{precision:1}}\", \"result: {value:{ 1}{ 0:0}.{", "foo(x): return \"x=\" + str(x) assert fstr(\"{foo(10)}\").format(foo=foo) == \"x=10\" def test_leading_trailing_spaces(): assert fstr(\"{", "test_conversions(): assert fstr(\"{3.14:10.10}\").format() == \" 3.14\" assert fstr(\"{3.14!s:10.10}\").format() == \"3.14 \" assert fstr(\"{3.14!r:10.10}\").format()", "= \" -0xa\" if value < 0 else \" 0xa\" assert fstr(template).format(value=value, width=10)", "(\"{x}\", \"test\"), (\"{x:}\", \"test\"), (\"{x!s:}\", \"test\"), (\"{x!r:}\", \"'test'\"), ] @pytest.mark.parametrize(\"template, expected\", _empty_format_specifiers) def", "@pytest.mark.parametrize(\"template, expected\", _ok_mismatched_braces) def test_ok_mistmatched_braces(template, expected): assert fstr(template).format() == expected _ok_lambdas = [", "(3, 0): assert fstr('{\"a\"!a}').format() == \"'a'\" # Not a conversion. 
assert fstr('{\"a!r\"}').format() ==", "= 0 def __format__(self, spec): self.i += 1 return str(self.i) fstr(\"{x} {x}\").format(x=X()) ==", "= [ (\"{(lambda: 0):x}\", TypeError), (\"{(0,):x}\", TypeError), (\"{1000:j}\", ValueError), ] @pytest.mark.parametrize(\"bad, etype\", _causes_errors)", "def test_even_double_brace_replacement(): template = fstr(\"{{}}\") assert template.format() == \"{}\" def test_odd_double_brace_replacement(): template =", "_backlashes_in_string_part) def test_backslashes_in_string_part(template, expected): assert fstr(template).format() == expected _backslashes_in_expression = [r\"{\\}\", r\"{\\'a\\'}\", r\"{\\t3}\",", "def test_basic_format_language(): template = fstr(\"{x!r} + {y!r} = {x + y!r}\", x=\"a\") assert", "3}\").format() == \"3\" assert fstr(\"{3 }\").format() == \"3\" assert fstr(\"{3 }\").format() == \"3\"", "if value < 0 else \" 0xa\" assert fstr(template).format(value=value, width=10) == expected _invalid_format_specifier_templates", "{ } \", # The Python parser ignores also the following # whitespace", "\"{x\", \"{3:s\", \"{{{\", \"{{}}{\", \"{\", ] @pytest.mark.parametrize(\"template\", _bad_mismatched_braces) def test_bad_mismatched_braces(template): with pytest.raises(SyntaxError): fstr(template).format()", "* i assert actual == expected actual = make_template(250, \"{x:{width}} \").format(**context) expected =", "{x: y} }\", x=\"a\") assert template.format(y=1) == \"{'a': 1}\" def test_hash_in_string(): # These", "test_format_with_function(): def add(x, y): return x + y template = fstr(\"{add(x, y)}\", add=add)", "pytest.raises(SyntaxError): fstr(template).format() _ok_mismatched_braces = [(\"{'{'}\", \"{\"), (\"{'}'}\", \"}\")] @pytest.mark.parametrize(\"template, expected\", _ok_mismatched_braces) def test_ok_mistmatched_braces(template,", "fstr(\"{3!=4}\").format() == \"True\" assert fstr(\"{3!=4:}\").format() == \"True\" assert fstr(\"{3!=4!s}\").format() == \"True\" assert fstr(\"{3!=4!s:.3}\").format()", "== 
\"3.14!!!!!!\" bad_conversions = [ \"{3!g}\" \"{3!A}\" \"{3!3}\" \"{3!G}\" \"{3!!}\" \"{3!:}\" \"{3! s}\"", "+ y}\", x=1) assert template.format(y=2) == \"1 + 2 = 3\" assert template.format(y=3)", "fstr(template, x=5).format() == expected _triple_quoted_strings = [ (\"{'''x'''}\", \"x\"), (\"{'''eric's'''}\", \"eric's\"), ('{\"x\" \"\"\"eric\"s\"\"\"", "from sys import version_info import fstr def test_basic(): template = fstr(\"{x} + {y}", "_format_specifier_expression_expecteds = [ \"result: 12.35\", \"result: 12.35\", \"result: 12.35\", \"result: 12.35\", \"result: 12.35\",", "= {'\"': \"double-quote\", \"'\": \"single-quote\", \"foo\": \"bar\"} assert fstr(\"\"\"{d[\"'\"]}\"\"\").format(d=d) == \"single-quote\" assert fstr(\"\"\"{d['\"']}\"\"\").format(d=d)", "y} }\", x=\"a\") assert template.format(y=1) == \"{'a': 1}\" def test_hash_in_string(): # These aren't", "template.format(y=\"c\") == \"'a' + 'c' = 'ac'\" _A_GLOBAL = 1 def test_simple_fstr_evaluate(): a_local", "== \"{1}\" def test_trailing_and_leading_space(): assert fstr(\"{ 1 + 2}\").format() == \"3\" assert fstr(\"{1", "\" 0xa\", ] @pytest.mark.parametrize(\"template\", _format_specifier_width_precision_templates) def test_format_width_precision_specifier_expressions(template): context = {\"width\": 10, \"precision\": 4,", "== \"1 2\" _bad_missing_expressions = [ \"{}\", \"{ '\" \" {} \", \"{!r}\",", "] @pytest.mark.parametrize(\"value, template\", _format_hex_specifier_templates) def test_format_hex_specifier_expressions(value, template): expected = \" -0xa\" if value", "(\"{2}\\u0394\", \"2\\u0394\"), (\"{2}\\u0394{3}\", \"2\\u03943\"), (\"\\u0394{3}\", \"\\u03943\"), (\"\\x20\", \" \"), (r\"\\x20\", \"\\\\x20\"), (r\"\\x20\", \"\\\\x20\"),", "fstr(\"{3.14!a:10.10}\").format() == \"3.14 \" assert fstr('{\"a\"}').format() == \"a\" assert fstr('{\"a\"!r}').format() == \"'a'\" if", "\"{{}}}\", \"}\", \"x}\", \"x}x\", \"{3:}>10}\", \"{3:}}>10}\", \"{3:{{>10}\", \"{3\", \"{3!\", \"{3:\", \"{3!s\", 
\"{3!s:\", \"{3!s:3\",", "\"expr={1: 2}\" def test_not_equal(): # There's a special test for this because there's", "fstr(\"{3.14!r:10.10}\").format() == \"3.14 \" if version_info >= (3, 0): assert fstr(\"{3.14!a:10.10}\").format() == \"3.14", "test_many_expressions(): context = {\"x\": \"X\", \"width\": 1} def make_template(n, extra=\"\"): return fstr((\"{x} \"", "{'x'} {'y'} \" * 1000).format() == \"1 x y \" * 1000 _format_specifier_width_precision_templates", "= (context[\"x\"] + \" \") * i assert actual == expected actual =", "2), ]} }\").format() == \"expr={1: 2}\" assert fstr(\"expr={ {x: y for x, y", "We find the empty expression before the # missing closing brace. \"{!\", \"{!s:\",", "LETTER DELTA}\", \"2\\u0394\"), (\"2\\N{GREEK CAPITAL LETTER DELTA}3\", \"2\\u03943\"), (\"\\N{GREEK CAPITAL LETTER DELTA}3\", \"\\u03943\"),", "\"3.14 \" if version_info >= (3, 0): assert fstr(\"{3.14!a:10.10}\").format() == \"3.14 \" assert", "actual == expected actual = make_template(250, \"{x:{width}} \").format(**context) expected = (context[\"x\"] + \"", "= {x + y!r}\", x=\"a\") assert template.format(y=\"b\") == \"'a' + 'b' = 'ab'\"", "while looking for !s or !r. assert fstr(\"{3!=4}\").format() == \"True\" assert fstr(\"{3!=4:}\").format() ==", "] @pytest.mark.parametrize(\"template, expected\", _ok_lambdas) def test_lambda(template, expected): assert fstr(template, x=5).format() == expected _triple_quoted_strings", "3\"), (\"\\x20{3}\", \" 3\"), (\"2\\x20\", \"2 \"), (\"2\\x203\", \"2 3\"), (\"\\x203\", \" 3\"),", "\"3\" def dict_inside_braces_with_padding(): template = fstr(\"{ {x: y} }\", x=\"a\") assert template.format(y=1) ==", "conversion. 
\"{!x}\", \"{ !xr}\", \"{!x:}\", \"{!x:a}\", \"{ !xr:}\", \"{ !xr:a}\", \"{!}\", \"{:}\", #", "12.35\", \" 0xa\", \" 0xa\", \" -0xa\", \" -0xa\", \" 0xa\", ] @pytest.mark.parametrize(\"template\",", "fstr(\"{3!=4:}\").format() == \"True\" assert fstr(\"{3!=4!s}\").format() == \"True\" assert fstr(\"{3!=4!s:.3}\").format() == \"Tru\" def test_conversions():", "1.2\" def test_dict(): d = {'\"': \"double-quote\", \"'\": \"single-quote\", \"foo\": \"bar\"} assert fstr(\"\"\"{d[\"'\"]}\"\"\").format(d=d)", "(r\"\\u0394\", \"\\\\u0394\"), (\"{2}\\u0394\", \"2\\u0394\"), (\"{2}\\u0394{3}\", \"2\\u03943\"), (\"\\u0394{3}\", \"\\u03943\"), (\"\\x20\", \" \"), (r\"\\x20\", \"\\\\x20\"),", "\" 3\" template = fstr(\"{x:{width}.{precision}}\") assert template.format(x=1.2345, width=4, precision=2) == \" 1.2\" def", "\" 3\"), (\"2\\x20\", \"2 \"), (\"2\\x203\", \"2 3\"), (\"\\x203\", \" 3\"), (\"\\\\{6*7}\", \"\\\\42\"),", "\"{ '\" \" {} \", \"{!r}\", \"{ !r}\", \"{10:{ }}\", \" { }", "\"), (\"{(lambda y:x*y)('8'):10}\", \"88888 \"), ] @pytest.mark.parametrize(\"template, expected\", _ok_lambdas) def test_lambda(template, expected): assert", "[\"{'s'!r{':10'}}\", \"{4:{/5}}\", \"{'s'!{'r'}}\"] @pytest.mark.parametrize(\"template\", _invalid_format_specifier_templates) def test_invalid_format_specifier_expressions(template): with pytest.raises(SyntaxError): fstr(template).format() def test_side_effect_order(): class", "\"\\t\"), (r\"\\t\", \"\\\\t\"), (\"{2}\\t\", \"2\\t\"), (\"{2}\\t{3}\", \"2\\t3\"), (\"\\t{3}\", \"\\t3\"), (\"\\u0394\", \"\\u0394\"), (r\"\\u0394\", \"\\\\u0394\"),", "the f-string parser to look for != as not ending an # expression.", "template.format(x=1) == \"{1}\" def test_trailing_and_leading_space(): assert fstr(\"{ 1 + 2}\").format() == \"3\" assert", "_invalid_expressions = [\"{a[4)}\", \"{a(4]}\"] @pytest.mark.parametrize(\"invalid\", _invalid_expressions) def test_invalid_expressions(invalid): with pytest.raises(SyntaxError): fstr(invalid).format() if 
version_info", "Catch the empty expression before the # invalid conversion. \"{!x}\", \"{ !xr}\", \"{!x:}\",", "test_basic_format_language(): template = fstr(\"{x!r} + {y!r} = {x + y!r}\", x=\"a\") assert template.format(y=\"b\")", "(\"\\N{GREEK CAPITAL LETTER DELTA}3\", \"\\u03943\"), ] ) @pytest.mark.parametrize(\"template, expected\", _backlashes_in_string_part) def test_backslashes_in_string_part(template, expected):", "(\"\\N{GREEK CAPITAL LETTER DELTA}\", \"\\u0394\"), (\"{2}\\N{GREEK CAPITAL LETTER DELTA}\", \"2\\u0394\"), (\"{2}\\N{GREEK CAPITAL LETTER", "# We find the empty expression before the # missing closing brace. \"{!\",", "\") * 500 assert actual == expected # Test lots of expressions and", "fstr(template).format(**context) == \"result: 12.35\" _format_hex_specifier_templates = [ (10, \"{value:#{1}0x}\"), (10, \"{value:{'#'}1{0}{'x'}}\"), (-10, \"{value:-{'#'}1{0}x}\"),", "\"\"\"y\"\"\"}', 'xeric\"sy'), ('{r\"\"\"x\"\"\" \"\"\"eric\"s\"\"\" \"\"\"y\"\"\"}', 'xeric\"sy'), ] @pytest.mark.parametrize(\"template, expected\", _triple_quoted_strings) def test_expressions_with_triple_quoted_strings(template, expected):", "x, y in [(1, 2), ]}}\").format() == \"expr={1: 2}\" def test_not_equal(): # There's", "fstr(\"{3 }\").format() == \"3\" assert fstr(\"{3 }\").format() == \"3\" assert fstr(\"expr={ {x: y", "{value:{width!r}.{precision}}\", \"result: {value:{width:0}.{precision:1}}\", \"result: {value:{1}{0:0}.{precision:1}}\", \"result: {value:{ 1}{ 0:0}.{ precision:1}}\", ] _format_specifier_expression_expecteds =", "# noqa: W503 ) _empty_format_specifiers = [ (\"{x}\", \"test\"), (\"{x:}\", \"test\"), (\"{x!s:}\", \"test\"),", "\"double-quote\" assert fstr('{d[\"foo\"]}').format(d=d) == \"bar\" assert fstr(\"{d['foo']}\").format(d=d) == \"bar\" def test_format_with_function(): def add(x,", "\"{3!ss}\", \"{3!ss:}\", \"{3!ss:s}\", ] for bad in bad_conversions: with pytest.raises(SyntaxError): fstr(bad).format() _invalid_expressions =", "in the f-string 
parser to look for != as not ending an #", "\"{3\", \"{3!\", \"{3:\", \"{3!s\", \"{3!s:\", \"{3!s:3\", \"x{\", \"x{x\", \"{x\", \"{3:s\", \"{{{\", \"{{}}{\", \"{\",", "str(x) assert fstr(\"{foo(10)}\").format(foo=foo) == \"x=10\" def test_leading_trailing_spaces(): assert fstr(\"{ 3}\").format() == \"3\" assert", "\"3\" assert fstr(\"{3 }\").format() == \"3\" assert fstr(\"{3 }\").format() == \"3\" assert fstr(\"expr={", "3\" template = fstr(\"{x:{width}.{precision}}\") assert template.format(x=1.2345, width=4, precision=2) == \" 1.2\" def test_dict():", "0:0}.{ precision:1}}\", ] _format_specifier_expression_expecteds = [ \"result: 12.35\", \"result: 12.35\", \"result: 12.35\", \"result:", "3\"), (\"\\x203\", \" 3\"), (\"\\\\{6*7}\", \"\\\\42\"), (r\"\\{6*7}\", \"\\\\42\"), ] if version_info >= (3,", "template.format(y=3) == \"1 + 3 = 4\" def test_basic_format_language(): template = fstr(\"{x!r} +", "test_format_language_with_inner_fstr(): template = fstr(\"{x:{width}}\") assert template.format(x=10, width=3) == \" 10\" assert template.format(x=3, width=4)", "in additional to a space. \"{\\t\\f\\r\\n}\", # Catch the empty expression before the", "+ 2 }\").format() == \"3\" def dict_inside_braces_with_padding(): template = fstr(\"{ {x: y} }\",", "spec): self.i += 1 return str(self.i) fstr(\"{x} {x}\").format(x=X()) == \"1 2\" _bad_missing_expressions =", "it would, while looking for !s or !r. 
assert fstr(\"{3!=4}\").format() == \"True\" assert", "pytest.raises(SyntaxError): fstr(bad).format() _invalid_expressions = [\"{a[4)}\", \"{a(4]}\"] @pytest.mark.parametrize(\"invalid\", _invalid_expressions) def test_invalid_expressions(invalid): with pytest.raises(SyntaxError): fstr(invalid).format()", "(\"\\U00000394{3}\", \"\\u03943\"), (\"\\N{GREEK CAPITAL LETTER DELTA}\", \"\\u0394\"), (\"{2}\\N{GREEK CAPITAL LETTER DELTA}\", \"2\\u0394\"), (\"{2}\\N{GREEK", "@pytest.mark.parametrize(\"template, expected\", _ok_lambdas) def test_lambda(template, expected): assert fstr(template, x=5).format() == expected _triple_quoted_strings =", "test_bad_mismatched_braces(template): with pytest.raises(SyntaxError): fstr(template).format() _ok_mismatched_braces = [(\"{'{'}\", \"{\"), (\"{'}'}\", \"}\")] @pytest.mark.parametrize(\"template, expected\", _ok_mismatched_braces)", "== \"{'a': 1}\" def test_hash_in_string(): # These aren't comments, since they're in strings.", "fstr(\"{1} {'x'} {'y'} \" * 1000).format() == \"1 x y \" * 1000", "\"x}x\", \"{3:}>10}\", \"{3:}}>10}\", \"{3:{{>10}\", \"{3\", \"{3!\", \"{3:\", \"{3!s\", \"{3!s:\", \"{3!s:3\", \"x{\", \"x{x\", \"{x\",", "(\"{2}\\x20{3}\", \"2 3\"), (\"\\x20{3}\", \" 3\"), (\"2\\x20\", \"2 \"), (\"2\\x203\", \"2 3\"), (\"\\x203\",", "== \"a!r\" # Not a conversion, but show that ! 
is allowed in", "(\"\\x20\", \" \"), (r\"\\x20\", \"\\\\x20\"), (r\"\\x20\", \"\\\\x20\"), (\"{2}\\x20\", \"2 \"), (\"{2}\\x20{3}\", \"2 3\"),", "\"{\"), (\"{'}'}\", \"}\")] @pytest.mark.parametrize(\"template, expected\", _ok_mismatched_braces) def test_ok_mistmatched_braces(template, expected): assert fstr(template).format() == expected", "= [\"{'s'!r{':10'}}\", \"{4:{/5}}\", \"{'s'!{'r'}}\"] @pytest.mark.parametrize(\"template\", _invalid_format_specifier_templates) def test_invalid_format_specifier_expressions(template): with pytest.raises(SyntaxError): fstr(template).format() def test_side_effect_order():", "= 1 def test_simple_fstr_evaluate(): a_local = 2 # noqa: F841 assert fstr(\"{_A_GLOBAL} {a_local}\").evaluate()", "x=5).format() == expected _triple_quoted_strings = [ (\"{'''x'''}\", \"x\"), (\"{'''eric's'''}\", \"eric's\"), ('{\"x\" \"\"\"eric\"s\"\"\" \"y\"}',", "_format_specifier_width_precision_templates = [ \"result: {value:{width}.{precision}}\", \"result: {value:{width!r}.{precision}}\", \"result: {value:{width:0}.{precision:1}}\", \"result: {value:{1}{0:0}.{precision:1}}\", \"result: {value:{", "ending an # expression. Normally it would, while looking for !s or !r.", "(3, 0): _backlashes_in_string_part.extend( [ (\"\\U00000394\", \"\\u0394\"), (r\"\\U00000394\", \"\\\\U00000394\"), (r\"\\U00000394\", \"\\\\U00000394\"), (\"{2}\\U00000394\", \"2\\u0394\"), (\"{2}\\U00000394{3}\",", "\"*\" assert fstr(\"{Obj():}\").format(Obj=Obj) == \"*\" assert fstr(\"{3:}\").format() == \"3\" assert fstr(\"{3!s:}\").format() == \"3\"", "'ab'\" assert template.format(y=\"c\") == \"'a' + 'c' = 'ac'\" _A_GLOBAL = 1 def", "invalid conversion. 
\"{!x}\", \"{ !xr}\", \"{!x:}\", \"{!x:a}\", \"{ !xr:}\", \"{ !xr:a}\", \"{!}\", \"{:}\",", "\" -0xa\", \" 0xa\", ] @pytest.mark.parametrize(\"template\", _format_specifier_width_precision_templates) def test_format_width_precision_specifier_expressions(template): context = {\"width\": 10,", "expression before the # missing closing brace. \"{!\", \"{!s:\", \"{:\", \"{:x\", \"{\\xa0}\", ]", "_backlashes_in_string_part = [ (\"\\t\", \"\\t\"), (r\"\\t\", \"\\\\t\"), (\"{2}\\t\", \"2\\t\"), (\"{2}\\t{3}\", \"2\\t3\"), (\"\\t{3}\", \"\\t3\"),", "(\"\\u0394\", \"\\u0394\"), (r\"\\u0394\", \"\\\\u0394\"), (r\"\\u0394\", \"\\\\u0394\"), (\"{2}\\u0394\", \"2\\u0394\"), (\"{2}\\u0394{3}\", \"2\\u03943\"), (\"\\u0394{3}\", \"\\u03943\"), (\"\\x20\",", "3\"), (\"2\\x20\", \"2 \"), (\"2\\x203\", \"2 3\"), (\"\\x203\", \" 3\"), (\"\\\\{6*7}\", \"\\\\42\"), (r\"\\{6*7}\",", "'xeric\"sy'), ('{\"x\" \"\"\"eric\"s\"\"\"}', 'xeric\"s'), ('{\"\"\"eric\"s\"\"\" \"y\"}', 'eric\"sy'), ('{\"\"\"x\"\"\" \"\"\"eric\"s\"\"\" \"y\"}', 'xeric\"sy'), ('{\"\"\"x\"\"\" \"\"\"eric\"s\"\"\"", "an # expression. Normally it would, while looking for !s or !r. assert", "\"1 2\" def test_format_language_with_inner_fstr(): template = fstr(\"{x:{width}}\") assert template.format(x=10, width=3) == \" 10\"", "fstr('{d[\"foo\"]}').format(d=d) == \"bar\" assert fstr(\"{d['foo']}\").format(d=d) == \"bar\" def test_format_with_function(): def add(x, y): return", "\"eric's\"), ('{\"x\" \"\"\"eric\"s\"\"\" \"y\"}', 'xeric\"sy'), ('{\"x\" \"\"\"eric\"s\"\"\"}', 'xeric\"s'), ('{\"\"\"eric\"s\"\"\" \"y\"}', 'eric\"sy'), ('{\"\"\"x\"\"\" \"\"\"eric\"s\"\"\"", "fstr(template).format() _bad_parens_in_expressions = [\"{,}\", \"{,}\", \"{3)+(4}\", \"{\\n}\"] @pytest.mark.parametrize(\"template\", _bad_parens_in_expressions) def test_bad_parens_in_expressions(template): with pytest.raises(SyntaxError):", "missing closing brace. 
\"{!\", \"{!s:\", \"{:\", \"{:x\", \"{\\xa0}\", ] @pytest.mark.parametrize(\"template\", _bad_missing_expressions) def test_missing_expression(template):", "{\"#\": \"hash\"} assert fstr(\"{'#'}\").format() == \"#\" assert fstr(\"{d['#']}\").format(d=d) == \"hash\" @pytest.mark.parametrize(\"brace\", \"])}\") def", "== expected # Test lots of expressions and constants. assert fstr(\"{1} {'x'} {'y'}", "\"result: 12.35\", \"result: 12.35\", \" 0xa\", \" 0xa\", \" -0xa\", \" -0xa\", \"", "fstr(\"{_A_GLOBAL} {a_local}\").evaluate() == \"1 2\" def test_format_language_with_inner_fstr(): template = fstr(\"{x:{width}}\") assert template.format(x=10, width=3)", "conversion, but show that ! is allowed in a format spec. assert fstr(\"{3.14:!<10.10}\").format()", "+ 'c' = 'ac'\" _A_GLOBAL = 1 def test_simple_fstr_evaluate(): a_local = 2 #", "bad in bad_conversions: with pytest.raises(SyntaxError): fstr(bad).format() _invalid_expressions = [\"{a[4)}\", \"{a(4]}\"] @pytest.mark.parametrize(\"invalid\", _invalid_expressions) def", "with pytest.raises(SyntaxError): fstr(invalid).format() if version_info < (3, 0): _causes_errors = [(\"{1000:j}\", SyntaxError)] elif", "\"{3)+(4}\", \"{\\n}\"] @pytest.mark.parametrize(\"template\", _bad_parens_in_expressions) def test_bad_parens_in_expressions(template): with pytest.raises(SyntaxError): fstr(template).format() _backlashes_in_string_part = [ (\"\\t\",", "\" { } \", # The Python parser ignores also the following #", "== \"bar\" def test_format_with_function(): def add(x, y): return x + y template =", "version_info >= (3, 0): assert fstr(\"{3.14!a:10.10}\").format() == \"3.14 \" assert fstr('{\"a\"}').format() == \"a\"", "\" 1.2\" def test_dict(): d = {'\"': \"double-quote\", \"'\": \"single-quote\", \"foo\": \"bar\"} assert", "a space. 
\"{\\t\\f\\r\\n}\", # Catch the empty expression before the # invalid conversion.", "] for bad in bad_conversions: with pytest.raises(SyntaxError): fstr(bad).format() _invalid_expressions = [\"{a[4)}\", \"{a(4]}\"] @pytest.mark.parametrize(\"invalid\",", "('{r\"\"\"x\"\"\" \"\"\"eric\"s\"\"\" \"\"\"y\"\"\"}', 'xeric\"sy'), ] @pytest.mark.parametrize(\"template, expected\", _triple_quoted_strings) def test_expressions_with_triple_quoted_strings(template, expected): assert fstr(template).format()", "fstr(\"{3.14:!<10.10}\").format() == \"3.14!!!!!!\" bad_conversions = [ \"{3!g}\" \"{3!A}\" \"{3!3}\" \"{3!G}\" \"{3!!}\" \"{3!:}\" \"{3!", "LETTER DELTA}\", \"\\u0394\"), (\"{2}\\N{GREEK CAPITAL LETTER DELTA}\", \"2\\u0394\"), (\"{2}\\N{GREEK CAPITAL LETTER DELTA}{3}\", \"2\\u03943\"),", "_bad_mismatched_braces) def test_bad_mismatched_braces(template): with pytest.raises(SyntaxError): fstr(template).format() _ok_mismatched_braces = [(\"{'{'}\", \"{\"), (\"{'}'}\", \"}\")] @pytest.mark.parametrize(\"template,", "= [ (\"{(lambda y:x*y)('8')!r}\", \"'88888'\"), (\"{(lambda y:x*y)('8')!r:10}\", \"'88888' \"), (\"{(lambda y:x*y)('8'):10}\", \"88888 \"),", "\"test\"), (\"{x:}\", \"test\"), (\"{x!s:}\", \"test\"), (\"{x!r:}\", \"'test'\"), ] @pytest.mark.parametrize(\"template, expected\", _empty_format_specifiers) def test_empty_format_specifier(template,", "return \"x=\" + str(x) assert fstr(\"{foo(10)}\").format(foo=foo) == \"x=10\" def test_leading_trailing_spaces(): assert fstr(\"{ 3}\").format()", "s}\" # no space before conversion char \"{x!s{y}}\", \"{3!ss}\", \"{3!ss:}\", \"{3!ss:s}\", ] for", "DELTA}\", \"2\\u0394\"), (\"2\\N{GREEK CAPITAL LETTER DELTA}3\", \"2\\u03943\"), (\"\\N{GREEK CAPITAL LETTER DELTA}3\", \"\\u03943\"), ]", "for bad in bad_conversions: with pytest.raises(SyntaxError): fstr(bad).format() _invalid_expressions = [\"{a[4)}\", \"{a(4]}\"] @pytest.mark.parametrize(\"invalid\", _invalid_expressions)", "\"3\" assert fstr(\"{3!s:}\").format() == \"3\" 
def test_call(): def foo(x): return \"x=\" + str(x)", "assert fstr(\"{ 1 + 2}\").format() == \"3\" assert fstr(\"{1 + 2 }\").format() ==", "(\"{1000:j}\", SyntaxError), ] else: _causes_errors = [ (\"{(lambda: 0):x}\", TypeError), (\"{(0,):x}\", TypeError), (\"{1000:j}\",", "\"{value:#{3 != {4:5} and width}x}\"), ] @pytest.mark.parametrize(\"value, template\", _format_hex_specifier_templates) def test_format_hex_specifier_expressions(value, template): expected", "make_template(i).format(**context) expected = (context[\"x\"] + \" \") * i assert actual == expected", "constants. assert fstr(\"{1} {'x'} {'y'} \" * 1000).format() == \"1 x y \"", "0xa\" assert fstr(template).format(value=value, width=10) == expected _invalid_format_specifier_templates = [\"{'s'!r{':10'}}\", \"{4:{/5}}\", \"{'s'!{'r'}}\"] @pytest.mark.parametrize(\"template\", _invalid_format_specifier_templates)", "CAPITAL LETTER DELTA}\", \"2\\u0394\"), (\"{2}\\N{GREEK CAPITAL LETTER DELTA}{3}\", \"2\\u03943\"), (\"\\N{GREEK CAPITAL LETTER DELTA}{3}\",", "# noqa: F841 assert fstr(\"{_A_GLOBAL} {a_local}\").evaluate() == \"1 2\" def test_format_language_with_inner_fstr(): template =", "\"result: 12.35\", \"result: 12.35\", \"result: 12.35\", \"result: 12.35\", \"result: 12.35\", \" 0xa\", \"", "_invalid_format_specifier_templates = [\"{'s'!r{':10'}}\", \"{4:{/5}}\", \"{'s'!{'r'}}\"] @pytest.mark.parametrize(\"template\", _invalid_format_specifier_templates) def test_invalid_format_specifier_expressions(template): with pytest.raises(SyntaxError): fstr(template).format() def", "def test_missing_expression(template): with pytest.raises(SyntaxError): fstr(template).format() _bad_parens_in_expressions = [\"{,}\", \"{,}\", \"{3)+(4}\", \"{\\n}\"] @pytest.mark.parametrize(\"template\", _bad_parens_in_expressions)", "\"{ !xr}\", \"{!x:}\", \"{!x:a}\", \"{ !xr:}\", \"{ !xr:a}\", \"{!}\", \"{:}\", # We find", "12.35\", \"result: 12.35\", \"result: 12.35\", \"result: 12.35\", \" 0xa\", \" 0xa\", \" -0xa\",", 
"width=10) == expected _invalid_format_specifier_templates = [\"{'s'!r{':10'}}\", \"{4:{/5}}\", \"{'s'!{'r'}}\"] @pytest.mark.parametrize(\"template\", _invalid_format_specifier_templates) def test_invalid_format_specifier_expressions(template): with", "fstr(\"\"\"{d['\"']}\"\"\").format(d=d) == \"double-quote\" assert fstr('{d[\"foo\"]}').format(d=d) == \"bar\" assert fstr(\"{d['foo']}\").format(d=d) == \"bar\" def test_format_with_function():", "test_missing_format_spec(): class Obj: def __format__(self, spec): if not spec: return \"*\" return spec", "1}\" def test_hash_in_string(): # These aren't comments, since they're in strings. d =", "fstr(\"{x:{width}.{precision}}\") assert template.format(x=1.2345, width=4, precision=2) == \" 1.2\" def test_dict(): d = {'\"':", "\" if version_info >= (3, 0): assert fstr(\"{3.14!a:10.10}\").format() == \"3.14 \" assert fstr('{\"a\"}').format()", "\" -0xa\" if value < 0 else \" 0xa\" assert fstr(template).format(value=value, width=10) ==", "\"\\t3\"), (\"\\u0394\", \"\\u0394\"), (r\"\\u0394\", \"\\\\u0394\"), (r\"\\u0394\", \"\\\\u0394\"), (\"{2}\\u0394\", \"2\\u0394\"), (\"{2}\\u0394{3}\", \"2\\u03943\"), (\"\\u0394{3}\", \"\\u03943\"),", "with pytest.raises(SyntaxError): fstr(template).format() _bad_parens_in_expressions = [\"{,}\", \"{,}\", \"{3)+(4}\", \"{\\n}\"] @pytest.mark.parametrize(\"template\", _bad_parens_in_expressions) def test_bad_parens_in_expressions(template):", "parser to look for != as not ending an # expression. 
Normally it", "special test for this because there's a special # case in the f-string", "= {\"width\": 10, \"precision\": 4, \"value\": 12.34567} assert fstr(template).format(**context) == \"result: 12.35\" _format_hex_specifier_templates", "1} def make_template(n, extra=\"\"): return fstr((\"{x} \" + extra) * n) for n", "(\"{(lambda y:x*y)('8')!r:10}\", \"'88888' \"), (\"{(lambda y:x*y)('8'):10}\", \"88888 \"), ] @pytest.mark.parametrize(\"template, expected\", _ok_lambdas) def", "aren't comments, since they're in strings. d = {\"#\": \"hash\"} assert fstr(\"{'#'}\").format() ==", "\" \") * i assert actual == expected actual = make_template(250, \"{x:{width}} \").format(**context)", "return x + y template = fstr(\"{add(x, y)}\", add=add) assert template.format(x=1, y=2) ==", "\"{3:}>10}\", \"{3:}}>10}\", \"{3:{{>10}\", \"{3\", \"{3!\", \"{3:\", \"{3!s\", \"{3!s:\", \"{3!s:3\", \"x{\", \"x{x\", \"{x\", \"{3:s\",", "TypeError), (\"{1000:j}\", SyntaxError), ] else: _causes_errors = [ (\"{(lambda: 0):x}\", TypeError), (\"{(0,):x}\", TypeError),", "fstr(\"{d['#']}\").format(d=d) == \"hash\" @pytest.mark.parametrize(\"brace\", \"])}\") def test_unclosed_braces(brace): with pytest.raises(SyntaxError): fstr(\"{%s}\" % brace).format() def", "in bad_conversions: with pytest.raises(SyntaxError): fstr(bad).format() _invalid_expressions = [\"{a[4)}\", \"{a(4]}\"] @pytest.mark.parametrize(\"invalid\", _invalid_expressions) def test_invalid_expressions(invalid):", "}\").format() == \"3\" assert fstr(\"{ 1 + 2 }\").format() == \"3\" def dict_inside_braces_with_padding():", "\" -0xa\", \" -0xa\", \" 0xa\", ] @pytest.mark.parametrize(\"template\", _format_specifier_width_precision_templates) def test_format_width_precision_specifier_expressions(template): context =", "fstr(\"{3!=4!s}\").format() == \"True\" assert fstr(\"{3!=4!s:.3}\").format() == \"Tru\" def test_conversions(): assert fstr(\"{3.14:10.10}\").format() == \"", "0): assert fstr(\"{3.14!a:10.10}\").format() == \"3.14 
\" assert fstr('{\"a\"}').format() == \"a\" assert fstr('{\"a\"!r}').format() ==", "= [ (\"{x}\", \"test\"), (\"{x:}\", \"test\"), (\"{x!s:}\", \"test\"), (\"{x!r:}\", \"'test'\"), ] @pytest.mark.parametrize(\"template, expected\",", "assert fstr(\"{1 + 2 }\").format() == \"3\" assert fstr(\"{ 1 + 2 }\").format()", "\"\"\"eric\"s\"\"\" \"\"\"y\"\"\"}', 'xeric\"sy'), ] @pytest.mark.parametrize(\"template, expected\", _triple_quoted_strings) def test_expressions_with_triple_quoted_strings(template, expected): assert fstr(template).format() ==", "== \"3.14 \" assert fstr('{\"a\"}').format() == \"a\" assert fstr('{\"a\"!r}').format() == \"'a'\" if version_info", "(\"2\\N{GREEK CAPITAL LETTER DELTA}\", \"2\\u0394\"), (\"2\\N{GREEK CAPITAL LETTER DELTA}3\", \"2\\u03943\"), (\"\\N{GREEK CAPITAL LETTER", "] ) @pytest.mark.parametrize(\"template, expected\", _backlashes_in_string_part) def test_backslashes_in_string_part(template, expected): assert fstr(template).format() == expected _backslashes_in_expression", "{\"x\": \"X\", \"width\": 1} def make_template(n, extra=\"\"): return fstr((\"{x} \" + extra) *", "closing brace. 
\"{!\", \"{!s:\", \"{:\", \"{:x\", \"{\\xa0}\", ] @pytest.mark.parametrize(\"template\", _bad_missing_expressions) def test_missing_expression(template): with", "(\"{2}\\N{GREEK CAPITAL LETTER DELTA}{3}\", \"2\\u03943\"), (\"\\N{GREEK CAPITAL LETTER DELTA}{3}\", \"\\u03943\"), (\"2\\N{GREEK CAPITAL LETTER", "\"\\u0394\"), (\"{2}\\N{GREEK CAPITAL LETTER DELTA}\", \"2\\u0394\"), (\"{2}\\N{GREEK CAPITAL LETTER DELTA}{3}\", \"2\\u03943\"), (\"\\N{GREEK CAPITAL", "\"{{{\", \"{{}}{\", \"{\", ] @pytest.mark.parametrize(\"template\", _bad_mismatched_braces) def test_bad_mismatched_braces(template): with pytest.raises(SyntaxError): fstr(template).format() _ok_mismatched_braces =", "\"'a' + 'b' = 'ab'\" assert template.format(y=\"c\") == \"'a' + 'c' = 'ac'\"", "\"{,}\", \"{3)+(4}\", \"{\\n}\"] @pytest.mark.parametrize(\"template\", _bad_parens_in_expressions) def test_bad_parens_in_expressions(template): with pytest.raises(SyntaxError): fstr(template).format() _backlashes_in_string_part = [", "[ (\"{(lambda y:x*y)('8')!r}\", \"'88888'\"), (\"{(lambda y:x*y)('8')!r:10}\", \"'88888' \"), (\"{(lambda y:x*y)('8'):10}\", \"88888 \"), ]", "== \"expr={1: 2}\" def test_not_equal(): # There's a special test for this because", "since they're in strings. d = {\"#\": \"hash\"} assert fstr(\"{'#'}\").format() == \"#\" assert", "== \"True\" assert fstr(\"{3!=4!s}\").format() == \"True\" assert fstr(\"{3!=4!s:.3}\").format() == \"Tru\" def test_conversions(): assert", "assert fstr(\"{d['#']}\").format(d=d) == \"hash\" @pytest.mark.parametrize(\"brace\", \"])}\") def test_unclosed_braces(brace): with pytest.raises(SyntaxError): fstr(\"{%s}\" % brace).format()", "spec. 
assert fstr(\"{3.14:!<10.10}\").format() == \"3.14!!!!!!\" bad_conversions = [ \"{3!g}\" \"{3!A}\" \"{3!3}\" \"{3!G}\" \"{3!!}\"", "_bad_missing_expressions) def test_missing_expression(template): with pytest.raises(SyntaxError): fstr(template).format() _bad_parens_in_expressions = [\"{,}\", \"{,}\", \"{3)+(4}\", \"{\\n}\"] @pytest.mark.parametrize(\"template\",", "pytest.raises(SyntaxError): fstr(template).format() _backlashes_in_string_part = [ (\"\\t\", \"\\t\"), (r\"\\t\", \"\\\\t\"), (\"{2}\\t\", \"2\\t\"), (\"{2}\\t{3}\", \"2\\t3\"),", "DELTA}\", \"\\u0394\"), (\"{2}\\N{GREEK CAPITAL LETTER DELTA}\", \"2\\u0394\"), (\"{2}\\N{GREEK CAPITAL LETTER DELTA}{3}\", \"2\\u03943\"), (\"\\N{GREEK", "== expected _triple_quoted_strings = [ (\"{'''x'''}\", \"x\"), (\"{'''eric's'''}\", \"eric's\"), ('{\"x\" \"\"\"eric\"s\"\"\" \"y\"}', 'xeric\"sy'),", "\"}\")] @pytest.mark.parametrize(\"template, expected\", _ok_mismatched_braces) def test_ok_mistmatched_braces(template, expected): assert fstr(template).format() == expected _ok_lambdas =", "no space before conversion char \"{x!s{y}}\", \"{3!ss}\", \"{3!ss:}\", \"{3!ss:s}\", ] for bad in", "(\"\\N{GREEK CAPITAL LETTER DELTA}{3}\", \"\\u03943\"), (\"2\\N{GREEK CAPITAL LETTER DELTA}\", \"2\\u0394\"), (\"2\\N{GREEK CAPITAL LETTER", "\"{!r}\", \"{ !r}\", \"{10:{ }}\", \" { } \", # The Python parser", "\").format(**context) expected = (context[\"x\"] + \" \") * 500 assert actual == expected", "(-10, \"{value:{'-'}#{1}0{'x'}}\"), (10, \"{value:#{3 != {4:5} and width}x}\"), ] @pytest.mark.parametrize(\"value, template\", _format_hex_specifier_templates) def", "(\"{'''eric's'''}\", \"eric's\"), ('{\"x\" \"\"\"eric\"s\"\"\" \"y\"}', 'xeric\"sy'), ('{\"x\" \"\"\"eric\"s\"\"\"}', 'xeric\"s'), ('{\"\"\"eric\"s\"\"\" \"y\"}', 'eric\"sy'), ('{\"\"\"x\"\"\"", "show that ! is allowed in a format spec. 
assert fstr(\"{3.14:!<10.10}\").format() == \"3.14!!!!!!\"", "+ 3 = 4\" def test_basic_format_language(): template = fstr(\"{x!r} + {y!r} = {x", "\"{:x\", \"{\\xa0}\", ] @pytest.mark.parametrize(\"template\", _bad_missing_expressions) def test_missing_expression(template): with pytest.raises(SyntaxError): fstr(template).format() _bad_parens_in_expressions = [\"{,}\",", "class Obj: def __format__(self, spec): if not spec: return \"*\" return spec assert", "def test_side_effect_order(): class X: def __init__(self): self.i = 0 def __format__(self, spec): self.i", "0): _backlashes_in_string_part.extend( [ (\"\\U00000394\", \"\\u0394\"), (r\"\\U00000394\", \"\\\\U00000394\"), (r\"\\U00000394\", \"\\\\U00000394\"), (\"{2}\\U00000394\", \"2\\u0394\"), (\"{2}\\U00000394{3}\", \"2\\u03943\"),", "spec assert fstr(\"{Obj():x}\").format(Obj=Obj) == \"x\" assert fstr(\"{Obj()}\").format(Obj=Obj) == \"*\" assert fstr(\"{Obj():}\").format(Obj=Obj) == \"*\"", "(\"{(lambda: 0):x}\", TypeError), (\"{(0,):x}\", TypeError), (\"{1000:j}\", ValueError), ] @pytest.mark.parametrize(\"bad, etype\", _causes_errors) def test_errors(bad,", "\"2\\u03943\"), (\"\\N{GREEK CAPITAL LETTER DELTA}{3}\", \"\\u03943\"), (\"2\\N{GREEK CAPITAL LETTER DELTA}\", \"2\\u0394\"), (\"2\\N{GREEK CAPITAL", "for i in range(250, 260): actual = make_template(i).format(**context) expected = (context[\"x\"] + \"", "test_hash_in_string(): # These aren't comments, since they're in strings. 
d = {\"#\": \"hash\"}", "2\" def test_format_language_with_inner_fstr(): template = fstr(\"{x:{width}}\") assert template.format(x=10, width=3) == \" 10\" assert", "\" \") * 500 assert actual == expected # Test lots of expressions", "1000 _format_specifier_width_precision_templates = [ \"result: {value:{width}.{precision}}\", \"result: {value:{width!r}.{precision}}\", \"result: {value:{width:0}.{precision:1}}\", \"result: {value:{1}{0:0}.{precision:1}}\", \"result:", "pytest from sys import version_info import fstr def test_basic(): template = fstr(\"{x} +", "\"value\": 12.34567} assert fstr(template).format(**context) == \"result: 12.35\" _format_hex_specifier_templates = [ (10, \"{value:#{1}0x}\"), (10,", "\"{\\t\\f\\r\\n}\", # Catch the empty expression before the # invalid conversion. \"{!x}\", \"{", "(\"{2}\\U00000394\", \"2\\u0394\"), (\"{2}\\U00000394{3}\", \"2\\u03943\"), (\"\\U00000394{3}\", \"\\u03943\"), (\"\\N{GREEK CAPITAL LETTER DELTA}\", \"\\u0394\"), (\"{2}\\N{GREEK CAPITAL", "\" * 1000).format() == \"1 x y \" * 1000 _format_specifier_width_precision_templates = [", "def test_invalid_format_specifier_expressions(template): with pytest.raises(SyntaxError): fstr(template).format() def test_side_effect_order(): class X: def __init__(self): self.i =", "\"2\\u0394\"), (\"{2}\\u0394{3}\", \"2\\u03943\"), (\"\\u0394{3}\", \"\\u03943\"), (\"\\x20\", \" \"), (r\"\\x20\", \"\\\\x20\"), (r\"\\x20\", \"\\\\x20\"), (\"{2}\\x20\",", "(r\"\\U00000394\", \"\\\\U00000394\"), (\"{2}\\U00000394\", \"2\\u0394\"), (\"{2}\\U00000394{3}\", \"2\\u03943\"), (\"\\U00000394{3}\", \"\\u03943\"), (\"\\N{GREEK CAPITAL LETTER DELTA}\", \"\\u0394\"),", "\"'88888' \"), (\"{(lambda y:x*y)('8'):10}\", \"88888 \"), ] @pytest.mark.parametrize(\"template, expected\", _ok_lambdas) def test_lambda(template, expected):", "fstr(\"expr={ {x: y for x, y in [(1, 2), ]} }\").format() == \"expr={1:", "case in the f-string parser to look for != as not ending an", "\"a!r\" # Not a conversion, but 
show that ! is allowed in a", "\"*\" assert fstr(\"{3:}\").format() == \"3\" assert fstr(\"{3!s:}\").format() == \"3\" def test_call(): def foo(x):", "\"), (r\"\\x20\", \"\\\\x20\"), (r\"\\x20\", \"\\\\x20\"), (\"{2}\\x20\", \"2 \"), (\"{2}\\x20{3}\", \"2 3\"), (\"\\x20{3}\", \"", "assert fstr(\"{3!s:}\").format() == \"3\" def test_call(): def foo(x): return \"x=\" + str(x) assert", "assert template.format(y=\"b\") == \"'a' + 'b' = 'ab'\" assert template.format(y=\"c\") == \"'a' +", "r\"{\\'a\\'}\", r\"{\\t3}\", \"{\\n}\"] @pytest.mark.parametrize(\"template\", _backslashes_in_expression) def test_no_backslashes_in_expression_part(template): with pytest.raises(SyntaxError): fstr(template).format() def test_newlines_in_expressions(): assert", "12.35\", \"result: 12.35\", \" 0xa\", \" 0xa\", \" -0xa\", \" -0xa\", \" 0xa\",", "= fstr(\"{add(x, y)}\", add=add) assert template.format(x=1, y=2) == \"3\" def test_even_double_brace_replacement(): template =", "def test_dict(): d = {'\"': \"double-quote\", \"'\": \"single-quote\", \"foo\": \"bar\"} assert fstr(\"\"\"{d[\"'\"]}\"\"\").format(d=d) ==", "0):x}\", TypeError), (\"{(0,):x}\", TypeError), (\"{1000:j}\", SyntaxError), ] else: _causes_errors = [ (\"{(lambda: 0):x}\",", "the empty expression before the # invalid conversion. \"{!x}\", \"{ !xr}\", \"{!x:}\", \"{!x:a}\",", "fstr(template).format() _ok_mismatched_braces = [(\"{'{'}\", \"{\"), (\"{'}'}\", \"}\")] @pytest.mark.parametrize(\"template, expected\", _ok_mismatched_braces) def test_ok_mistmatched_braces(template, expected):", "test_ok_mistmatched_braces(template, expected): assert fstr(template).format() == expected _ok_lambdas = [ (\"{(lambda y:x*y)('8')!r}\", \"'88888'\"), (\"{(lambda", "test_invalid_expressions(invalid): with pytest.raises(SyntaxError): fstr(invalid).format() if version_info < (3, 0): _causes_errors = [(\"{1000:j}\", SyntaxError)]", "not ending an # expression. 
Normally it would, while looking for !s or", "3 = 4\" def test_basic_format_language(): template = fstr(\"{x!r} + {y!r} = {x +", "@pytest.mark.parametrize(\"template\", _invalid_format_specifier_templates) def test_invalid_format_specifier_expressions(template): with pytest.raises(SyntaxError): fstr(template).format() def test_side_effect_order(): class X: def __init__(self):", "!r}\", \"{10:{ }}\", \" { } \", # The Python parser ignores also", "[(\"{'{'}\", \"{\"), (\"{'}'}\", \"}\")] @pytest.mark.parametrize(\"template, expected\", _ok_mismatched_braces) def test_ok_mistmatched_braces(template, expected): assert fstr(template).format() ==", "(\"\\u0394{3}\", \"\\u03943\"), (\"\\x20\", \" \"), (r\"\\x20\", \"\\\\x20\"), (r\"\\x20\", \"\\\\x20\"), (\"{2}\\x20\", \"2 \"), (\"{2}\\x20{3}\",", "y!r}\", x=\"a\") assert template.format(y=\"b\") == \"'a' + 'b' = 'ab'\" assert template.format(y=\"c\") ==", "expected = (context[\"x\"] + \" \") * i assert actual == expected actual", "\"test\"), (\"{x!s:}\", \"test\"), (\"{x!r:}\", \"'test'\"), ] @pytest.mark.parametrize(\"template, expected\", _empty_format_specifiers) def test_empty_format_specifier(template, expected): assert", "@pytest.mark.parametrize(\"template\", _backslashes_in_expression) def test_no_backslashes_in_expression_part(template): with pytest.raises(SyntaxError): fstr(template).format() def test_newlines_in_expressions(): assert fstr(\"{0}\").format() == \"0\"", "= [ (\"\\t\", \"\\t\"), (r\"\\t\", \"\\\\t\"), (\"{2}\\t\", \"2\\t\"), (\"{2}\\t{3}\", \"2\\t3\"), (\"\\t{3}\", \"\\t3\"), (\"\\u0394\",", "def test_lambda(template, expected): assert fstr(template, x=5).format() == expected _triple_quoted_strings = [ (\"{'''x'''}\", \"x\"),", "in [(1, 2), ]} }\").format() == \"expr={1: 2}\" assert fstr(\"expr={ {x: y for", "SyntaxError)] elif version_info < (3, 6): _causes_errors = [ (\"{(lambda: 0):x}\", TypeError), (\"{(0,):x}\",", "'b' = 'ab'\" assert template.format(y=\"c\") == \"'a' + 'c' = 'ac'\" _A_GLOBAL 
=", "template.format(x=1, y=2) == \"3\" def test_even_double_brace_replacement(): template = fstr(\"{{}}\") assert template.format() == \"{}\"", "fstr( \"\"\"{3+ 4}\"\"\" ).format() == \"7\" # noqa: W503 ) _empty_format_specifiers = [", "around 256. for i in range(250, 260): actual = make_template(i).format(**context) expected = (context[\"x\"]", "\"{value:-{'#'}1{0}x}\"), (-10, \"{value:{'-'}#{1}0{'x'}}\"), (10, \"{value:#{3 != {4:5} and width}x}\"), ] @pytest.mark.parametrize(\"value, template\", _format_hex_specifier_templates)", "expression before the # invalid conversion. \"{!x}\", \"{ !xr}\", \"{!x:}\", \"{!x:a}\", \"{ !xr:}\",", "test_leading_trailing_spaces(): assert fstr(\"{ 3}\").format() == \"3\" assert fstr(\"{ 3}\").format() == \"3\" assert fstr(\"{3", "template.format(y=1) == \"{'a': 1}\" def test_hash_in_string(): # These aren't comments, since they're in", "X: def __init__(self): self.i = 0 def __format__(self, spec): self.i += 1 return", "def test_no_backslashes_in_expression_part(template): with pytest.raises(SyntaxError): fstr(template).format() def test_newlines_in_expressions(): assert fstr(\"{0}\").format() == \"0\" assert (", "assert template.format(x=1, y=2) == \"3\" def test_even_double_brace_replacement(): template = fstr(\"{{}}\") assert template.format() ==", "(\"{2}\\t{3}\", \"2\\t3\"), (\"\\t{3}\", \"\\t3\"), (\"\\u0394\", \"\\u0394\"), (r\"\\u0394\", \"\\\\u0394\"), (r\"\\u0394\", \"\\\\u0394\"), (\"{2}\\u0394\", \"2\\u0394\"), (\"{2}\\u0394{3}\",", "for x, y in [(1, 2), ]} }\").format() == \"expr={1: 2}\" assert fstr(\"expr={", "expected): assert fstr(template).format(x=\"test\") == expected _bad_mismatched_braces = [ \"{{}\", \"{{}}}\", \"}\", \"x}\", \"x}x\",", "y \" * 1000 _format_specifier_width_precision_templates = [ \"result: {value:{width}.{precision}}\", \"result: {value:{width!r}.{precision}}\", \"result: {value:{width:0}.{precision:1}}\",", "\"\\u03943\"), (\"2\\N{GREEK CAPITAL LETTER DELTA}\", \"2\\u0394\"), (\"2\\N{GREEK 
CAPITAL LETTER DELTA}3\", \"2\\u03943\"), (\"\\N{GREEK CAPITAL", "fstr((\"{x} \" + extra) * n) for n in range(250, 260): make_template(n).format(**context) #", "(\"2\\x20\", \"2 \"), (\"2\\x203\", \"2 3\"), (\"\\x203\", \" 3\"), (\"\\\\{6*7}\", \"\\\\42\"), (r\"\\{6*7}\", \"\\\\42\"),", "(\"\\x203\", \" 3\"), (\"\\\\{6*7}\", \"\\\\42\"), (r\"\\{6*7}\", \"\\\\42\"), ] if version_info >= (3, 0):", "0xa\", \" -0xa\", \" -0xa\", \" 0xa\", ] @pytest.mark.parametrize(\"template\", _format_specifier_width_precision_templates) def test_format_width_precision_specifier_expressions(template): context", "] @pytest.mark.parametrize(\"template\", _bad_missing_expressions) def test_missing_expression(template): with pytest.raises(SyntaxError): fstr(template).format() _bad_parens_in_expressions = [\"{,}\", \"{,}\", \"{3)+(4}\",", "y=2) == \"3\" def test_even_double_brace_replacement(): template = fstr(\"{{}}\") assert template.format() == \"{}\" def", "assert fstr(\"{1} {'x'} {'y'} \" * 1000).format() == \"1 x y \" *", "(\"\\\\{6*7}\", \"\\\\42\"), (r\"\\{6*7}\", \"\\\\42\"), ] if version_info >= (3, 0): _backlashes_in_string_part.extend( [ (\"\\U00000394\",", "assert fstr(\"{ 1 + 2 }\").format() == \"3\" def dict_inside_braces_with_padding(): template = fstr(\"{", "ignores also the following # whitespace characters in additional to a space. 
\"{\\t\\f\\r\\n}\",", "\"{:}\", # We find the empty expression before the # missing closing brace.", "\"\\\\u0394\"), (\"{2}\\u0394\", \"2\\u0394\"), (\"{2}\\u0394{3}\", \"2\\u03943\"), (\"\\u0394{3}\", \"\\u03943\"), (\"\\x20\", \" \"), (r\"\\x20\", \"\\\\x20\"), (r\"\\x20\",", "assert fstr(\"{Obj()}\").format(Obj=Obj) == \"*\" assert fstr(\"{Obj():}\").format(Obj=Obj) == \"*\" assert fstr(\"{3:}\").format() == \"3\" assert", "\"X\", \"width\": 1} def make_template(n, extra=\"\"): return fstr((\"{x} \" + extra) * n)", "assert fstr(\"{d['foo']}\").format(d=d) == \"bar\" def test_format_with_function(): def add(x, y): return x + y", "== expected _ok_lambdas = [ (\"{(lambda y:x*y)('8')!r}\", \"'88888'\"), (\"{(lambda y:x*y)('8')!r:10}\", \"'88888' \"), (\"{(lambda", "\"\"\"eric\"s\"\"\" \"y\"}', 'xeric\"sy'), ('{\"\"\"x\"\"\" \"\"\"eric\"s\"\"\" \"\"\"y\"\"\"}', 'xeric\"sy'), ('{r\"\"\"x\"\"\" \"\"\"eric\"s\"\"\" \"\"\"y\"\"\"}', 'xeric\"sy'), ] @pytest.mark.parametrize(\"template,", "return str(self.i) fstr(\"{x} {x}\").format(x=X()) == \"1 2\" _bad_missing_expressions = [ \"{}\", \"{ '\"", "= fstr(\"{ {x: y} }\", x=\"a\") assert template.format(y=1) == \"{'a': 1}\" def test_hash_in_string():", "looking for !s or !r. 
assert fstr(\"{3!=4}\").format() == \"True\" assert fstr(\"{3!=4:}\").format() == \"True\"", "\"\\u03943\"), ] ) @pytest.mark.parametrize(\"template, expected\", _backlashes_in_string_part) def test_backslashes_in_string_part(template, expected): assert fstr(template).format() == expected", "def test_missing_variable(): with pytest.raises(NameError): fstr(\"v:{value}\").format() def test_missing_format_spec(): class Obj: def __format__(self, spec): if", "== \"True\" assert fstr(\"{3!=4!s:.3}\").format() == \"Tru\" def test_conversions(): assert fstr(\"{3.14:10.10}\").format() == \" 3.14\"", "(-10, \"{value:-{'#'}1{0}x}\"), (-10, \"{value:{'-'}#{1}0{'x'}}\"), (10, \"{value:#{3 != {4:5} and width}x}\"), ] @pytest.mark.parametrize(\"value, template\",", "\"1 x y \" * 1000 _format_specifier_width_precision_templates = [ \"result: {value:{width}.{precision}}\", \"result: {value:{width!r}.{precision}}\",", "== \"3\" assert fstr(\"{3!s:}\").format() == \"3\" def test_call(): def foo(x): return \"x=\" +", "2 # noqa: F841 assert fstr(\"{_A_GLOBAL} {a_local}\").evaluate() == \"1 2\" def test_format_language_with_inner_fstr(): template", "also the following # whitespace characters in additional to a space. 
\"{\\t\\f\\r\\n}\", #", "\" + extra) * n) for n in range(250, 260): make_template(n).format(**context) # Test", "\"result: 12.35\", \"result: 12.35\", \"result: 12.35\", \" 0xa\", \" 0xa\", \" -0xa\", \"", "'\" \" {} \", \"{!r}\", \"{ !r}\", \"{10:{ }}\", \" { } \",", "== expected _bad_mismatched_braces = [ \"{{}\", \"{{}}}\", \"}\", \"x}\", \"x}x\", \"{3:}>10}\", \"{3:}}>10}\", \"{3:{{>10}\",", "assert fstr(\"{Obj():}\").format(Obj=Obj) == \"*\" assert fstr(\"{3:}\").format() == \"3\" assert fstr(\"{3!s:}\").format() == \"3\" def", "fstr(template).format(value=value, width=10) == expected _invalid_format_specifier_templates = [\"{'s'!r{':10'}}\", \"{4:{/5}}\", \"{'s'!{'r'}}\"] @pytest.mark.parametrize(\"template\", _invalid_format_specifier_templates) def test_invalid_format_specifier_expressions(template):", "fstr(\"{3 }\").format() == \"3\" assert fstr(\"expr={ {x: y for x, y in [(1,", "+ 2 = 3\" assert template.format(y=3) == \"1 + 3 = 4\" def", "__init__(self): self.i = 0 def __format__(self, spec): self.i += 1 return str(self.i) fstr(\"{x}", "('{\"\"\"x\"\"\" \"\"\"eric\"s\"\"\" \"\"\"y\"\"\"}', 'xeric\"sy'), ('{r\"\"\"x\"\"\" \"\"\"eric\"s\"\"\" \"\"\"y\"\"\"}', 'xeric\"sy'), ] @pytest.mark.parametrize(\"template, expected\", _triple_quoted_strings) def", "\"{}\" def test_odd_double_brace_replacement(): template = fstr(\"{{{x}}}\") assert template.format(x=1) == \"{1}\" def test_trailing_and_leading_space(): assert", "# Test lots of expressions and constants. 
assert fstr(\"{1} {'x'} {'y'} \" *", "\"{3!s:\", \"{3!s:3\", \"x{\", \"x{x\", \"{x\", \"{3:s\", \"{{{\", \"{{}}{\", \"{\", ] @pytest.mark.parametrize(\"template\", _bad_mismatched_braces) def", "\" * 1000 _format_specifier_width_precision_templates = [ \"result: {value:{width}.{precision}}\", \"result: {value:{width!r}.{precision}}\", \"result: {value:{width:0}.{precision:1}}\", \"result:", "(\"{(0,):x}\", TypeError), (\"{1000:j}\", ValueError), ] @pytest.mark.parametrize(\"bad, etype\", _causes_errors) def test_errors(bad, etype): with pytest.raises(etype):", "version_info import fstr def test_basic(): template = fstr(\"{x} + {y} = {x +", "assert fstr(template).format(value=value, width=10) == expected _invalid_format_specifier_templates = [\"{'s'!r{':10'}}\", \"{4:{/5}}\", \"{'s'!{'r'}}\"] @pytest.mark.parametrize(\"template\", _invalid_format_specifier_templates) def", "dict_inside_braces_with_padding(): template = fstr(\"{ {x: y} }\", x=\"a\") assert template.format(y=1) == \"{'a': 1}\"", "@pytest.mark.parametrize(\"template\", _bad_missing_expressions) def test_missing_expression(template): with pytest.raises(SyntaxError): fstr(template).format() _bad_parens_in_expressions = [\"{,}\", \"{,}\", \"{3)+(4}\", \"{\\n}\"]", "\"{'a': 1}\" def test_hash_in_string(): # These aren't comments, since they're in strings. 
d", "{y!r} = {x + y!r}\", x=\"a\") assert template.format(y=\"b\") == \"'a' + 'b' =", "= 2 # noqa: F841 assert fstr(\"{_A_GLOBAL} {a_local}\").evaluate() == \"1 2\" def test_format_language_with_inner_fstr():", "if version_info >= (3, 0): _backlashes_in_string_part.extend( [ (\"\\U00000394\", \"\\u0394\"), (r\"\\U00000394\", \"\\\\U00000394\"), (r\"\\U00000394\", \"\\\\U00000394\"),", "fstr(\"{ {x: y} }\", x=\"a\") assert template.format(y=1) == \"{'a': 1}\" def test_hash_in_string(): #", "spec): if not spec: return \"*\" return spec assert fstr(\"{Obj():x}\").format(Obj=Obj) == \"x\" assert", "\"3\" def test_call(): def foo(x): return \"x=\" + str(x) assert fstr(\"{foo(10)}\").format(foo=foo) == \"x=10\"", "}\").format() == \"expr={1: 2}\" assert fstr(\"expr={ {x: y for x, y in [(1,", "(\"{'''x'''}\", \"x\"), (\"{'''eric's'''}\", \"eric's\"), ('{\"x\" \"\"\"eric\"s\"\"\" \"y\"}', 'xeric\"sy'), ('{\"x\" \"\"\"eric\"s\"\"\"}', 'xeric\"s'), ('{\"\"\"eric\"s\"\"\" \"y\"}',", "\"Tru\" def test_conversions(): assert fstr(\"{3.14:10.10}\").format() == \" 3.14\" assert fstr(\"{3.14!s:10.10}\").format() == \"3.14 \"", "(context[\"x\"] + \" \") * 500 assert actual == expected # Test lots", "2), ]}}\").format() == \"expr={1: 2}\" def test_not_equal(): # There's a special test for", "= make_template(250, \"{x:{width}} \").format(**context) expected = (context[\"x\"] + \" \") * 500 assert", "{x + y}\", x=1) assert template.format(y=2) == \"1 + 2 = 3\" assert", "( fstr( \"\"\"{3+ 4}\"\"\" ).format() == \"7\" # noqa: W503 ) _empty_format_specifiers =", "500 assert actual == expected # Test lots of expressions and constants. 
assert", "{value:{ 1}{ 0:0}.{ precision:1}}\", ] _format_specifier_expression_expecteds = [ \"result: 12.35\", \"result: 12.35\", \"result:", "fstr(template).format() == expected _ok_lambdas = [ (\"{(lambda y:x*y)('8')!r}\", \"'88888'\"), (\"{(lambda y:x*y)('8')!r:10}\", \"'88888' \"),", "x=1) assert template.format(y=2) == \"1 + 2 = 3\" assert template.format(y=3) == \"1", "fstr(\"{1 + 2 }\").format() == \"3\" assert fstr(\"{ 1 + 2 }\").format() ==", "context = {\"x\": \"X\", \"width\": 1} def make_template(n, extra=\"\"): return fstr((\"{x} \" +", "] @pytest.mark.parametrize(\"template\", _bad_mismatched_braces) def test_bad_mismatched_braces(template): with pytest.raises(SyntaxError): fstr(template).format() _ok_mismatched_braces = [(\"{'{'}\", \"{\"), (\"{'}'}\",", "= [ \"result: 12.35\", \"result: 12.35\", \"result: 12.35\", \"result: 12.35\", \"result: 12.35\", \"", "== expected def test_missing_variable(): with pytest.raises(NameError): fstr(\"v:{value}\").format() def test_missing_format_spec(): class Obj: def __format__(self,", "with pytest.raises(SyntaxError): fstr(bad).format() _invalid_expressions = [\"{a[4)}\", \"{a(4]}\"] @pytest.mark.parametrize(\"invalid\", _invalid_expressions) def test_invalid_expressions(invalid): with pytest.raises(SyntaxError):", "\" assert fstr(\"{3.14!r:10.10}\").format() == \"3.14 \" if version_info >= (3, 0): assert fstr(\"{3.14!a:10.10}\").format()", "\"\\\\x20\"), (\"{2}\\x20\", \"2 \"), (\"{2}\\x20{3}\", \"2 3\"), (\"\\x20{3}\", \" 3\"), (\"2\\x20\", \"2 \"),", "\"2\\u03943\"), (\"\\N{GREEK CAPITAL LETTER DELTA}3\", \"\\u03943\"), ] ) @pytest.mark.parametrize(\"template, expected\", _backlashes_in_string_part) def test_backslashes_in_string_part(template,", "\"{ !xr:}\", \"{ !xr:a}\", \"{!}\", \"{:}\", # We find the empty expression before", "fstr(\"{3.14:10.10}\").format() == \" 3.14\" assert fstr(\"{3.14!s:10.10}\").format() == \"3.14 \" assert fstr(\"{3.14!r:10.10}\").format() == \"3.14", "\"result: 12.35\", \" 0xa\", 
\" 0xa\", \" -0xa\", \" -0xa\", \" 0xa\", ]", "0): _causes_errors = [(\"{1000:j}\", SyntaxError)] elif version_info < (3, 6): _causes_errors = [", "self.i = 0 def __format__(self, spec): self.i += 1 return str(self.i) fstr(\"{x} {x}\").format(x=X())", ">= (3, 0): assert fstr(\"{3.14!a:10.10}\").format() == \"3.14 \" assert fstr('{\"a\"}').format() == \"a\" assert", "fstr(\"\"\"{d[\"'\"]}\"\"\").format(d=d) == \"single-quote\" assert fstr(\"\"\"{d['\"']}\"\"\").format(d=d) == \"double-quote\" assert fstr('{d[\"foo\"]}').format(d=d) == \"bar\" assert fstr(\"{d['foo']}\").format(d=d)", "(\"{2}\\u0394{3}\", \"2\\u03943\"), (\"\\u0394{3}\", \"\\u03943\"), (\"\\x20\", \" \"), (r\"\\x20\", \"\\\\x20\"), (r\"\\x20\", \"\\\\x20\"), (\"{2}\\x20\", \"2", "\"result: {value:{ 1}{ 0:0}.{ precision:1}}\", ] _format_specifier_expression_expecteds = [ \"result: 12.35\", \"result: 12.35\",", "(r\"\\t\", \"\\\\t\"), (\"{2}\\t\", \"2\\t\"), (\"{2}\\t{3}\", \"2\\t3\"), (\"\\t{3}\", \"\\t3\"), (\"\\u0394\", \"\\u0394\"), (r\"\\u0394\", \"\\\\u0394\"), (r\"\\u0394\",", ">= (3, 0): assert fstr('{\"a\"!a}').format() == \"'a'\" # Not a conversion. assert fstr('{\"a!r\"}').format()", "\"{value:{'-'}#{1}0{'x'}}\"), (10, \"{value:#{3 != {4:5} and width}x}\"), ] @pytest.mark.parametrize(\"value, template\", _format_hex_specifier_templates) def test_format_hex_specifier_expressions(value,", "(\"\\x20{3}\", \" 3\"), (\"2\\x20\", \"2 \"), (\"2\\x203\", \"2 3\"), (\"\\x203\", \" 3\"), (\"\\\\{6*7}\",", "_empty_format_specifiers = [ (\"{x}\", \"test\"), (\"{x:}\", \"test\"), (\"{x!s:}\", \"test\"), (\"{x!r:}\", \"'test'\"), ] @pytest.mark.parametrize(\"template,", "assert fstr('{\"a\"!a}').format() == \"'a'\" # Not a conversion. 
assert fstr('{\"a!r\"}').format() == \"a!r\" #", "expected\", _triple_quoted_strings) def test_expressions_with_triple_quoted_strings(template, expected): assert fstr(template).format() == expected def test_missing_variable(): with pytest.raises(NameError):", "_empty_format_specifiers) def test_empty_format_specifier(template, expected): assert fstr(template).format(x=\"test\") == expected _bad_mismatched_braces = [ \"{{}\", \"{{}}}\",", "\"{value:#{1}0x}\"), (10, \"{value:{'#'}1{0}{'x'}}\"), (-10, \"{value:-{'#'}1{0}x}\"), (-10, \"{value:{'-'}#{1}0{'x'}}\"), (10, \"{value:#{3 != {4:5} and width}x}\"),", "def test_missing_format_spec(): class Obj: def __format__(self, spec): if not spec: return \"*\" return", "= [ (\"{'''x'''}\", \"x\"), (\"{'''eric's'''}\", \"eric's\"), ('{\"x\" \"\"\"eric\"s\"\"\" \"y\"}', 'xeric\"sy'), ('{\"x\" \"\"\"eric\"s\"\"\"}', 'xeric\"s'),", "fstr(\"{3!=4!s:.3}\").format() == \"Tru\" def test_conversions(): assert fstr(\"{3.14:10.10}\").format() == \" 3.14\" assert fstr(\"{3.14!s:10.10}\").format() ==", "Obj: def __format__(self, spec): if not spec: return \"*\" return spec assert fstr(\"{Obj():x}\").format(Obj=Obj)", "special # case in the f-string parser to look for != as not", "version_info < (3, 0): _causes_errors = [(\"{1000:j}\", SyntaxError)] elif version_info < (3, 6):", "2 = 3\" assert template.format(y=3) == \"1 + 3 = 4\" def test_basic_format_language():", "if version_info < (3, 0): _causes_errors = [(\"{1000:j}\", SyntaxError)] elif version_info < (3,", "assert fstr(\"{3.14!r:10.10}\").format() == \"3.14 \" if version_info >= (3, 0): assert fstr(\"{3.14!a:10.10}\").format() ==", "else: _causes_errors = [ (\"{(lambda: 0):x}\", TypeError), (\"{(0,):x}\", TypeError), (\"{1000:j}\", ValueError), ] @pytest.mark.parametrize(\"bad,", "+ 'b' = 'ab'\" assert template.format(y=\"c\") == \"'a' + 'c' = 'ac'\" _A_GLOBAL", "!= {4:5} and width}x}\"), ] @pytest.mark.parametrize(\"value, template\", _format_hex_specifier_templates) def 
test_format_hex_specifier_expressions(value, template): expected =", "4}\"\"\" ).format() == \"7\" # noqa: W503 ) _empty_format_specifiers = [ (\"{x}\", \"test\"),", "0):x}\", TypeError), (\"{(0,):x}\", TypeError), (\"{1000:j}\", ValueError), ] @pytest.mark.parametrize(\"bad, etype\", _causes_errors) def test_errors(bad, etype):", "F841 assert fstr(\"{_A_GLOBAL} {a_local}\").evaluate() == \"1 2\" def test_format_language_with_inner_fstr(): template = fstr(\"{x:{width}}\") assert", "[ (\"\\t\", \"\\t\"), (r\"\\t\", \"\\\\t\"), (\"{2}\\t\", \"2\\t\"), (\"{2}\\t{3}\", \"2\\t3\"), (\"\\t{3}\", \"\\t3\"), (\"\\u0394\", \"\\u0394\"),", "(r\"\\x20\", \"\\\\x20\"), (r\"\\x20\", \"\\\\x20\"), (\"{2}\\x20\", \"2 \"), (\"{2}\\x20{3}\", \"2 3\"), (\"\\x20{3}\", \" 3\"),", "they're in strings. d = {\"#\": \"hash\"} assert fstr(\"{'#'}\").format() == \"#\" assert fstr(\"{d['#']}\").format(d=d)", "\"2\\t\"), (\"{2}\\t{3}\", \"2\\t3\"), (\"\\t{3}\", \"\\t3\"), (\"\\u0394\", \"\\u0394\"), (r\"\\u0394\", \"\\\\u0394\"), (r\"\\u0394\", \"\\\\u0394\"), (\"{2}\\u0394\", \"2\\u0394\"),", "[r\"{\\}\", r\"{\\'a\\'}\", r\"{\\t3}\", \"{\\n}\"] @pytest.mark.parametrize(\"template\", _backslashes_in_expression) def test_no_backslashes_in_expression_part(template): with pytest.raises(SyntaxError): fstr(template).format() def test_newlines_in_expressions():", "bad_conversions = [ \"{3!g}\" \"{3!A}\" \"{3!3}\" \"{3!G}\" \"{3!!}\" \"{3!:}\" \"{3! s}\" # no", "additional to a space. 
\"{\\t\\f\\r\\n}\", # Catch the empty expression before the #", "(\"{x:}\", \"test\"), (\"{x!s:}\", \"test\"), (\"{x!r:}\", \"'test'\"), ] @pytest.mark.parametrize(\"template, expected\", _empty_format_specifiers) def test_empty_format_specifier(template, expected):", "\"x\" assert fstr(\"{Obj()}\").format(Obj=Obj) == \"*\" assert fstr(\"{Obj():}\").format(Obj=Obj) == \"*\" assert fstr(\"{3:}\").format() == \"3\"", "\"True\" assert fstr(\"{3!=4!s}\").format() == \"True\" assert fstr(\"{3!=4!s:.3}\").format() == \"Tru\" def test_conversions(): assert fstr(\"{3.14:10.10}\").format()", "\"\\\\t\"), (\"{2}\\t\", \"2\\t\"), (\"{2}\\t{3}\", \"2\\t3\"), (\"\\t{3}\", \"\\t3\"), (\"\\u0394\", \"\\u0394\"), (r\"\\u0394\", \"\\\\u0394\"), (r\"\\u0394\", \"\\\\u0394\"),", "0): assert fstr('{\"a\"!a}').format() == \"'a'\" # Not a conversion. assert fstr('{\"a!r\"}').format() == \"a!r\"", "else \" 0xa\" assert fstr(template).format(value=value, width=10) == expected _invalid_format_specifier_templates = [\"{'s'!r{':10'}}\", \"{4:{/5}}\", \"{'s'!{'r'}}\"]", "version_info >= (3, 0): _backlashes_in_string_part.extend( [ (\"\\U00000394\", \"\\u0394\"), (r\"\\U00000394\", \"\\\\U00000394\"), (r\"\\U00000394\", \"\\\\U00000394\"), (\"{2}\\U00000394\",", "make_template(n, extra=\"\"): return fstr((\"{x} \" + extra) * n) for n in range(250,", "\"2 \"), (\"{2}\\x20{3}\", \"2 3\"), (\"\\x20{3}\", \" 3\"), (\"2\\x20\", \"2 \"), (\"2\\x203\", \"2", "{x: y for x, y in [(1, 2), ]}}\").format() == \"expr={1: 2}\" def", "== \" 3\" template = fstr(\"{x:{width}.{precision}}\") assert template.format(x=1.2345, width=4, precision=2) == \" 1.2\"", "x y \" * 1000 _format_specifier_width_precision_templates = [ \"result: {value:{width}.{precision}}\", \"result: {value:{width!r}.{precision}}\", \"result:", "@pytest.mark.parametrize(\"value, template\", _format_hex_specifier_templates) def test_format_hex_specifier_expressions(value, template): expected = \" -0xa\" if value <", "3\" assert 
template.format(y=3) == \"1 + 3 = 4\" def test_basic_format_language(): template =", "\"bar\" def test_format_with_function(): def add(x, y): return x + y template = fstr(\"{add(x,", "12.34567} assert fstr(template).format(**context) == \"result: 12.35\" _format_hex_specifier_templates = [ (10, \"{value:#{1}0x}\"), (10, \"{value:{'#'}1{0}{'x'}}\"),", "test_dict(): d = {'\"': \"double-quote\", \"'\": \"single-quote\", \"foo\": \"bar\"} assert fstr(\"\"\"{d[\"'\"]}\"\"\").format(d=d) == \"single-quote\"", "(\"{2}\\x20\", \"2 \"), (\"{2}\\x20{3}\", \"2 3\"), (\"\\x20{3}\", \" 3\"), (\"2\\x20\", \"2 \"), (\"2\\x203\",", "(\"{(0,):x}\", TypeError), (\"{1000:j}\", SyntaxError), ] else: _causes_errors = [ (\"{(lambda: 0):x}\", TypeError), (\"{(0,):x}\",", "assert template.format(x=1.2345, width=4, precision=2) == \" 1.2\" def test_dict(): d = {'\"': \"double-quote\",", "test_trailing_and_leading_space(): assert fstr(\"{ 1 + 2}\").format() == \"3\" assert fstr(\"{1 + 2 }\").format()", "y}\", x=1) assert template.format(y=2) == \"1 + 2 = 3\" assert template.format(y=3) ==", "_A_GLOBAL = 1 def test_simple_fstr_evaluate(): a_local = 2 # noqa: F841 assert fstr(\"{_A_GLOBAL}", "\"{!}\", \"{:}\", # We find the empty expression before the # missing closing", "\"), (\"2\\x203\", \"2 3\"), (\"\\x203\", \" 3\"), (\"\\\\{6*7}\", \"\\\\42\"), (r\"\\{6*7}\", \"\\\\42\"), ] if", "\"\"\"eric\"s\"\"\" \"y\"}', 'xeric\"sy'), ('{\"x\" \"\"\"eric\"s\"\"\"}', 'xeric\"s'), ('{\"\"\"eric\"s\"\"\" \"y\"}', 'eric\"sy'), ('{\"\"\"x\"\"\" \"\"\"eric\"s\"\"\" \"y\"}', 'xeric\"sy'),", "fstr(\"{Obj():}\").format(Obj=Obj) == \"*\" assert fstr(\"{3:}\").format() == \"3\" assert fstr(\"{3!s:}\").format() == \"3\" def test_call():", "\"'test'\"), ] @pytest.mark.parametrize(\"template, expected\", _empty_format_specifiers) def test_empty_format_specifier(template, expected): assert fstr(template).format(x=\"test\") == expected _bad_mismatched_braces", "== \"#\" assert fstr(\"{d['#']}\").format(d=d) == 
\"hash\" @pytest.mark.parametrize(\"brace\", \"])}\") def test_unclosed_braces(brace): with pytest.raises(SyntaxError): fstr(\"{%s}\"", "\"{\\xa0}\", ] @pytest.mark.parametrize(\"template\", _bad_missing_expressions) def test_missing_expression(template): with pytest.raises(SyntaxError): fstr(template).format() _bad_parens_in_expressions = [\"{,}\", \"{,}\",", "\"'\": \"single-quote\", \"foo\": \"bar\"} assert fstr(\"\"\"{d[\"'\"]}\"\"\").format(d=d) == \"single-quote\" assert fstr(\"\"\"{d['\"']}\"\"\").format(d=d) == \"double-quote\" assert", "extra) * n) for n in range(250, 260): make_template(n).format(**context) # Test around 256.", "_format_hex_specifier_templates = [ (10, \"{value:#{1}0x}\"), (10, \"{value:{'#'}1{0}{'x'}}\"), (-10, \"{value:-{'#'}1{0}x}\"), (-10, \"{value:{'-'}#{1}0{'x'}}\"), (10, \"{value:#{3", "CAPITAL LETTER DELTA}{3}\", \"2\\u03943\"), (\"\\N{GREEK CAPITAL LETTER DELTA}{3}\", \"\\u03943\"), (\"2\\N{GREEK CAPITAL LETTER DELTA}\",", "_backslashes_in_expression = [r\"{\\}\", r\"{\\'a\\'}\", r\"{\\t3}\", \"{\\n}\"] @pytest.mark.parametrize(\"template\", _backslashes_in_expression) def test_no_backslashes_in_expression_part(template): with pytest.raises(SyntaxError): fstr(template).format()", "Test lots of expressions and constants. 
assert fstr(\"{1} {'x'} {'y'} \" * 1000).format()", "\"x=\" + str(x) assert fstr(\"{foo(10)}\").format(foo=foo) == \"x=10\" def test_leading_trailing_spaces(): assert fstr(\"{ 3}\").format() ==", "y template = fstr(\"{add(x, y)}\", add=add) assert template.format(x=1, y=2) == \"3\" def test_even_double_brace_replacement():", "DELTA}3\", \"2\\u03943\"), (\"\\N{GREEK CAPITAL LETTER DELTA}3\", \"\\u03943\"), ] ) @pytest.mark.parametrize(\"template, expected\", _backlashes_in_string_part) def", "== \"7\" # noqa: W503 ) _empty_format_specifiers = [ (\"{x}\", \"test\"), (\"{x:}\", \"test\"),", "sys import version_info import fstr def test_basic(): template = fstr(\"{x} + {y} =", "\") * i assert actual == expected actual = make_template(250, \"{x:{width}} \").format(**context) expected", ") @pytest.mark.parametrize(\"template, expected\", _backlashes_in_string_part) def test_backslashes_in_string_part(template, expected): assert fstr(template).format() == expected _backslashes_in_expression =", "fstr(template).format() == expected def test_missing_variable(): with pytest.raises(NameError): fstr(\"v:{value}\").format() def test_missing_format_spec(): class Obj: def", "[(1, 2), ]} }\").format() == \"expr={1: 2}\" assert fstr(\"expr={ {x: y for x,", "2}\").format() == \"3\" assert fstr(\"{1 + 2 }\").format() == \"3\" assert fstr(\"{ 1", "Python parser ignores also the following # whitespace characters in additional to a", "__format__(self, spec): self.i += 1 return str(self.i) fstr(\"{x} {x}\").format(x=X()) == \"1 2\" _bad_missing_expressions", "('{\"x\" \"\"\"eric\"s\"\"\" \"y\"}', 'xeric\"sy'), ('{\"x\" \"\"\"eric\"s\"\"\"}', 'xeric\"s'), ('{\"\"\"eric\"s\"\"\" \"y\"}', 'eric\"sy'), ('{\"\"\"x\"\"\" \"\"\"eric\"s\"\"\" \"y\"}',", "assert fstr(\"{ 3}\").format() == \"3\" assert fstr(\"{3 }\").format() == \"3\" assert fstr(\"{3 }\").format()", "_causes_errors = [(\"{1000:j}\", SyntaxError)] elif version_info < (3, 6): _causes_errors = [ (\"{(lambda:", "CAPITAL LETTER 
DELTA}\", \"\\u0394\"), (\"{2}\\N{GREEK CAPITAL LETTER DELTA}\", \"2\\u0394\"), (\"{2}\\N{GREEK CAPITAL LETTER DELTA}{3}\",", "[ (\"\\U00000394\", \"\\u0394\"), (r\"\\U00000394\", \"\\\\U00000394\"), (r\"\\U00000394\", \"\\\\U00000394\"), (\"{2}\\U00000394\", \"2\\u0394\"), (\"{2}\\U00000394{3}\", \"2\\u03943\"), (\"\\U00000394{3}\", \"\\u03943\"),", "\"{ !xr:a}\", \"{!}\", \"{:}\", # We find the empty expression before the #", "# no space before conversion char \"{x!s{y}}\", \"{3!ss}\", \"{3!ss:}\", \"{3!ss:s}\", ] for bad", "{'\"': \"double-quote\", \"'\": \"single-quote\", \"foo\": \"bar\"} assert fstr(\"\"\"{d[\"'\"]}\"\"\").format(d=d) == \"single-quote\" assert fstr(\"\"\"{d['\"']}\"\"\").format(d=d) ==", "\"{!x:a}\", \"{ !xr:}\", \"{ !xr:a}\", \"{!}\", \"{:}\", # We find the empty expression", "fstr(\"{x!r} + {y!r} = {x + y!r}\", x=\"a\") assert template.format(y=\"b\") == \"'a' +", "with pytest.raises(SyntaxError): fstr(template).format() _ok_mismatched_braces = [(\"{'{'}\", \"{\"), (\"{'}'}\", \"}\")] @pytest.mark.parametrize(\"template, expected\", _ok_mismatched_braces) def", "[\"{a[4)}\", \"{a(4]}\"] @pytest.mark.parametrize(\"invalid\", _invalid_expressions) def test_invalid_expressions(invalid): with pytest.raises(SyntaxError): fstr(invalid).format() if version_info < (3,", "test_expressions_with_triple_quoted_strings(template, expected): assert fstr(template).format() == expected def test_missing_variable(): with pytest.raises(NameError): fstr(\"v:{value}\").format() def test_missing_format_spec():", "(\"{x!s:}\", \"test\"), (\"{x!r:}\", \"'test'\"), ] @pytest.mark.parametrize(\"template, expected\", _empty_format_specifiers) def test_empty_format_specifier(template, expected): assert fstr(template).format(x=\"test\")", "!r. 
assert fstr(\"{3!=4}\").format() == \"True\" assert fstr(\"{3!=4:}\").format() == \"True\" assert fstr(\"{3!=4!s}\").format() == \"True\"", "# The Python parser ignores also the following # whitespace characters in additional", "to a space. \"{\\t\\f\\r\\n}\", # Catch the empty expression before the # invalid", "[ (\"{'''x'''}\", \"x\"), (\"{'''eric's'''}\", \"eric's\"), ('{\"x\" \"\"\"eric\"s\"\"\" \"y\"}', 'xeric\"sy'), ('{\"x\" \"\"\"eric\"s\"\"\"}', 'xeric\"s'), ('{\"\"\"eric\"s\"\"\"", "\"'a'\" # Not a conversion. assert fstr('{\"a!r\"}').format() == \"a!r\" # Not a conversion,", "= 'ac'\" _A_GLOBAL = 1 def test_simple_fstr_evaluate(): a_local = 2 # noqa: F841", "(r\"\\x20\", \"\\\\x20\"), (\"{2}\\x20\", \"2 \"), (\"{2}\\x20{3}\", \"2 3\"), (\"\\x20{3}\", \" 3\"), (\"2\\x20\", \"2", "== \"3\" def dict_inside_braces_with_padding(): template = fstr(\"{ {x: y} }\", x=\"a\") assert template.format(y=1)", "i in range(250, 260): actual = make_template(i).format(**context) expected = (context[\"x\"] + \" \")", "for !s or !r. assert fstr(\"{3!=4}\").format() == \"True\" assert fstr(\"{3!=4:}\").format() == \"True\" assert", "but show that ! is allowed in a format spec. assert fstr(\"{3.14:!<10.10}\").format() ==", "template = fstr(\"{add(x, y)}\", add=add) assert template.format(x=1, y=2) == \"3\" def test_even_double_brace_replacement(): template", "There's a special test for this because there's a special # case in", "before the # missing closing brace. 
\"{!\", \"{!s:\", \"{:\", \"{:x\", \"{\\xa0}\", ] @pytest.mark.parametrize(\"template\",", "@pytest.mark.parametrize(\"template\", _bad_mismatched_braces) def test_bad_mismatched_braces(template): with pytest.raises(SyntaxError): fstr(template).format() _ok_mismatched_braces = [(\"{'{'}\", \"{\"), (\"{'}'}\", \"}\")]", "fstr def test_basic(): template = fstr(\"{x} + {y} = {x + y}\", x=1)", "test_empty_format_specifier(template, expected): assert fstr(template).format(x=\"test\") == expected _bad_mismatched_braces = [ \"{{}\", \"{{}}}\", \"}\", \"x}\",", "(\"{x!r:}\", \"'test'\"), ] @pytest.mark.parametrize(\"template, expected\", _empty_format_specifiers) def test_empty_format_specifier(template, expected): assert fstr(template).format(x=\"test\") == expected", "\"x{x\", \"{x\", \"{3:s\", \"{{{\", \"{{}}{\", \"{\", ] @pytest.mark.parametrize(\"template\", _bad_mismatched_braces) def test_bad_mismatched_braces(template): with pytest.raises(SyntaxError):", "def test_trailing_and_leading_space(): assert fstr(\"{ 1 + 2}\").format() == \"3\" assert fstr(\"{1 + 2", "__format__(self, spec): if not spec: return \"*\" return spec assert fstr(\"{Obj():x}\").format(Obj=Obj) == \"x\"", "= {x + y}\", x=1) assert template.format(y=2) == \"1 + 2 = 3\"", "\"2 3\"), (\"\\x203\", \" 3\"), (\"\\\\{6*7}\", \"\\\\42\"), (r\"\\{6*7}\", \"\\\\42\"), ] if version_info >=", "\"result: {value:{1}{0:0}.{precision:1}}\", \"result: {value:{ 1}{ 0:0}.{ precision:1}}\", ] _format_specifier_expression_expecteds = [ \"result: 12.35\",", "\"y\"}', 'eric\"sy'), ('{\"\"\"x\"\"\" \"\"\"eric\"s\"\"\" \"y\"}', 'xeric\"sy'), ('{\"\"\"x\"\"\" \"\"\"eric\"s\"\"\" \"\"\"y\"\"\"}', 'xeric\"sy'), ('{r\"\"\"x\"\"\" \"\"\"eric\"s\"\"\" \"\"\"y\"\"\"}',", "fstr(\"{Obj()}\").format(Obj=Obj) == \"*\" assert fstr(\"{Obj():}\").format(Obj=Obj) == \"*\" assert fstr(\"{3:}\").format() == \"3\" assert fstr(\"{3!s:}\").format()", "with pytest.raises(SyntaxError): fstr(\"{%s}\" % brace).format() def 
test_many_expressions(): context = {\"x\": \"X\", \"width\": 1}", "# These aren't comments, since they're in strings. d = {\"#\": \"hash\"} assert", "\"hash\"} assert fstr(\"{'#'}\").format() == \"#\" assert fstr(\"{d['#']}\").format(d=d) == \"hash\" @pytest.mark.parametrize(\"brace\", \"])}\") def test_unclosed_braces(brace):", "[\"{,}\", \"{,}\", \"{3)+(4}\", \"{\\n}\"] @pytest.mark.parametrize(\"template\", _bad_parens_in_expressions) def test_bad_parens_in_expressions(template): with pytest.raises(SyntaxError): fstr(template).format() _backlashes_in_string_part =", "assert fstr(\"{ 3}\").format() == \"3\" assert fstr(\"{ 3}\").format() == \"3\" assert fstr(\"{3 }\").format()", "with pytest.raises(SyntaxError): fstr(template).format() def test_side_effect_order(): class X: def __init__(self): self.i = 0 def", "== \"1 + 2 = 3\" assert template.format(y=3) == \"1 + 3 =", "expected\", _backlashes_in_string_part) def test_backslashes_in_string_part(template, expected): assert fstr(template).format() == expected _backslashes_in_expression = [r\"{\\}\", r\"{\\'a\\'}\",", "3}\").format() == \"3\" assert fstr(\"{ 3}\").format() == \"3\" assert fstr(\"{3 }\").format() == \"3\"", "]}}\").format() == \"expr={1: 2}\" def test_not_equal(): # There's a special test for this", "test_format_hex_specifier_expressions(value, template): expected = \" -0xa\" if value < 0 else \" 0xa\"", "def test_hash_in_string(): # These aren't comments, since they're in strings. d = {\"#\":", "extra=\"\"): return fstr((\"{x} \" + extra) * n) for n in range(250, 260):", "y:x*y)('8'):10}\", \"88888 \"), ] @pytest.mark.parametrize(\"template, expected\", _ok_lambdas) def test_lambda(template, expected): assert fstr(template, x=5).format()", "and constants. 
assert fstr(\"{1} {'x'} {'y'} \" * 1000).format() == \"1 x y", "def test_format_language_with_inner_fstr(): template = fstr(\"{x:{width}}\") assert template.format(x=10, width=3) == \" 10\" assert template.format(x=3,", "assert template.format(y=\"c\") == \"'a' + 'c' = 'ac'\" _A_GLOBAL = 1 def test_simple_fstr_evaluate():", "\"), (\"{2}\\x20{3}\", \"2 3\"), (\"\\x20{3}\", \" 3\"), (\"2\\x20\", \"2 \"), (\"2\\x203\", \"2 3\"),", "+ {y!r} = {x + y!r}\", x=\"a\") assert template.format(y=\"b\") == \"'a' + 'b'", "empty expression before the # invalid conversion. \"{!x}\", \"{ !xr}\", \"{!x:}\", \"{!x:a}\", \"{", "if not spec: return \"*\" return spec assert fstr(\"{Obj():x}\").format(Obj=Obj) == \"x\" assert fstr(\"{Obj()}\").format(Obj=Obj)", "actual == expected # Test lots of expressions and constants. assert fstr(\"{1} {'x'}", "fstr(\"{x:{width}}\") assert template.format(x=10, width=3) == \" 10\" assert template.format(x=3, width=4) == \" 3\"", "\"{3!\", \"{3:\", \"{3!s\", \"{3!s:\", \"{3!s:3\", \"x{\", \"x{x\", \"{x\", \"{3:s\", \"{{{\", \"{{}}{\", \"{\", ]", "\"1 + 3 = 4\" def test_basic_format_language(): template = fstr(\"{x!r} + {y!r} =", "== \"*\" assert fstr(\"{3:}\").format() == \"3\" assert fstr(\"{3!s:}\").format() == \"3\" def test_call(): def", "test_simple_fstr_evaluate(): a_local = 2 # noqa: F841 assert fstr(\"{_A_GLOBAL} {a_local}\").evaluate() == \"1 2\"", "260): make_template(n).format(**context) # Test around 256. for i in range(250, 260): actual =", "1 + 2}\").format() == \"3\" assert fstr(\"{1 + 2 }\").format() == \"3\" assert", "\" 0xa\", \" -0xa\", \" -0xa\", \" 0xa\", ] @pytest.mark.parametrize(\"template\", _format_specifier_width_precision_templates) def test_format_width_precision_specifier_expressions(template):", "expression. Normally it would, while looking for !s or !r. 
assert fstr(\"{3!=4}\").format() ==", "assert fstr(\"{3 }\").format() == \"3\" assert fstr(\"{3 }\").format() == \"3\" assert fstr(\"expr={ {x:", "would, while looking for !s or !r. assert fstr(\"{3!=4}\").format() == \"True\" assert fstr(\"{3!=4:}\").format()", "\"3\" assert fstr(\"{ 1 + 2 }\").format() == \"3\" def dict_inside_braces_with_padding(): template =", "actual = make_template(i).format(**context) expected = (context[\"x\"] + \" \") * i assert actual", "fstr(\"{d['foo']}\").format(d=d) == \"bar\" def test_format_with_function(): def add(x, y): return x + y template", "\"{10:{ }}\", \" { } \", # The Python parser ignores also the", "assert actual == expected # Test lots of expressions and constants. assert fstr(\"{1}", "}}\", \" { } \", # The Python parser ignores also the following", "expected\", _empty_format_specifiers) def test_empty_format_specifier(template, expected): assert fstr(template).format(x=\"test\") == expected _bad_mismatched_braces = [ \"{{}\",", "_triple_quoted_strings = [ (\"{'''x'''}\", \"x\"), (\"{'''eric's'''}\", \"eric's\"), ('{\"x\" \"\"\"eric\"s\"\"\" \"y\"}', 'xeric\"sy'), ('{\"x\" \"\"\"eric\"s\"\"\"}',", "[ (\"{(lambda: 0):x}\", TypeError), (\"{(0,):x}\", TypeError), (\"{1000:j}\", SyntaxError), ] else: _causes_errors = [", "\"result: {value:{width}.{precision}}\", \"result: {value:{width!r}.{precision}}\", \"result: {value:{width:0}.{precision:1}}\", \"result: {value:{1}{0:0}.{precision:1}}\", \"result: {value:{ 1}{ 0:0}.{ precision:1}}\",", "def __format__(self, spec): self.i += 1 return str(self.i) fstr(\"{x} {x}\").format(x=X()) == \"1 2\"", "2}\" def test_not_equal(): # There's a special test for this because there's a", "{x}\").format(x=X()) == \"1 2\" _bad_missing_expressions = [ \"{}\", \"{ '\" \" {} \",", "== expected _backslashes_in_expression = [r\"{\\}\", r\"{\\'a\\'}\", r\"{\\t3}\", \"{\\n}\"] @pytest.mark.parametrize(\"template\", _backslashes_in_expression) def test_no_backslashes_in_expression_part(template): 
with", "class X: def __init__(self): self.i = 0 def __format__(self, spec): self.i += 1", "_bad_parens_in_expressions) def test_bad_parens_in_expressions(template): with pytest.raises(SyntaxError): fstr(template).format() _backlashes_in_string_part = [ (\"\\t\", \"\\t\"), (r\"\\t\", \"\\\\t\"),", "\"{3! s}\" # no space before conversion char \"{x!s{y}}\", \"{3!ss}\", \"{3!ss:}\", \"{3!ss:s}\", ]", "== \"Tru\" def test_conversions(): assert fstr(\"{3.14:10.10}\").format() == \" 3.14\" assert fstr(\"{3.14!s:10.10}\").format() == \"3.14", "fstr(\"{{}}\") assert template.format() == \"{}\" def test_odd_double_brace_replacement(): template = fstr(\"{{{x}}}\") assert template.format(x=1) ==", "elif version_info < (3, 6): _causes_errors = [ (\"{(lambda: 0):x}\", TypeError), (\"{(0,):x}\", TypeError),", "(3, 6): _causes_errors = [ (\"{(lambda: 0):x}\", TypeError), (\"{(0,):x}\", TypeError), (\"{1000:j}\", SyntaxError), ]", "assert fstr(\"{3!=4}\").format() == \"True\" assert fstr(\"{3!=4:}\").format() == \"True\" assert fstr(\"{3!=4!s}\").format() == \"True\" assert", "= (context[\"x\"] + \" \") * 500 assert actual == expected # Test", "\"}\", \"x}\", \"x}x\", \"{3:}>10}\", \"{3:}}>10}\", \"{3:{{>10}\", \"{3\", \"{3!\", \"{3:\", \"{3!s\", \"{3!s:\", \"{3!s:3\", \"x{\",", "+ {y} = {x + y}\", x=1) assert template.format(y=2) == \"1 + 2", "test_even_double_brace_replacement(): template = fstr(\"{{}}\") assert template.format() == \"{}\" def test_odd_double_brace_replacement(): template = fstr(\"{{{x}}}\")", "(\"2\\x203\", \"2 3\"), (\"\\x203\", \" 3\"), (\"\\\\{6*7}\", \"\\\\42\"), (r\"\\{6*7}\", \"\\\\42\"), ] if version_info", "= {\"#\": \"hash\"} assert fstr(\"{'#'}\").format() == \"#\" assert fstr(\"{d['#']}\").format(d=d) == \"hash\" @pytest.mark.parametrize(\"brace\", \"])}\")", "test_newlines_in_expressions(): assert fstr(\"{0}\").format() == \"0\" assert ( fstr( \"\"\"{3+ 4}\"\"\" ).format() == \"7\"", "== \"3.14 \" if version_info >= (3, 0): assert 
fstr(\"{3.14!a:10.10}\").format() == \"3.14 \"", "_backslashes_in_expression) def test_no_backslashes_in_expression_part(template): with pytest.raises(SyntaxError): fstr(template).format() def test_newlines_in_expressions(): assert fstr(\"{0}\").format() == \"0\" assert", "assert fstr(\"expr={ {x: y for x, y in [(1, 2), ]} }\").format() ==", "assert fstr(\"{3.14!s:10.10}\").format() == \"3.14 \" assert fstr(\"{3.14!r:10.10}\").format() == \"3.14 \" if version_info >=", "_ok_lambdas = [ (\"{(lambda y:x*y)('8')!r}\", \"'88888'\"), (\"{(lambda y:x*y)('8')!r:10}\", \"'88888' \"), (\"{(lambda y:x*y)('8'):10}\", \"88888", "1}{ 0:0}.{ precision:1}}\", ] _format_specifier_expression_expecteds = [ \"result: 12.35\", \"result: 12.35\", \"result: 12.35\",", "test_backslashes_in_string_part(template, expected): assert fstr(template).format() == expected _backslashes_in_expression = [r\"{\\}\", r\"{\\'a\\'}\", r\"{\\t3}\", \"{\\n}\"] @pytest.mark.parametrize(\"template\",", "fstr(\"{3.14!s:10.10}\").format() == \"3.14 \" assert fstr(\"{3.14!r:10.10}\").format() == \"3.14 \" if version_info >= (3,", "# case in the f-string parser to look for != as not ending", "\"{!s:\", \"{:\", \"{:x\", \"{\\xa0}\", ] @pytest.mark.parametrize(\"template\", _bad_missing_expressions) def test_missing_expression(template): with pytest.raises(SyntaxError): fstr(template).format() _bad_parens_in_expressions", "strings. 
d = {\"#\": \"hash\"} assert fstr(\"{'#'}\").format() == \"#\" assert fstr(\"{d['#']}\").format(d=d) == \"hash\"", "[ \"result: 12.35\", \"result: 12.35\", \"result: 12.35\", \"result: 12.35\", \"result: 12.35\", \" 0xa\",", "d = {'\"': \"double-quote\", \"'\": \"single-quote\", \"foo\": \"bar\"} assert fstr(\"\"\"{d[\"'\"]}\"\"\").format(d=d) == \"single-quote\" assert", "add=add) assert template.format(x=1, y=2) == \"3\" def test_even_double_brace_replacement(): template = fstr(\"{{}}\") assert template.format()", "(\"\\t{3}\", \"\\t3\"), (\"\\u0394\", \"\\u0394\"), (r\"\\u0394\", \"\\\\u0394\"), (r\"\\u0394\", \"\\\\u0394\"), (\"{2}\\u0394\", \"2\\u0394\"), (\"{2}\\u0394{3}\", \"2\\u03943\"), (\"\\u0394{3}\",", "\"7\" # noqa: W503 ) _empty_format_specifiers = [ (\"{x}\", \"test\"), (\"{x:}\", \"test\"), (\"{x!s:}\",", "\"{3!ss:}\", \"{3!ss:s}\", ] for bad in bad_conversions: with pytest.raises(SyntaxError): fstr(bad).format() _invalid_expressions = [\"{a[4)}\",", "fstr(\"{'#'}\").format() == \"#\" assert fstr(\"{d['#']}\").format(d=d) == \"hash\" @pytest.mark.parametrize(\"brace\", \"])}\") def test_unclosed_braces(brace): with pytest.raises(SyntaxError):", "== \"x\" assert fstr(\"{Obj()}\").format(Obj=Obj) == \"*\" assert fstr(\"{Obj():}\").format(Obj=Obj) == \"*\" assert fstr(\"{3:}\").format() ==", "Normally it would, while looking for !s or !r. assert fstr(\"{3!=4}\").format() == \"True\"", "# Not a conversion. assert fstr('{\"a!r\"}').format() == \"a!r\" # Not a conversion, but", "test_missing_expression(template): with pytest.raises(SyntaxError): fstr(template).format() _bad_parens_in_expressions = [\"{,}\", \"{,}\", \"{3)+(4}\", \"{\\n}\"] @pytest.mark.parametrize(\"template\", _bad_parens_in_expressions) def", "== \"x=10\" def test_leading_trailing_spaces(): assert fstr(\"{ 3}\").format() == \"3\" assert fstr(\"{ 3}\").format() ==", "of expressions and constants. 
assert fstr(\"{1} {'x'} {'y'} \" * 1000).format() == \"1", "assert fstr(\"{_A_GLOBAL} {a_local}\").evaluate() == \"1 2\" def test_format_language_with_inner_fstr(): template = fstr(\"{x:{width}}\") assert template.format(x=10,", "< (3, 0): _causes_errors = [(\"{1000:j}\", SyntaxError)] elif version_info < (3, 6): _causes_errors", "Test around 256. for i in range(250, 260): actual = make_template(i).format(**context) expected =", "\"{:\", \"{:x\", \"{\\xa0}\", ] @pytest.mark.parametrize(\"template\", _bad_missing_expressions) def test_missing_expression(template): with pytest.raises(SyntaxError): fstr(template).format() _bad_parens_in_expressions =", "!s or !r. assert fstr(\"{3!=4}\").format() == \"True\" assert fstr(\"{3!=4:}\").format() == \"True\" assert fstr(\"{3!=4!s}\").format()", "assert fstr('{\"a\"}').format() == \"a\" assert fstr('{\"a\"!r}').format() == \"'a'\" if version_info >= (3, 0):", "\"{3!A}\" \"{3!3}\" \"{3!G}\" \"{3!!}\" \"{3!:}\" \"{3! s}\" # no space before conversion char", "assert fstr(\"\"\"{d['\"']}\"\"\").format(d=d) == \"double-quote\" assert fstr('{d[\"foo\"]}').format(d=d) == \"bar\" assert fstr(\"{d['foo']}\").format(d=d) == \"bar\" def", "return spec assert fstr(\"{Obj():x}\").format(Obj=Obj) == \"x\" assert fstr(\"{Obj()}\").format(Obj=Obj) == \"*\" assert fstr(\"{Obj():}\").format(Obj=Obj) ==", "test_not_equal(): # There's a special test for this because there's a special #", "y)}\", add=add) assert template.format(x=1, y=2) == \"3\" def test_even_double_brace_replacement(): template = fstr(\"{{}}\") assert", "template = fstr(\"{x:{width}.{precision}}\") assert template.format(x=1.2345, width=4, precision=2) == \" 1.2\" def test_dict(): d", "\"\\u0394\"), (r\"\\U00000394\", \"\\\\U00000394\"), (r\"\\U00000394\", \"\\\\U00000394\"), (\"{2}\\U00000394\", \"2\\u0394\"), (\"{2}\\U00000394{3}\", \"2\\u03943\"), (\"\\U00000394{3}\", \"\\u03943\"), (\"\\N{GREEK CAPITAL", "test_invalid_format_specifier_expressions(template): with 
pytest.raises(SyntaxError): fstr(template).format() def test_side_effect_order(): class X: def __init__(self): self.i = 0", "make_template(n).format(**context) # Test around 256. for i in range(250, 260): actual = make_template(i).format(**context)", "{x: y for x, y in [(1, 2), ]} }\").format() == \"expr={1: 2}\"", "\"2 3\"), (\"\\x20{3}\", \" 3\"), (\"2\\x20\", \"2 \"), (\"2\\x203\", \"2 3\"), (\"\\x203\", \"", "[(\"{1000:j}\", SyntaxError)] elif version_info < (3, 6): _causes_errors = [ (\"{(lambda: 0):x}\", TypeError),", "TypeError), (\"{(0,):x}\", TypeError), (\"{1000:j}\", SyntaxError), ] else: _causes_errors = [ (\"{(lambda: 0):x}\", TypeError),", "_format_hex_specifier_templates) def test_format_hex_specifier_expressions(value, template): expected = \" -0xa\" if value < 0 else", "[ (10, \"{value:#{1}0x}\"), (10, \"{value:{'#'}1{0}{'x'}}\"), (-10, \"{value:-{'#'}1{0}x}\"), (-10, \"{value:{'-'}#{1}0{'x'}}\"), (10, \"{value:#{3 != {4:5}", "+ y!r}\", x=\"a\") assert template.format(y=\"b\") == \"'a' + 'b' = 'ab'\" assert template.format(y=\"c\")", "n) for n in range(250, 260): make_template(n).format(**context) # Test around 256. 
for i", "= [r\"{\\}\", r\"{\\'a\\'}\", r\"{\\t3}\", \"{\\n}\"] @pytest.mark.parametrize(\"template\", _backslashes_in_expression) def test_no_backslashes_in_expression_part(template): with pytest.raises(SyntaxError): fstr(template).format() def", "actual = make_template(250, \"{x:{width}} \").format(**context) expected = (context[\"x\"] + \" \") * 500", "\"result: {value:{width!r}.{precision}}\", \"result: {value:{width:0}.{precision:1}}\", \"result: {value:{1}{0:0}.{precision:1}}\", \"result: {value:{ 1}{ 0:0}.{ precision:1}}\", ] _format_specifier_expression_expecteds", "_invalid_format_specifier_templates) def test_invalid_format_specifier_expressions(template): with pytest.raises(SyntaxError): fstr(template).format() def test_side_effect_order(): class X: def __init__(self): self.i", "\"\\u03943\"), (\"\\N{GREEK CAPITAL LETTER DELTA}\", \"\\u0394\"), (\"{2}\\N{GREEK CAPITAL LETTER DELTA}\", \"2\\u0394\"), (\"{2}\\N{GREEK CAPITAL", "\" 3.14\" assert fstr(\"{3.14!s:10.10}\").format() == \"3.14 \" assert fstr(\"{3.14!r:10.10}\").format() == \"3.14 \" if", "pytest.raises(SyntaxError): fstr(template).format() _bad_parens_in_expressions = [\"{,}\", \"{,}\", \"{3)+(4}\", \"{\\n}\"] @pytest.mark.parametrize(\"template\", _bad_parens_in_expressions) def test_bad_parens_in_expressions(template): with", "\"*\" return spec assert fstr(\"{Obj():x}\").format(Obj=Obj) == \"x\" assert fstr(\"{Obj()}\").format(Obj=Obj) == \"*\" assert fstr(\"{Obj():}\").format(Obj=Obj)", "CAPITAL LETTER DELTA}\", \"2\\u0394\"), (\"2\\N{GREEK CAPITAL LETTER DELTA}3\", \"2\\u03943\"), (\"\\N{GREEK CAPITAL LETTER DELTA}3\",", "fstr(\"{x} + {y} = {x + y}\", x=1) assert template.format(y=2) == \"1 +", "expected = (context[\"x\"] + \" \") * 500 assert actual == expected #", "= [\"{a[4)}\", \"{a(4]}\"] @pytest.mark.parametrize(\"invalid\", _invalid_expressions) def test_invalid_expressions(invalid): with pytest.raises(SyntaxError): fstr(invalid).format() if version_info <", "template = 
fstr(\"{x:{width}}\") assert template.format(x=10, width=3) == \" 10\" assert template.format(x=3, width=4) ==", "a conversion. assert fstr('{\"a!r\"}').format() == \"a!r\" # Not a conversion, but show that", "== \"3\" assert fstr(\"{3 }\").format() == \"3\" assert fstr(\"{3 }\").format() == \"3\" assert", "this because there's a special # case in the f-string parser to look", "(\"{2}\\t\", \"2\\t\"), (\"{2}\\t{3}\", \"2\\t3\"), (\"\\t{3}\", \"\\t3\"), (\"\\u0394\", \"\\u0394\"), (r\"\\u0394\", \"\\\\u0394\"), (r\"\\u0394\", \"\\\\u0394\"), (\"{2}\\u0394\",", "== \"0\" assert ( fstr( \"\"\"{3+ 4}\"\"\" ).format() == \"7\" # noqa: W503", "i assert actual == expected actual = make_template(250, \"{x:{width}} \").format(**context) expected = (context[\"x\"]", "\"1 + 2 = 3\" assert template.format(y=3) == \"1 + 3 = 4\"", "fstr('{\"a\"!r}').format() == \"'a'\" if version_info >= (3, 0): assert fstr('{\"a\"!a}').format() == \"'a'\" #", "] @pytest.mark.parametrize(\"template\", _format_specifier_width_precision_templates) def test_format_width_precision_specifier_expressions(template): context = {\"width\": 10, \"precision\": 4, \"value\": 12.34567}", "}\").format() == \"3\" assert fstr(\"expr={ {x: y for x, y in [(1, 2),", "test for this because there's a special # case in the f-string parser", "3.14\" assert fstr(\"{3.14!s:10.10}\").format() == \"3.14 \" assert fstr(\"{3.14!r:10.10}\").format() == \"3.14 \" if version_info", "\"\"\"eric\"s\"\"\"}', 'xeric\"s'), ('{\"\"\"eric\"s\"\"\" \"y\"}', 'eric\"sy'), ('{\"\"\"x\"\"\" \"\"\"eric\"s\"\"\" \"y\"}', 'xeric\"sy'), ('{\"\"\"x\"\"\" \"\"\"eric\"s\"\"\" \"\"\"y\"\"\"}', 'xeric\"sy'),", "\"{3!G}\" \"{3!!}\" \"{3!:}\" \"{3! 
s}\" # no space before conversion char \"{x!s{y}}\", \"{3!ss}\",", "\" assert fstr('{\"a\"}').format() == \"a\" assert fstr('{\"a\"!r}').format() == \"'a'\" if version_info >= (3,", ">= (3, 0): _backlashes_in_string_part.extend( [ (\"\\U00000394\", \"\\u0394\"), (r\"\\U00000394\", \"\\\\U00000394\"), (r\"\\U00000394\", \"\\\\U00000394\"), (\"{2}\\U00000394\", \"2\\u0394\"),", "_causes_errors = [ (\"{(lambda: 0):x}\", TypeError), (\"{(0,):x}\", TypeError), (\"{1000:j}\", ValueError), ] @pytest.mark.parametrize(\"bad, etype\",", "template.format(x=1.2345, width=4, precision=2) == \" 1.2\" def test_dict(): d = {'\"': \"double-quote\", \"'\":", "These aren't comments, since they're in strings. d = {\"#\": \"hash\"} assert fstr(\"{'#'}\").format()", "[ \"{3!g}\" \"{3!A}\" \"{3!3}\" \"{3!G}\" \"{3!!}\" \"{3!:}\" \"{3! s}\" # no space before", "\"{3!g}\" \"{3!A}\" \"{3!3}\" \"{3!G}\" \"{3!!}\" \"{3!:}\" \"{3! s}\" # no space before conversion", "(\"{(lambda: 0):x}\", TypeError), (\"{(0,):x}\", TypeError), (\"{1000:j}\", SyntaxError), ] else: _causes_errors = [ (\"{(lambda:", "'c' = 'ac'\" _A_GLOBAL = 1 def test_simple_fstr_evaluate(): a_local = 2 # noqa:", "\"x{\", \"x{x\", \"{x\", \"{3:s\", \"{{{\", \"{{}}{\", \"{\", ] @pytest.mark.parametrize(\"template\", _bad_mismatched_braces) def test_bad_mismatched_braces(template): with", "(3, 0): assert fstr(\"{3.14!a:10.10}\").format() == \"3.14 \" assert fstr('{\"a\"}').format() == \"a\" assert fstr('{\"a\"!r}').format()", "= fstr(\"{x:{width}}\") assert template.format(x=10, width=3) == \" 10\" assert template.format(x=3, width=4) == \"", "def test_unclosed_braces(brace): with pytest.raises(SyntaxError): fstr(\"{%s}\" % brace).format() def test_many_expressions(): context = {\"x\": \"X\",", "the empty expression before the # missing closing brace. 
\"{!\", \"{!s:\", \"{:\", \"{:x\",", "DELTA}3\", \"\\u03943\"), ] ) @pytest.mark.parametrize(\"template, expected\", _backlashes_in_string_part) def test_backslashes_in_string_part(template, expected): assert fstr(template).format() ==", "is allowed in a format spec. assert fstr(\"{3.14:!<10.10}\").format() == \"3.14!!!!!!\" bad_conversions = [", "def test_format_with_function(): def add(x, y): return x + y template = fstr(\"{add(x, y)}\",", "expected # Test lots of expressions and constants. assert fstr(\"{1} {'x'} {'y'} \"", "before the # invalid conversion. \"{!x}\", \"{ !xr}\", \"{!x:}\", \"{!x:a}\", \"{ !xr:}\", \"{", "!xr:a}\", \"{!}\", \"{:}\", # We find the empty expression before the # missing", "\"3\" assert fstr(\"{3 }\").format() == \"3\" assert fstr(\"expr={ {x: y for x, y", "bad_conversions: with pytest.raises(SyntaxError): fstr(bad).format() _invalid_expressions = [\"{a[4)}\", \"{a(4]}\"] @pytest.mark.parametrize(\"invalid\", _invalid_expressions) def test_invalid_expressions(invalid): with", "fstr(invalid).format() if version_info < (3, 0): _causes_errors = [(\"{1000:j}\", SyntaxError)] elif version_info <", "DELTA}{3}\", \"\\u03943\"), (\"2\\N{GREEK CAPITAL LETTER DELTA}\", \"2\\u0394\"), (\"2\\N{GREEK CAPITAL LETTER DELTA}3\", \"2\\u03943\"), (\"\\N{GREEK", "(\"2\\N{GREEK CAPITAL LETTER DELTA}3\", \"2\\u03943\"), (\"\\N{GREEK CAPITAL LETTER DELTA}3\", \"\\u03943\"), ] ) @pytest.mark.parametrize(\"template,", "fstr(template).format() def test_newlines_in_expressions(): assert fstr(\"{0}\").format() == \"0\" assert ( fstr( \"\"\"{3+ 4}\"\"\" ).format()", "def test_bad_mismatched_braces(template): with pytest.raises(SyntaxError): fstr(template).format() _ok_mismatched_braces = [(\"{'{'}\", \"{\"), (\"{'}'}\", \"}\")] @pytest.mark.parametrize(\"template, expected\",", "def test_odd_double_brace_replacement(): template = fstr(\"{{{x}}}\") assert template.format(x=1) == \"{1}\" def test_trailing_and_leading_space(): assert fstr(\"{", "\"\\u0394\"), 
(r\"\\u0394\", \"\\\\u0394\"), (r\"\\u0394\", \"\\\\u0394\"), (\"{2}\\u0394\", \"2\\u0394\"), (\"{2}\\u0394{3}\", \"2\\u03943\"), (\"\\u0394{3}\", \"\\u03943\"), (\"\\x20\", \"", "expected\", _ok_mismatched_braces) def test_ok_mistmatched_braces(template, expected): assert fstr(template).format() == expected _ok_lambdas = [ (\"{(lambda", "y in [(1, 2), ]}}\").format() == \"expr={1: 2}\" def test_not_equal(): # There's a", "or !r. assert fstr(\"{3!=4}\").format() == \"True\" assert fstr(\"{3!=4:}\").format() == \"True\" assert fstr(\"{3!=4!s}\").format() ==", "def test_ok_mistmatched_braces(template, expected): assert fstr(template).format() == expected _ok_lambdas = [ (\"{(lambda y:x*y)('8')!r}\", \"'88888'\"),", "== \"True\" assert fstr(\"{3!=4:}\").format() == \"True\" assert fstr(\"{3!=4!s}\").format() == \"True\" assert fstr(\"{3!=4!s:.3}\").format() ==", "conversion. assert fstr('{\"a!r\"}').format() == \"a!r\" # Not a conversion, but show that !", "Not a conversion, but show that ! is allowed in a format spec.", "TypeError), (\"{(0,):x}\", TypeError), (\"{1000:j}\", ValueError), ] @pytest.mark.parametrize(\"bad, etype\", _causes_errors) def test_errors(bad, etype): with", "lots of expressions and constants. assert fstr(\"{1} {'x'} {'y'} \" * 1000).format() ==", "'eric\"sy'), ('{\"\"\"x\"\"\" \"\"\"eric\"s\"\"\" \"y\"}', 'xeric\"sy'), ('{\"\"\"x\"\"\" \"\"\"eric\"s\"\"\" \"\"\"y\"\"\"}', 'xeric\"sy'), ('{r\"\"\"x\"\"\" \"\"\"eric\"s\"\"\" \"\"\"y\"\"\"}', 'xeric\"sy'),", "def dict_inside_braces_with_padding(): template = fstr(\"{ {x: y} }\", x=\"a\") assert template.format(y=1) == \"{'a':", "comments, since they're in strings. 
d = {\"#\": \"hash\"} assert fstr(\"{'#'}\").format() == \"#\"", "with pytest.raises(SyntaxError): fstr(template).format() _backlashes_in_string_part = [ (\"\\t\", \"\\t\"), (r\"\\t\", \"\\\\t\"), (\"{2}\\t\", \"2\\t\"), (\"{2}\\t{3}\",", "assert fstr(\"{foo(10)}\").format(foo=foo) == \"x=10\" def test_leading_trailing_spaces(): assert fstr(\"{ 3}\").format() == \"3\" assert fstr(\"{", "# expression. Normally it would, while looking for !s or !r. assert fstr(\"{3!=4}\").format()", ").format() == \"7\" # noqa: W503 ) _empty_format_specifiers = [ (\"{x}\", \"test\"), (\"{x:}\",", "str(self.i) fstr(\"{x} {x}\").format(x=X()) == \"1 2\" _bad_missing_expressions = [ \"{}\", \"{ '\" \"", "assert fstr(template).format() == expected def test_missing_variable(): with pytest.raises(NameError): fstr(\"v:{value}\").format() def test_missing_format_spec(): class Obj:", "import version_info import fstr def test_basic(): template = fstr(\"{x} + {y} = {x", "for n in range(250, 260): make_template(n).format(**context) # Test around 256. for i in", "\"precision\": 4, \"value\": 12.34567} assert fstr(template).format(**context) == \"result: 12.35\" _format_hex_specifier_templates = [ (10,", "\"x}\", \"x}x\", \"{3:}>10}\", \"{3:}}>10}\", \"{3:{{>10}\", \"{3\", \"{3!\", \"{3:\", \"{3!s\", \"{3!s:\", \"{3!s:3\", \"x{\", \"x{x\",", "]} }\").format() == \"expr={1: 2}\" assert fstr(\"expr={ {x: y for x, y in", "\"a\" assert fstr('{\"a\"!r}').format() == \"'a'\" if version_info >= (3, 0): assert fstr('{\"a\"!a}').format() ==", "fstr('{\"a\"}').format() == \"a\" assert fstr('{\"a\"!r}').format() == \"'a'\" if version_info >= (3, 0): assert", "in a format spec. assert fstr(\"{3.14:!<10.10}\").format() == \"3.14!!!!!!\" bad_conversions = [ \"{3!g}\" \"{3!A}\"", "following # whitespace characters in additional to a space. 
\"{\\t\\f\\r\\n}\", # Catch the", "0xa\", ] @pytest.mark.parametrize(\"template\", _format_specifier_width_precision_templates) def test_format_width_precision_specifier_expressions(template): context = {\"width\": 10, \"precision\": 4, \"value\":", "12.35\" _format_hex_specifier_templates = [ (10, \"{value:#{1}0x}\"), (10, \"{value:{'#'}1{0}{'x'}}\"), (-10, \"{value:-{'#'}1{0}x}\"), (-10, \"{value:{'-'}#{1}0{'x'}}\"), (10,", "= [(\"{'{'}\", \"{\"), (\"{'}'}\", \"}\")] @pytest.mark.parametrize(\"template, expected\", _ok_mismatched_braces) def test_ok_mistmatched_braces(template, expected): assert fstr(template).format()", "\"{3!s\", \"{3!s:\", \"{3!s:3\", \"x{\", \"x{x\", \"{x\", \"{3:s\", \"{{{\", \"{{}}{\", \"{\", ] @pytest.mark.parametrize(\"template\", _bad_mismatched_braces)", "== \"3\" assert fstr(\"expr={ {x: y for x, y in [(1, 2), ]}", "== expected _invalid_format_specifier_templates = [\"{'s'!r{':10'}}\", \"{4:{/5}}\", \"{'s'!{'r'}}\"] @pytest.mark.parametrize(\"template\", _invalid_format_specifier_templates) def test_invalid_format_specifier_expressions(template): with pytest.raises(SyntaxError):", "empty expression before the # missing closing brace. \"{!\", \"{!s:\", \"{:\", \"{:x\", \"{\\xa0}\",", "for this because there's a special # case in the f-string parser to", "(\"{2}\\U00000394{3}\", \"2\\u03943\"), (\"\\U00000394{3}\", \"\\u03943\"), (\"\\N{GREEK CAPITAL LETTER DELTA}\", \"\\u0394\"), (\"{2}\\N{GREEK CAPITAL LETTER DELTA}\",", "space. \"{\\t\\f\\r\\n}\", # Catch the empty expression before the # invalid conversion. 
\"{!x}\",", "!xr}\", \"{!x:}\", \"{!x:a}\", \"{ !xr:}\", \"{ !xr:a}\", \"{!}\", \"{:}\", # We find the", "assert fstr(\"{3.14!a:10.10}\").format() == \"3.14 \" assert fstr('{\"a\"}').format() == \"a\" assert fstr('{\"a\"!r}').format() == \"'a'\"", "a_local = 2 # noqa: F841 assert fstr(\"{_A_GLOBAL} {a_local}\").evaluate() == \"1 2\" def", "@pytest.mark.parametrize(\"template, expected\", _empty_format_specifiers) def test_empty_format_specifier(template, expected): assert fstr(template).format(x=\"test\") == expected _bad_mismatched_braces = [", "assert template.format(x=3, width=4) == \" 3\" template = fstr(\"{x:{width}.{precision}}\") assert template.format(x=1.2345, width=4, precision=2)", "2}\" assert fstr(\"expr={ {x: y for x, y in [(1, 2), ]}}\").format() ==", "\"{3!3}\" \"{3!G}\" \"{3!!}\" \"{3!:}\" \"{3! s}\" # no space before conversion char \"{x!s{y}}\",", "@pytest.mark.parametrize(\"template\", _bad_parens_in_expressions) def test_bad_parens_in_expressions(template): with pytest.raises(SyntaxError): fstr(template).format() _backlashes_in_string_part = [ (\"\\t\", \"\\t\"), (r\"\\t\",", "def test_not_equal(): # There's a special test for this because there's a special", "assert actual == expected actual = make_template(250, \"{x:{width}} \").format(**context) expected = (context[\"x\"] +", "\"{!x:}\", \"{!x:a}\", \"{ !xr:}\", \"{ !xr:a}\", \"{!}\", \"{:}\", # We find the empty", "\" {} \", \"{!r}\", \"{ !r}\", \"{10:{ }}\", \" { } \", #", "fstr('{\"a!r\"}').format() == \"a!r\" # Not a conversion, but show that ! 
is allowed", "fstr(\"{{{x}}}\") assert template.format(x=1) == \"{1}\" def test_trailing_and_leading_space(): assert fstr(\"{ 1 + 2}\").format() ==", "= 'ab'\" assert template.format(y=\"c\") == \"'a' + 'c' = 'ac'\" _A_GLOBAL = 1", "there's a special # case in the f-string parser to look for !=", "\"3\" assert fstr(\"{1 + 2 }\").format() == \"3\" assert fstr(\"{ 1 + 2", "width=4, precision=2) == \" 1.2\" def test_dict(): d = {'\"': \"double-quote\", \"'\": \"single-quote\",", "\"{4:{/5}}\", \"{'s'!{'r'}}\"] @pytest.mark.parametrize(\"template\", _invalid_format_specifier_templates) def test_invalid_format_specifier_expressions(template): with pytest.raises(SyntaxError): fstr(template).format() def test_side_effect_order(): class X:", "LETTER DELTA}3\", \"\\u03943\"), ] ) @pytest.mark.parametrize(\"template, expected\", _backlashes_in_string_part) def test_backslashes_in_string_part(template, expected): assert fstr(template).format()", "def make_template(n, extra=\"\"): return fstr((\"{x} \" + extra) * n) for n in", "import pytest from sys import version_info import fstr def test_basic(): template = fstr(\"{x}", "(\"\\U00000394\", \"\\u0394\"), (r\"\\U00000394\", \"\\\\U00000394\"), (r\"\\U00000394\", \"\\\\U00000394\"), (\"{2}\\U00000394\", \"2\\u0394\"), (\"{2}\\U00000394{3}\", \"2\\u03943\"), (\"\\U00000394{3}\", \"\\u03943\"), (\"\\N{GREEK", "== \"3\" assert fstr(\"{ 3}\").format() == \"3\" assert fstr(\"{3 }\").format() == \"3\" assert", "2 }\").format() == \"3\" def dict_inside_braces_with_padding(): template = fstr(\"{ {x: y} }\", x=\"a\")", "look for != as not ending an # expression. 
Normally it would, while", "(10, \"{value:#{1}0x}\"), (10, \"{value:{'#'}1{0}{'x'}}\"), (-10, \"{value:-{'#'}1{0}x}\"), (-10, \"{value:{'-'}#{1}0{'x'}}\"), (10, \"{value:#{3 != {4:5} and", "def test_simple_fstr_evaluate(): a_local = 2 # noqa: F841 assert fstr(\"{_A_GLOBAL} {a_local}\").evaluate() == \"1", "noqa: F841 assert fstr(\"{_A_GLOBAL} {a_local}\").evaluate() == \"1 2\" def test_format_language_with_inner_fstr(): template = fstr(\"{x:{width}}\")", "{\"width\": 10, \"precision\": 4, \"value\": 12.34567} assert fstr(template).format(**context) == \"result: 12.35\" _format_hex_specifier_templates =", "\"single-quote\" assert fstr(\"\"\"{d['\"']}\"\"\").format(d=d) == \"double-quote\" assert fstr('{d[\"foo\"]}').format(d=d) == \"bar\" assert fstr(\"{d['foo']}\").format(d=d) == \"bar\"", "+ \" \") * i assert actual == expected actual = make_template(250, \"{x:{width}}", "in range(250, 260): make_template(n).format(**context) # Test around 256. for i in range(250, 260):", "find the empty expression before the # missing closing brace. \"{!\", \"{!s:\", \"{:\",", "r\"{\\t3}\", \"{\\n}\"] @pytest.mark.parametrize(\"template\", _backslashes_in_expression) def test_no_backslashes_in_expression_part(template): with pytest.raises(SyntaxError): fstr(template).format() def test_newlines_in_expressions(): assert fstr(\"{0}\").format()", "\"y\"}', 'xeric\"sy'), ('{\"x\" \"\"\"eric\"s\"\"\"}', 'xeric\"s'), ('{\"\"\"eric\"s\"\"\" \"y\"}', 'eric\"sy'), ('{\"\"\"x\"\"\" \"\"\"eric\"s\"\"\" \"y\"}', 'xeric\"sy'), ('{\"\"\"x\"\"\"", "that ! is allowed in a format spec. 
assert fstr(\"{3.14:!<10.10}\").format() == \"3.14!!!!!!\" bad_conversions", "{} \", \"{!r}\", \"{ !r}\", \"{10:{ }}\", \" { } \", # The", "expected _backslashes_in_expression = [r\"{\\}\", r\"{\\'a\\'}\", r\"{\\t3}\", \"{\\n}\"] @pytest.mark.parametrize(\"template\", _backslashes_in_expression) def test_no_backslashes_in_expression_part(template): with pytest.raises(SyntaxError):", "fstr(\"v:{value}\").format() def test_missing_format_spec(): class Obj: def __format__(self, spec): if not spec: return \"*\"", "# missing closing brace. \"{!\", \"{!s:\", \"{:\", \"{:x\", \"{\\xa0}\", ] @pytest.mark.parametrize(\"template\", _bad_missing_expressions) def", "expected): assert fstr(template, x=5).format() == expected _triple_quoted_strings = [ (\"{'''x'''}\", \"x\"), (\"{'''eric's'''}\", \"eric's\"),", "# whitespace characters in additional to a space. \"{\\t\\f\\r\\n}\", # Catch the empty", "== \"3\" assert fstr(\"{3 }\").format() == \"3\" assert fstr(\"expr={ {x: y for x,", ") _empty_format_specifiers = [ (\"{x}\", \"test\"), (\"{x:}\", \"test\"), (\"{x!s:}\", \"test\"), (\"{x!r:}\", \"'test'\"), ]", "CAPITAL LETTER DELTA}3\", \"\\u03943\"), ] ) @pytest.mark.parametrize(\"template, expected\", _backlashes_in_string_part) def test_backslashes_in_string_part(template, expected): assert", "fstr(\"{foo(10)}\").format(foo=foo) == \"x=10\" def test_leading_trailing_spaces(): assert fstr(\"{ 3}\").format() == \"3\" assert fstr(\"{ 3}\").format()", "== \"hash\" @pytest.mark.parametrize(\"brace\", \"])}\") def test_unclosed_braces(brace): with pytest.raises(SyntaxError): fstr(\"{%s}\" % brace).format() def test_many_expressions():", "d = {\"#\": \"hash\"} assert fstr(\"{'#'}\").format() == \"#\" assert fstr(\"{d['#']}\").format(d=d) == \"hash\" @pytest.mark.parametrize(\"brace\",", "+ y template = fstr(\"{add(x, y)}\", add=add) assert template.format(x=1, y=2) == \"3\" def", "= [ \"result: {value:{width}.{precision}}\", \"result: {value:{width!r}.{precision}}\", \"result: 
{value:{width:0}.{precision:1}}\", \"result: {value:{1}{0:0}.{precision:1}}\", \"result: {value:{ 1}{", "test_odd_double_brace_replacement(): template = fstr(\"{{{x}}}\") assert template.format(x=1) == \"{1}\" def test_trailing_and_leading_space(): assert fstr(\"{ 1", "\"'a' + 'c' = 'ac'\" _A_GLOBAL = 1 def test_simple_fstr_evaluate(): a_local = 2", "CAPITAL LETTER DELTA}3\", \"2\\u03943\"), (\"\\N{GREEK CAPITAL LETTER DELTA}3\", \"\\u03943\"), ] ) @pytest.mark.parametrize(\"template, expected\",", "{x + y!r}\", x=\"a\") assert template.format(y=\"b\") == \"'a' + 'b' = 'ab'\" assert", "def foo(x): return \"x=\" + str(x) assert fstr(\"{foo(10)}\").format(foo=foo) == \"x=10\" def test_leading_trailing_spaces(): assert", "as not ending an # expression. Normally it would, while looking for !s", "def test_conversions(): assert fstr(\"{3.14:10.10}\").format() == \" 3.14\" assert fstr(\"{3.14!s:10.10}\").format() == \"3.14 \" assert", "== \"'a'\" # Not a conversion. assert fstr('{\"a!r\"}').format() == \"a!r\" # Not a", "space before conversion char \"{x!s{y}}\", \"{3!ss}\", \"{3!ss:}\", \"{3!ss:s}\", ] for bad in bad_conversions:", "{a_local}\").evaluate() == \"1 2\" def test_format_language_with_inner_fstr(): template = fstr(\"{x:{width}}\") assert template.format(x=10, width=3) ==", "y:x*y)('8')!r:10}\", \"'88888' \"), (\"{(lambda y:x*y)('8'):10}\", \"88888 \"), ] @pytest.mark.parametrize(\"template, expected\", _ok_lambdas) def test_lambda(template,", "'ac'\" _A_GLOBAL = 1 def test_simple_fstr_evaluate(): a_local = 2 # noqa: F841 assert", "== \"3.14 \" assert fstr(\"{3.14!r:10.10}\").format() == \"3.14 \" if version_info >= (3, 0):", "\"\"\"y\"\"\"}', 'xeric\"sy'), ] @pytest.mark.parametrize(\"template, expected\", _triple_quoted_strings) def test_expressions_with_triple_quoted_strings(template, expected): assert fstr(template).format() == expected", "in strings. 
d = {\"#\": \"hash\"} assert fstr(\"{'#'}\").format() == \"#\" assert fstr(\"{d['#']}\").format(d=d) ==", "test_bad_parens_in_expressions(template): with pytest.raises(SyntaxError): fstr(template).format() _backlashes_in_string_part = [ (\"\\t\", \"\\t\"), (r\"\\t\", \"\\\\t\"), (\"{2}\\t\", \"2\\t\"),", "! is allowed in a format spec. assert fstr(\"{3.14:!<10.10}\").format() == \"3.14!!!!!!\" bad_conversions =", "\"{\\n}\"] @pytest.mark.parametrize(\"template\", _backslashes_in_expression) def test_no_backslashes_in_expression_part(template): with pytest.raises(SyntaxError): fstr(template).format() def test_newlines_in_expressions(): assert fstr(\"{0}\").format() ==", "if version_info >= (3, 0): assert fstr('{\"a\"!a}').format() == \"'a'\" # Not a conversion.", "\"{\", ] @pytest.mark.parametrize(\"template\", _bad_mismatched_braces) def test_bad_mismatched_braces(template): with pytest.raises(SyntaxError): fstr(template).format() _ok_mismatched_braces = [(\"{'{'}\", \"{\"),", "assert template.format() == \"{}\" def test_odd_double_brace_replacement(): template = fstr(\"{{{x}}}\") assert template.format(x=1) == \"{1}\"", "# There's a special test for this because there's a special # case", "Not a conversion. assert fstr('{\"a!r\"}').format() == \"a!r\" # Not a conversion, but show", "version_info >= (3, 0): assert fstr('{\"a\"!a}').format() == \"'a'\" # Not a conversion. assert", "\"), ] @pytest.mark.parametrize(\"template, expected\", _ok_lambdas) def test_lambda(template, expected): assert fstr(template, x=5).format() == expected", "x=\"a\") assert template.format(y=\"b\") == \"'a' + 'b' = 'ab'\" assert template.format(y=\"c\") == \"'a'", "= [ \"{3!g}\" \"{3!A}\" \"{3!3}\" \"{3!G}\" \"{3!!}\" \"{3!:}\" \"{3! 
s}\" # no space", "\"width\": 1} def make_template(n, extra=\"\"): return fstr((\"{x} \" + extra) * n) for", "LETTER DELTA}{3}\", \"\\u03943\"), (\"2\\N{GREEK CAPITAL LETTER DELTA}\", \"2\\u0394\"), (\"2\\N{GREEK CAPITAL LETTER DELTA}3\", \"2\\u03943\"),", "< (3, 6): _causes_errors = [ (\"{(lambda: 0):x}\", TypeError), (\"{(0,):x}\", TypeError), (\"{1000:j}\", SyntaxError),", "with pytest.raises(SyntaxError): fstr(template).format() def test_newlines_in_expressions(): assert fstr(\"{0}\").format() == \"0\" assert ( fstr( \"\"\"{3+", "+ 2 }\").format() == \"3\" assert fstr(\"{ 1 + 2 }\").format() == \"3\"", "assert fstr(\"expr={ {x: y for x, y in [(1, 2), ]}}\").format() == \"expr={1:", "y for x, y in [(1, 2), ]}}\").format() == \"expr={1: 2}\" def test_not_equal():", "!= as not ending an # expression. Normally it would, while looking for", "expected): assert fstr(template).format() == expected def test_missing_variable(): with pytest.raises(NameError): fstr(\"v:{value}\").format() def test_missing_format_spec(): class", "== \"3\" def test_call(): def foo(x): return \"x=\" + str(x) assert fstr(\"{foo(10)}\").format(foo=foo) ==", "assert fstr(\"{'#'}\").format() == \"#\" assert fstr(\"{d['#']}\").format(d=d) == \"hash\" @pytest.mark.parametrize(\"brace\", \"])}\") def test_unclosed_braces(brace): with", "def __init__(self): self.i = 0 def __format__(self, spec): self.i += 1 return str(self.i)", "\"{3:s\", \"{{{\", \"{{}}{\", \"{\", ] @pytest.mark.parametrize(\"template\", _bad_mismatched_braces) def test_bad_mismatched_braces(template): with pytest.raises(SyntaxError): fstr(template).format() _ok_mismatched_braces", "\"double-quote\", \"'\": \"single-quote\", \"foo\": \"bar\"} assert fstr(\"\"\"{d[\"'\"]}\"\"\").format(d=d) == \"single-quote\" assert fstr(\"\"\"{d['\"']}\"\"\").format(d=d) == \"double-quote\"", "(\"{(lambda y:x*y)('8')!r}\", \"'88888'\"), (\"{(lambda y:x*y)('8')!r:10}\", \"'88888' \"), (\"{(lambda y:x*y)('8'):10}\", \"88888 \"), ] 
@pytest.mark.parametrize(\"template,", "precision=2) == \" 1.2\" def test_dict(): d = {'\"': \"double-quote\", \"'\": \"single-quote\", \"foo\":", "def test_format_width_precision_specifier_expressions(template): context = {\"width\": 10, \"precision\": 4, \"value\": 12.34567} assert fstr(template).format(**context) ==", "# Catch the empty expression before the # invalid conversion. \"{!x}\", \"{ !xr}\",", "DELTA}{3}\", \"2\\u03943\"), (\"\\N{GREEK CAPITAL LETTER DELTA}{3}\", \"\\u03943\"), (\"2\\N{GREEK CAPITAL LETTER DELTA}\", \"2\\u0394\"), (\"2\\N{GREEK", "} \", # The Python parser ignores also the following # whitespace characters", "expected def test_missing_variable(): with pytest.raises(NameError): fstr(\"v:{value}\").format() def test_missing_format_spec(): class Obj: def __format__(self, spec):", "assert fstr(\"{3!=4!s:.3}\").format() == \"Tru\" def test_conversions(): assert fstr(\"{3.14:10.10}\").format() == \" 3.14\" assert fstr(\"{3.14!s:10.10}\").format()", "assert template.format(x=1) == \"{1}\" def test_trailing_and_leading_space(): assert fstr(\"{ 1 + 2}\").format() == \"3\"", "add(x, y): return x + y template = fstr(\"{add(x, y)}\", add=add) assert template.format(x=1,", "assert fstr(\"{3!=4!s}\").format() == \"True\" assert fstr(\"{3!=4!s:.3}\").format() == \"Tru\" def test_conversions(): assert fstr(\"{3.14:10.10}\").format() ==", "\"x\"), (\"{'''eric's'''}\", \"eric's\"), ('{\"x\" \"\"\"eric\"s\"\"\" \"y\"}', 'xeric\"sy'), ('{\"x\" \"\"\"eric\"s\"\"\"}', 'xeric\"s'), ('{\"\"\"eric\"s\"\"\" \"y\"}', 'eric\"sy'),", "test_call(): def foo(x): return \"x=\" + str(x) assert fstr(\"{foo(10)}\").format(foo=foo) == \"x=10\" def test_leading_trailing_spaces():", "expressions and constants. 
assert fstr(\"{1} {'x'} {'y'} \" * 1000).format() == \"1 x", "'xeric\"sy'), ('{\"\"\"x\"\"\" \"\"\"eric\"s\"\"\" \"\"\"y\"\"\"}', 'xeric\"sy'), ('{r\"\"\"x\"\"\" \"\"\"eric\"s\"\"\" \"\"\"y\"\"\"}', 'xeric\"sy'), ] @pytest.mark.parametrize(\"template, expected\", _triple_quoted_strings)", "[(1, 2), ]}}\").format() == \"expr={1: 2}\" def test_not_equal(): # There's a special test", "template = fstr(\"{{{x}}}\") assert template.format(x=1) == \"{1}\" def test_trailing_and_leading_space(): assert fstr(\"{ 1 +", "width}x}\"), ] @pytest.mark.parametrize(\"value, template\", _format_hex_specifier_templates) def test_format_hex_specifier_expressions(value, template): expected = \" -0xa\" if", "0 else \" 0xa\" assert fstr(template).format(value=value, width=10) == expected _invalid_format_specifier_templates = [\"{'s'!r{':10'}}\", \"{4:{/5}}\",", "fstr(\"{3!s:}\").format() == \"3\" def test_call(): def foo(x): return \"x=\" + str(x) assert fstr(\"{foo(10)}\").format(foo=foo)", "x + y template = fstr(\"{add(x, y)}\", add=add) assert template.format(x=1, y=2) == \"3\"", "def test_leading_trailing_spaces(): assert fstr(\"{ 3}\").format() == \"3\" assert fstr(\"{ 3}\").format() == \"3\" assert", "\"{3:{{>10}\", \"{3\", \"{3!\", \"{3:\", \"{3!s\", \"{3!s:\", \"{3!s:3\", \"x{\", \"x{x\", \"{x\", \"{3:s\", \"{{{\", \"{{}}{\",", "* 500 assert actual == expected # Test lots of expressions and constants.", "assert fstr(\"{3 }\").format() == \"3\" assert fstr(\"expr={ {x: y for x, y in", "== \"single-quote\" assert fstr(\"\"\"{d['\"']}\"\"\").format(d=d) == \"double-quote\" assert fstr('{d[\"foo\"]}').format(d=d) == \"bar\" assert fstr(\"{d['foo']}\").format(d=d) ==", "0xa\", \" 0xa\", \" -0xa\", \" -0xa\", \" 0xa\", ] @pytest.mark.parametrize(\"template\", _format_specifier_width_precision_templates) def", "+= 1 return str(self.i) fstr(\"{x} {x}\").format(x=X()) == \"1 2\" _bad_missing_expressions = [ \"{}\",", "\"{{}}{\", \"{\", ] @pytest.mark.parametrize(\"template\", 
_bad_mismatched_braces) def test_bad_mismatched_braces(template): with pytest.raises(SyntaxError): fstr(template).format() _ok_mismatched_braces = [(\"{'{'}\",", "(\"{(lambda y:x*y)('8'):10}\", \"88888 \"), ] @pytest.mark.parametrize(\"template, expected\", _ok_lambdas) def test_lambda(template, expected): assert fstr(template,", "\"{{}\", \"{{}}}\", \"}\", \"x}\", \"x}x\", \"{3:}>10}\", \"{3:}}>10}\", \"{3:{{>10}\", \"{3\", \"{3!\", \"{3:\", \"{3!s\", \"{3!s:\",", "assert fstr('{\"a!r\"}').format() == \"a!r\" # Not a conversion, but show that ! is", "_triple_quoted_strings) def test_expressions_with_triple_quoted_strings(template, expected): assert fstr(template).format() == expected def test_missing_variable(): with pytest.raises(NameError): fstr(\"v:{value}\").format()", "assert template.format(x=10, width=3) == \" 10\" assert template.format(x=3, width=4) == \" 3\" template", "{4:5} and width}x}\"), ] @pytest.mark.parametrize(\"value, template\", _format_hex_specifier_templates) def test_format_hex_specifier_expressions(value, template): expected = \"", "fstr(template).format() == expected _backslashes_in_expression = [r\"{\\}\", r\"{\\'a\\'}\", r\"{\\t3}\", \"{\\n}\"] @pytest.mark.parametrize(\"template\", _backslashes_in_expression) def test_no_backslashes_in_expression_part(template):", "+ extra) * n) for n in range(250, 260): make_template(n).format(**context) # Test around", "256. for i in range(250, 260): actual = make_template(i).format(**context) expected = (context[\"x\"] +", "noqa: W503 ) _empty_format_specifiers = [ (\"{x}\", \"test\"), (\"{x:}\", \"test\"), (\"{x!s:}\", \"test\"), (\"{x!r:}\",", "allowed in a format spec. 
assert fstr(\"{3.14:!<10.10}\").format() == \"3.14!!!!!!\" bad_conversions = [ \"{3!g}\"", "\"test\"), (\"{x!r:}\", \"'test'\"), ] @pytest.mark.parametrize(\"template, expected\", _empty_format_specifiers) def test_empty_format_specifier(template, expected): assert fstr(template).format(x=\"test\") ==", "(context[\"x\"] + \" \") * i assert actual == expected actual = make_template(250,", "== \"'a' + 'b' = 'ab'\" assert template.format(y=\"c\") == \"'a' + 'c' =", "def test_expressions_with_triple_quoted_strings(template, expected): assert fstr(template).format() == expected def test_missing_variable(): with pytest.raises(NameError): fstr(\"v:{value}\").format() def", "template = fstr(\"{x!r} + {y!r} = {x + y!r}\", x=\"a\") assert template.format(y=\"b\") ==", "\"{'s'!{'r'}}\"] @pytest.mark.parametrize(\"template\", _invalid_format_specifier_templates) def test_invalid_format_specifier_expressions(template): with pytest.raises(SyntaxError): fstr(template).format() def test_side_effect_order(): class X: def", "assert fstr(\"{Obj():x}\").format(Obj=Obj) == \"x\" assert fstr(\"{Obj()}\").format(Obj=Obj) == \"*\" assert fstr(\"{Obj():}\").format(Obj=Obj) == \"*\" assert", "assert fstr('{\"a\"!r}').format() == \"'a'\" if version_info >= (3, 0): assert fstr('{\"a\"!a}').format() == \"'a'\"", "\"\\\\u0394\"), (r\"\\u0394\", \"\\\\u0394\"), (\"{2}\\u0394\", \"2\\u0394\"), (\"{2}\\u0394{3}\", \"2\\u03943\"), (\"\\u0394{3}\", \"\\u03943\"), (\"\\x20\", \" \"), (r\"\\x20\",", "\"foo\": \"bar\"} assert fstr(\"\"\"{d[\"'\"]}\"\"\").format(d=d) == \"single-quote\" assert fstr(\"\"\"{d['\"']}\"\"\").format(d=d) == \"double-quote\" assert fstr('{d[\"foo\"]}').format(d=d) ==", "\"\\\\U00000394\"), (\"{2}\\U00000394\", \"2\\u0394\"), (\"{2}\\U00000394{3}\", \"2\\u03943\"), (\"\\U00000394{3}\", \"\\u03943\"), (\"\\N{GREEK CAPITAL LETTER DELTA}\", \"\\u0394\"), (\"{2}\\N{GREEK", "== \"*\" assert fstr(\"{Obj():}\").format(Obj=Obj) == \"*\" assert fstr(\"{3:}\").format() == \"3\" assert 
fstr(\"{3!s:}\").format() ==", "brace. \"{!\", \"{!s:\", \"{:\", \"{:x\", \"{\\xa0}\", ] @pytest.mark.parametrize(\"template\", _bad_missing_expressions) def test_missing_expression(template): with pytest.raises(SyntaxError):", "range(250, 260): make_template(n).format(**context) # Test around 256. for i in range(250, 260): actual", "* 1000).format() == \"1 x y \" * 1000 _format_specifier_width_precision_templates = [ \"result:", "\"{3!ss:s}\", ] for bad in bad_conversions: with pytest.raises(SyntaxError): fstr(bad).format() _invalid_expressions = [\"{a[4)}\", \"{a(4]}\"]", "test_format_width_precision_specifier_expressions(template): context = {\"width\": 10, \"precision\": 4, \"value\": 12.34567} assert fstr(template).format(**context) == \"result:", "\"{value:{'#'}1{0}{'x'}}\"), (-10, \"{value:-{'#'}1{0}x}\"), (-10, \"{value:{'-'}#{1}0{'x'}}\"), (10, \"{value:#{3 != {4:5} and width}x}\"), ] @pytest.mark.parametrize(\"value,", "a special test for this because there's a special # case in the", "template.format(x=10, width=3) == \" 10\" assert template.format(x=3, width=4) == \" 3\" template =", "\"True\" assert fstr(\"{3!=4:}\").format() == \"True\" assert fstr(\"{3!=4!s}\").format() == \"True\" assert fstr(\"{3!=4!s:.3}\").format() == \"Tru\"", "3\"), (\"\\\\{6*7}\", \"\\\\42\"), (r\"\\{6*7}\", \"\\\\42\"), ] if version_info >= (3, 0): _backlashes_in_string_part.extend( [", "\"3.14 \" assert fstr(\"{3.14!r:10.10}\").format() == \"3.14 \" if version_info >= (3, 0): assert", "\"2 \"), (\"2\\x203\", \"2 3\"), (\"\\x203\", \" 3\"), (\"\\\\{6*7}\", \"\\\\42\"), (r\"\\{6*7}\", \"\\\\42\"), ]", "\", \"{!r}\", \"{ !r}\", \"{10:{ }}\", \" { } \", # The Python", "test_missing_variable(): with pytest.raises(NameError): fstr(\"v:{value}\").format() def test_missing_format_spec(): class Obj: def __format__(self, spec): if not", "return \"*\" return spec assert fstr(\"{Obj():x}\").format(Obj=Obj) == \"x\" assert fstr(\"{Obj()}\").format(Obj=Obj) == \"*\" assert", "+ str(x) 
assert fstr(\"{foo(10)}\").format(foo=foo) == \"x=10\" def test_leading_trailing_spaces(): assert fstr(\"{ 3}\").format() == \"3\"", "\"{3!!}\" \"{3!:}\" \"{3! s}\" # no space before conversion char \"{x!s{y}}\", \"{3!ss}\", \"{3!ss:}\",", "x, y in [(1, 2), ]} }\").format() == \"expr={1: 2}\" assert fstr(\"expr={ {x:", "= 3\" assert template.format(y=3) == \"1 + 3 = 4\" def test_basic_format_language(): template", "\"{a(4]}\"] @pytest.mark.parametrize(\"invalid\", _invalid_expressions) def test_invalid_expressions(invalid): with pytest.raises(SyntaxError): fstr(invalid).format() if version_info < (3, 0):", "\"True\" assert fstr(\"{3!=4!s:.3}\").format() == \"Tru\" def test_conversions(): assert fstr(\"{3.14:10.10}\").format() == \" 3.14\" assert", "2 }\").format() == \"3\" assert fstr(\"{ 1 + 2 }\").format() == \"3\" def", "assert fstr(template).format() == expected _ok_lambdas = [ (\"{(lambda y:x*y)('8')!r}\", \"'88888'\"), (\"{(lambda y:x*y)('8')!r:10}\", \"'88888'", "[ (\"{(lambda: 0):x}\", TypeError), (\"{(0,):x}\", TypeError), (\"{1000:j}\", ValueError), ] @pytest.mark.parametrize(\"bad, etype\", _causes_errors) def", "@pytest.mark.parametrize(\"template, expected\", _triple_quoted_strings) def test_expressions_with_triple_quoted_strings(template, expected): assert fstr(template).format() == expected def test_missing_variable(): with", "template.format(y=\"b\") == \"'a' + 'b' = 'ab'\" assert template.format(y=\"c\") == \"'a' + 'c'", "pytest.raises(SyntaxError): fstr(template).format() def test_newlines_in_expressions(): assert fstr(\"{0}\").format() == \"0\" assert ( fstr( \"\"\"{3+ 4}\"\"\"", "+ \" \") * 500 assert actual == expected # Test lots of", "== \"double-quote\" assert fstr('{d[\"foo\"]}').format(d=d) == \"bar\" assert fstr(\"{d['foo']}\").format(d=d) == \"bar\" def test_format_with_function(): def", "(\"{'}'}\", \"}\")] @pytest.mark.parametrize(\"template, expected\", _ok_mismatched_braces) def test_ok_mistmatched_braces(template, expected): assert 
fstr(template).format() == expected _ok_lambdas", "{value:{1}{0:0}.{precision:1}}\", \"result: {value:{ 1}{ 0:0}.{ precision:1}}\", ] _format_specifier_expression_expecteds = [ \"result: 12.35\", \"result:", "expected actual = make_template(250, \"{x:{width}} \").format(**context) expected = (context[\"x\"] + \" \") *", "}\", x=\"a\") assert template.format(y=1) == \"{'a': 1}\" def test_hash_in_string(): # These aren't comments,", "1 return str(self.i) fstr(\"{x} {x}\").format(x=X()) == \"1 2\" _bad_missing_expressions = [ \"{}\", \"{", "260): actual = make_template(i).format(**context) expected = (context[\"x\"] + \" \") * i assert", "width=3) == \" 10\" assert template.format(x=3, width=4) == \" 3\" template = fstr(\"{x:{width}.{precision}}\")", "if version_info >= (3, 0): assert fstr(\"{3.14!a:10.10}\").format() == \"3.14 \" assert fstr('{\"a\"}').format() ==", "TypeError), (\"{1000:j}\", ValueError), ] @pytest.mark.parametrize(\"bad, etype\", _causes_errors) def test_errors(bad, etype): with pytest.raises(etype): fstr(bad).format()", "def test_basic(): template = fstr(\"{x} + {y} = {x + y}\", x=1) assert", "-0xa\", \" -0xa\", \" 0xa\", ] @pytest.mark.parametrize(\"template\", _format_specifier_width_precision_templates) def test_format_width_precision_specifier_expressions(template): context = {\"width\":", "\"2\\u0394\"), (\"2\\N{GREEK CAPITAL LETTER DELTA}3\", \"2\\u03943\"), (\"\\N{GREEK CAPITAL LETTER DELTA}3\", \"\\u03943\"), ] )", "= fstr(\"{{}}\") assert template.format() == \"{}\" def test_odd_double_brace_replacement(): template = fstr(\"{{{x}}}\") assert template.format(x=1)", "2\" _bad_missing_expressions = [ \"{}\", \"{ '\" \" {} \", \"{!r}\", \"{ !r}\",", "test_unclosed_braces(brace): with pytest.raises(SyntaxError): fstr(\"{%s}\" % brace).format() def test_many_expressions(): context = {\"x\": \"X\", \"width\":", "# Test around 256. 
for i in range(250, 260): actual = make_template(i).format(**context) expected", "SyntaxError), ] else: _causes_errors = [ (\"{(lambda: 0):x}\", TypeError), (\"{(0,):x}\", TypeError), (\"{1000:j}\", ValueError),", "\"\\\\42\"), (r\"\\{6*7}\", \"\\\\42\"), ] if version_info >= (3, 0): _backlashes_in_string_part.extend( [ (\"\\U00000394\", \"\\u0394\"),", "\"bar\"} assert fstr(\"\"\"{d[\"'\"]}\"\"\").format(d=d) == \"single-quote\" assert fstr(\"\"\"{d['\"']}\"\"\").format(d=d) == \"double-quote\" assert fstr('{d[\"foo\"]}').format(d=d) == \"bar\"", "= fstr(\"{{{x}}}\") assert template.format(x=1) == \"{1}\" def test_trailing_and_leading_space(): assert fstr(\"{ 1 + 2}\").format()", "== expected actual = make_template(250, \"{x:{width}} \").format(**context) expected = (context[\"x\"] + \" \")", "f-string parser to look for != as not ending an # expression. Normally", "fstr(\"{ 1 + 2}\").format() == \"3\" assert fstr(\"{1 + 2 }\").format() == \"3\"", "\"result: 12.35\" _format_hex_specifier_templates = [ (10, \"{value:#{1}0x}\"), (10, \"{value:{'#'}1{0}{'x'}}\"), (-10, \"{value:-{'#'}1{0}x}\"), (-10, \"{value:{'-'}#{1}0{'x'}}\"),", "= fstr(\"{x:{width}.{precision}}\") assert template.format(x=1.2345, width=4, precision=2) == \" 1.2\" def test_dict(): d =", "characters in additional to a space. 
\"{\\t\\f\\r\\n}\", # Catch the empty expression before", "y:x*y)('8')!r}\", \"'88888'\"), (\"{(lambda y:x*y)('8')!r:10}\", \"'88888' \"), (\"{(lambda y:x*y)('8'):10}\", \"88888 \"), ] @pytest.mark.parametrize(\"template, expected\",", "('{\"\"\"x\"\"\" \"\"\"eric\"s\"\"\" \"y\"}', 'xeric\"sy'), ('{\"\"\"x\"\"\" \"\"\"eric\"s\"\"\" \"\"\"y\"\"\"}', 'xeric\"sy'), ('{r\"\"\"x\"\"\" \"\"\"eric\"s\"\"\" \"\"\"y\"\"\"}', 'xeric\"sy'), ]", "}\").format() == \"3\" def dict_inside_braces_with_padding(): template = fstr(\"{ {x: y} }\", x=\"a\") assert", "fstr(\"{ 3}\").format() == \"3\" assert fstr(\"{3 }\").format() == \"3\" assert fstr(\"{3 }\").format() ==", "y): return x + y template = fstr(\"{add(x, y)}\", add=add) assert template.format(x=1, y=2)", "\"hash\" @pytest.mark.parametrize(\"brace\", \"])}\") def test_unclosed_braces(brace): with pytest.raises(SyntaxError): fstr(\"{%s}\" % brace).format() def test_many_expressions(): context", "\"2\\u03943\"), (\"\\U00000394{3}\", \"\\u03943\"), (\"\\N{GREEK CAPITAL LETTER DELTA}\", \"\\u0394\"), (\"{2}\\N{GREEK CAPITAL LETTER DELTA}\", \"2\\u0394\"),", "DELTA}\", \"2\\u0394\"), (\"{2}\\N{GREEK CAPITAL LETTER DELTA}{3}\", \"2\\u03943\"), (\"\\N{GREEK CAPITAL LETTER DELTA}{3}\", \"\\u03943\"), (\"2\\N{GREEK", "template.format(x=3, width=4) == \" 3\" template = fstr(\"{x:{width}.{precision}}\") assert template.format(x=1.2345, width=4, precision=2) ==", "the # missing closing brace. 
\"{!\", \"{!s:\", \"{:\", \"{:x\", \"{\\xa0}\", ] @pytest.mark.parametrize(\"template\", _bad_missing_expressions)", "_bad_parens_in_expressions = [\"{,}\", \"{,}\", \"{3)+(4}\", \"{\\n}\"] @pytest.mark.parametrize(\"template\", _bad_parens_in_expressions) def test_bad_parens_in_expressions(template): with pytest.raises(SyntaxError): fstr(template).format()", "\"result: 12.35\", \"result: 12.35\", \"result: 12.35\", \"result: 12.35\", \" 0xa\", \" 0xa\", \"", "pytest.raises(SyntaxError): fstr(template).format() def test_side_effect_order(): class X: def __init__(self): self.i = 0 def __format__(self,", "\"\\\\42\"), ] if version_info >= (3, 0): _backlashes_in_string_part.extend( [ (\"\\U00000394\", \"\\u0394\"), (r\"\\U00000394\", \"\\\\U00000394\"),", "assert fstr(\"{3.14:10.10}\").format() == \" 3.14\" assert fstr(\"{3.14!s:10.10}\").format() == \"3.14 \" assert fstr(\"{3.14!r:10.10}\").format() ==", "value < 0 else \" 0xa\" assert fstr(template).format(value=value, width=10) == expected _invalid_format_specifier_templates =", "\"3.14 \" assert fstr('{\"a\"}').format() == \"a\" assert fstr('{\"a\"!r}').format() == \"'a'\" if version_info >=", "def test_invalid_expressions(invalid): with pytest.raises(SyntaxError): fstr(invalid).format() if version_info < (3, 0): _causes_errors = [(\"{1000:j}\",", "fstr(\"{ 1 + 2 }\").format() == \"3\" def dict_inside_braces_with_padding(): template = fstr(\"{ {x:", "assert template.format(y=1) == \"{'a': 1}\" def test_hash_in_string(): # These aren't comments, since they're", "== \"3\" assert fstr(\"{ 1 + 2 }\").format() == \"3\" def dict_inside_braces_with_padding(): template", "\"\\\\U00000394\"), (r\"\\U00000394\", \"\\\\U00000394\"), (\"{2}\\U00000394\", \"2\\u0394\"), (\"{2}\\U00000394{3}\", \"2\\u03943\"), (\"\\U00000394{3}\", \"\\u03943\"), (\"\\N{GREEK CAPITAL LETTER DELTA}\"," ]
[ "<reponame>peopledoc/django-formidable from .json_migrations import latest_version default_app_config = 'formidable.app.FormidableConfig' version = '7.2.0.dev0' json_version =", "from .json_migrations import latest_version default_app_config = 'formidable.app.FormidableConfig' version = '7.2.0.dev0' json_version = latest_version" ]
[ "open(collection_file, 'w', encoding='utf-8') as fp: fp.write(response.text) print(f'{file_name} saved to cache folder') html =", "print(f'{file_name} saved to cache folder') html = response.text else: print(f'Reading {file_name} from cache')", "url: url to load :param file_name: name of cached file :return: soup to", "recommended = [vote_option for (vote_option, votes) in poll.items() if check(votes)] except KeyError: return", "\"\"\" try: recommended = [vote_option for (vote_option, votes) in poll.items() if check(votes)] except", "= map_poll(suggested_numplayers, is_best)[-1] game['best_numplayers'] = map_poll(suggested_numplayers, is_best) game['recommended_numplayers'] = map_poll(suggested_numplayers, is_recommended) # Finally", "= dict() collection_item['name'] = collection_row.find('a', class_='primary').text version = collection_row.find('div', class_='geekitem_name') if version is", "game['best_minplayers'] = map_poll(suggested_numplayers, is_best)[0] game['best_maxpleyers'] = map_poll(suggested_numplayers, is_best)[-1] game['best_numplayers'] = map_poll(suggested_numplayers, is_best) game['recommended_numplayers']", "Skip header, we don't care about this if first: first = False continue", "game in collection: game_id = game.get(\"id\") game_data = load_data(f'https://boardgamegeek.com/xmlapi/boardgame/{game_id}?stats=1', f'{game_id}.xml') game['boardgamecategory'] = [category.text", "KeyError: return [None] if len(recommended) == 0: return [None] else: return recommended def", "json.dump(collection, fp, indent=2) print(f'JSON file written to cache folder') def parse_collection_row(collection_row): \"\"\" Parse", "\"suggested_numplayers\"})) game['best_minplayers'] = map_poll(suggested_numplayers, is_best)[0] game['best_maxpleyers'] = map_poll(suggested_numplayers, is_best)[-1] game['best_numplayers'] = map_poll(suggested_numplayers, is_best)", "against :return: None if nothing is recommended or a list of recommended player", "def map_poll(poll, check): 
\"\"\" Map the voting poll results dict to a list", "os import configparser import requests as req # noinspection PyUnresolvedReferences from bs4 import", "about this if first: first = False continue # Append parsed collection row", "votes) in poll.items() if check(votes)] except KeyError: return [None] if len(recommended) == 0:", "collection_table.find_all('tr'): # Skip header, we don't care about this if first: first =", "recommended def is_best(votes): return int(votes['Best']) >= int(votes['Recommended']) + int(votes['Not Recommended']) def is_recommended(votes): return", "and convert to json \"\"\" # Find table containing collection collection_file_key = config['fetch']['COLLECTION_FILE_KEY']", "game['minplayers'] = tex_or_none(game_data.find('minplayers')) game['maxplayers'] = tex_or_none(game_data.find('maxplayers')) game['playingtime'] = tex_or_none(game_data.find('playingtime')) suggested_numplayers = parse_poll(game_data.find('poll', attrs={\"name\":", "cache') with open(collection_file, 'r', encoding='utf-8') as fp: html = fp.read() if file_name.endswith('html'): return", "Get the collection and convert to json \"\"\" # Find table containing collection", "check(votes)] except KeyError: return [None] if len(recommended) == 0: return [None] else: return", "import configparser import requests as req # noinspection PyUnresolvedReferences from bs4 import BeautifulSoup,", "# Append parsed collection row to collection list for later dumping collection.append(parse_collection_row(collection_row)) print(f'Parsed", "config['fetch']['COLLECTION_FILE_KEY'] collection_table = load_data(config['fetch']['URL'], file_name=f'{collection_file_key}.html').find(id='collectionitems') collection = list() # Iterate over collection table,", "collection_row.find('td', class_='collection_plays') if plays.a is None: collection_item['plays'] = 0 else: collection_item['plays'] = int(plays.a.text)", "int(votes['Recommended']) + int(votes['Not Recommended']) def is_recommended(votes): 
return int(votes['Best']) + int(votes['Recommended']) >= int(votes['Not Recommended'])", "url to load :param file_name: name of cached file :return: soup to parse", "\"\"\" Parse a single collection table row into a dict :param collection_row: the", "config = configparser.ConfigParser() def load_data(url, file_name): \"\"\" Load data either from web or", "import os import configparser import requests as req # noinspection PyUnresolvedReferences from bs4", "geek_rating collection_item['status'] = collection_row.find('td', class_='collection_status').text.strip() plays = collection_row.find('td', class_='collection_plays') if plays.a is None:", "if already present :param url: url to load :param file_name: name of cached", "collection_item = dict() collection_item['name'] = collection_row.find('a', class_='primary').text version = collection_row.find('div', class_='geekitem_name') if version", "= collection_row.find('a', class_='primary')['href'].split('/')[2] collection_item['user_rating'] = tex_or_none(collection_row.find('div', class_='ratingtext')) geek_rating = collection_row.find('td', class_='collection_bggrating').text.strip() if geek_rating", "encoding='utf-8') as fp: fp.write(response.text) print(f'{file_name} saved to cache folder') html = response.text else:", "def load_data(url, file_name): \"\"\" Load data either from web or cache if already", "version = collection_row.find('div', class_='geekitem_name') if version is not None: collection_item['version'] = version.text.strip() year", ":param url: url to load :param file_name: name of cached file :return: soup", "collection_item['plays'] = 0 else: collection_item['plays'] = int(plays.a.text) return collection_item def map_poll(poll, check): \"\"\"", "print(f'Reading {file_name} from cache') with open(collection_file, 'r', encoding='utf-8') as fp: html = fp.read()", "# coding=utf-8 import json import os import configparser import requests as req #", "html = response.text else: print(f'Reading {file_name} 
from cache') with open(collection_file, 'r', encoding='utf-8') as", "'N/A': collection_item['geek_rating'] = None else: collection_item['geek_rating'] = geek_rating collection_item['status'] = collection_row.find('td', class_='collection_status').text.strip() plays", "in collection: game_id = game.get(\"id\") game_data = load_data(f'https://boardgamegeek.com/xmlapi/boardgame/{game_id}?stats=1', f'{game_id}.xml') game['boardgamecategory'] = [category.text for", "poll[result['numplayers']] = { str(child['value']): child['numvotes'] for child in result.children if isinstance(child, Tag) }", "noinspection PyUnresolvedReferences from bs4 import BeautifulSoup, Tag config = configparser.ConfigParser() def load_data(url, file_name):", "collection_row.find('a', class_='primary')['href'].split('/')[2] collection_item['user_rating'] = tex_or_none(collection_row.find('div', class_='ratingtext')) geek_rating = collection_row.find('td', class_='collection_bggrating').text.strip() if geek_rating ==", "check :param poll: The voting poll consisting of vote topic with recommendations by", "cache folder') def parse_collection_row(collection_row): \"\"\" Parse a single collection table row into a", "json \"\"\" # Find table containing collection collection_file_key = config['fetch']['COLLECTION_FILE_KEY'] collection_table = load_data(config['fetch']['URL'],", "if not os.path.exists(collection_file): print(f'Reading {file_name} page from web') response = req.get(url) with open(collection_file,", "is_best)[-1] game['best_numplayers'] = map_poll(suggested_numplayers, is_best) game['recommended_numplayers'] = map_poll(suggested_numplayers, is_recommended) # Finally dump data", "= fp.read() if file_name.endswith('html'): return BeautifulSoup(html, 'html.parser') if file_name.endswith('xml'): return BeautifulSoup(html, 'lxml') def", "dict :param collection_row: the row to parse :return: a dictionary containing row values", "topic with recommendations by the community :param check: Checking 
function to validate against", "class_='collection_plays') if plays.a is None: collection_item['plays'] = 0 else: collection_item['plays'] = int(plays.a.text) return", "BeautifulSoup(html, 'html.parser') if file_name.endswith('xml'): return BeautifulSoup(html, 'lxml') def get_collection(): \"\"\" Get the collection", "dictionary containing row values \"\"\" collection_item = dict() collection_item['name'] = collection_row.find('a', class_='primary').text version", "file') print(f'\\nCollecting game data:') for game in collection: game_id = game.get(\"id\") game_data =", "domain in game_data.find_all('boardgamesubdomain')] game['image'] = tex_or_none(game_data.find('image')).strip() game['minplayers'] = tex_or_none(game_data.find('minplayers')) game['maxplayers'] = tex_or_none(game_data.find('maxplayers')) game['playingtime']", "convert to json \"\"\" # Find table containing collection collection_file_key = config['fetch']['COLLECTION_FILE_KEY'] collection_table", "numbers. \"\"\" try: recommended = [vote_option for (vote_option, votes) in poll.items() if check(votes)]", "poll results dict to a list containing only voting options that pass the", "fp: fp.write(response.text) print(f'{file_name} saved to cache folder') html = response.text else: print(f'Reading {file_name}", "collection_row.find('td', class_='collection_bggrating').text.strip() if geek_rating == 'N/A': collection_item['geek_rating'] = None else: collection_item['geek_rating'] = geek_rating", "if tag is None: return None else: return tag.text def parse_poll(poll_data): if poll_data", "game['best_numplayers'] = map_poll(suggested_numplayers, is_best) game['recommended_numplayers'] = map_poll(suggested_numplayers, is_recommended) # Finally dump data as", "collection_item['geek_rating'] = geek_rating collection_item['status'] = collection_row.find('td', class_='collection_status').text.strip() plays = collection_row.find('td', class_='collection_plays') if plays.a", "get_collection(): \"\"\" Get the 
collection and convert to json \"\"\" # Find table", "else: collection_item['geek_rating'] = geek_rating collection_item['status'] = collection_row.find('td', class_='collection_status').text.strip() plays = collection_row.find('td', class_='collection_plays') if", "if not os.path.exists(config['fetch']['CACHE_DIRECTORY']): os.mkdir(config['fetch']['CACHE_DIRECTORY']) collection_file = os.path.join(config['fetch']['CACHE_DIRECTORY'], file_name) if not os.path.exists(collection_file): print(f'Reading {file_name}", "return BeautifulSoup(html, 'lxml') def get_collection(): \"\"\" Get the collection and convert to json", "results: poll[result['numplayers']] = { str(child['value']): child['numvotes'] for child in result.children if isinstance(child, Tag)", "\"\"\" collection_item = dict() collection_item['name'] = collection_row.find('a', class_='primary').text version = collection_row.find('div', class_='geekitem_name') if", "return int(votes['Best']) >= int(votes['Recommended']) + int(votes['Not Recommended']) def is_recommended(votes): return int(votes['Best']) + int(votes['Recommended'])", "cached file :return: soup to parse \"\"\" if not os.path.exists(config['fetch']['CACHE_DIRECTORY']): os.mkdir(config['fetch']['CACHE_DIRECTORY']) collection_file =", "data as JSON print(f'\\nWriting result to JSON:') with open(os.path.join(config['fetch']['RESULT_DIRECTORY'], f'{collection_file_key}.json'), 'w', encoding='UTF-8') as", "Load data either from web or cache if already present :param url: url", "don't care about this if first: first = False continue # Append parsed", "collection_item['user_rating'] = tex_or_none(collection_row.find('div', class_='ratingtext')) geek_rating = collection_row.find('td', class_='collection_bggrating').text.strip() if geek_rating == 'N/A': collection_item['geek_rating']", "map_poll(suggested_numplayers, is_recommended) # Finally dump data as JSON print(f'\\nWriting result to JSON:') with", "class_='geekitem_name') if version is not None: 
collection_item['version'] = version.text.strip() year = collection_row.find('span', class_='smallerfont')", "collection_row.find('div', class_='geekitem_name') if version is not None: collection_item['version'] = version.text.strip() year = collection_row.find('span',", "= load_data(f'https://boardgamegeek.com/xmlapi/boardgame/{game_id}?stats=1', f'{game_id}.xml') game['boardgamecategory'] = [category.text for category in game_data.find_all('boardgamecategory')] game['boardgamesubdomain'] = [domain.text", "'lxml') def get_collection(): \"\"\" Get the collection and convert to json \"\"\" #", "care about this if first: first = False continue # Append parsed collection", "for collection_row in collection_table.find_all('tr'): # Skip header, we don't care about this if", "with open(collection_file, 'r', encoding='utf-8') as fp: html = fp.read() if file_name.endswith('html'): return BeautifulSoup(html,", "\"\"\" Get the collection and convert to json \"\"\" # Find table containing", "# Find table containing collection collection_file_key = config['fetch']['COLLECTION_FILE_KEY'] collection_table = load_data(config['fetch']['URL'], file_name=f'{collection_file_key}.html').find(id='collectionitems') collection", "nothing is recommended or a list of recommended player numbers. 
\"\"\" try: recommended", "encoding='utf-8') as fp: html = fp.read() if file_name.endswith('html'): return BeautifulSoup(html, 'html.parser') if file_name.endswith('xml'):", "game.get(\"id\") game_data = load_data(f'https://boardgamegeek.com/xmlapi/boardgame/{game_id}?stats=1', f'{game_id}.xml') game['boardgamecategory'] = [category.text for category in game_data.find_all('boardgamecategory')] game['boardgamesubdomain']", "version is not None: collection_item['version'] = version.text.strip() year = collection_row.find('span', class_='smallerfont') if year", "the voting poll results dict to a list containing only voting options that", "{len(collection)} items, writing JSON file') print(f'\\nCollecting game data:') for game in collection: game_id", "as JSON print(f'\\nWriting result to JSON:') with open(os.path.join(config['fetch']['RESULT_DIRECTORY'], f'{collection_file_key}.json'), 'w', encoding='UTF-8') as fp:", "file_name.endswith('xml'): return BeautifulSoup(html, 'lxml') def get_collection(): \"\"\" Get the collection and convert to", "= [domain.text for domain in game_data.find_all('boardgamesubdomain')] game['image'] = tex_or_none(game_data.find('image')).strip() game['minplayers'] = tex_or_none(game_data.find('minplayers')) game['maxplayers']", "either from web or cache if already present :param url: url to load", "plays.a is None: collection_item['plays'] = 0 else: collection_item['plays'] = int(plays.a.text) return collection_item def", "BeautifulSoup(html, 'lxml') def get_collection(): \"\"\" Get the collection and convert to json \"\"\"", "category in game_data.find_all('boardgamecategory')] game['boardgamesubdomain'] = [domain.text for domain in game_data.find_all('boardgamesubdomain')] game['image'] = tex_or_none(game_data.find('image')).strip()", ":return: a dictionary containing row values \"\"\" collection_item = dict() collection_item['name'] = collection_row.find('a',", "with open(collection_file, 'w', encoding='utf-8') as fp: 
fp.write(response.text) print(f'{file_name} saved to cache folder') html", "map_poll(suggested_numplayers, is_best)[0] game['best_maxpleyers'] = map_poll(suggested_numplayers, is_best)[-1] game['best_numplayers'] = map_poll(suggested_numplayers, is_best) game['recommended_numplayers'] = map_poll(suggested_numplayers,", "recommendations by the community :param check: Checking function to validate against :return: None", "cache if already present :param url: url to load :param file_name: name of", "file :return: soup to parse \"\"\" if not os.path.exists(config['fetch']['CACHE_DIRECTORY']): os.mkdir(config['fetch']['CACHE_DIRECTORY']) collection_file = os.path.join(config['fetch']['CACHE_DIRECTORY'],", "for (vote_option, votes) in poll.items() if check(votes)] except KeyError: return [None] if len(recommended)", "return collection_item def map_poll(poll, check): \"\"\" Map the voting poll results dict to", "load_data(f'https://boardgamegeek.com/xmlapi/boardgame/{game_id}?stats=1', f'{game_id}.xml') game['boardgamecategory'] = [category.text for category in game_data.find_all('boardgamecategory')] game['boardgamesubdomain'] = [domain.text for", "dict first = True for collection_row in collection_table.find_all('tr'): # Skip header, we don't", "the collection and convert to json \"\"\" # Find table containing collection collection_file_key", "= [category.text for category in game_data.find_all('boardgamecategory')] game['boardgamesubdomain'] = [domain.text for domain in game_data.find_all('boardgamesubdomain')]", "collection table row into a dict :param collection_row: the row to parse :return:", "Recommended']) def is_recommended(votes): return int(votes['Best']) + int(votes['Recommended']) >= int(votes['Not Recommended']) def tex_or_none(tag): if", "= list() # Iterate over collection table, store results to dict first =", "collection_row in collection_table.find_all('tr'): # Skip header, we don't care about this if first:", "collection list for later dumping 
collection.append(parse_collection_row(collection_row)) print(f'Parsed {len(collection)} items, writing JSON file') print(f'\\nCollecting", "options that pass the check :param poll: The voting poll consisting of vote", "= True for collection_row in collection_table.find_all('tr'): # Skip header, we don't care about", "from cache') with open(collection_file, 'r', encoding='utf-8') as fp: html = fp.read() if file_name.endswith('html'):", "a dictionary containing row values \"\"\" collection_item = dict() collection_item['name'] = collection_row.find('a', class_='primary').text", "collection_item['geek_rating'] = None else: collection_item['geek_rating'] = geek_rating collection_item['status'] = collection_row.find('td', class_='collection_status').text.strip() plays =", "None: collection_item['version'] = version.text.strip() year = collection_row.find('span', class_='smallerfont') if year is not None:", "len(recommended) == 0: return [None] else: return recommended def is_best(votes): return int(votes['Best']) >=", "None else: poll = dict() results = poll_data.find_all('results') for result in results: poll[result['numplayers']]", "map_poll(suggested_numplayers, is_best)[-1] game['best_numplayers'] = map_poll(suggested_numplayers, is_best) game['recommended_numplayers'] = map_poll(suggested_numplayers, is_recommended) # Finally dump", "written to cache folder') def parse_collection_row(collection_row): \"\"\" Parse a single collection table row", "poll = dict() results = poll_data.find_all('results') for result in results: poll[result['numplayers']] = {", "in poll.items() if check(votes)] except KeyError: return [None] if len(recommended) == 0: return", "= map_poll(suggested_numplayers, is_recommended) # Finally dump data as JSON print(f'\\nWriting result to JSON:')", "f'{collection_file_key}.json'), 'w', encoding='UTF-8') as fp: json.dump(collection, fp, indent=2) print(f'JSON file written to cache", "# noinspection PyUnresolvedReferences from bs4 import 
BeautifulSoup, Tag config = configparser.ConfigParser() def load_data(url,", "folder') def parse_collection_row(collection_row): \"\"\" Parse a single collection table row into a dict", "parse_poll(poll_data): if poll_data is None: return None else: poll = dict() results =", "plays = collection_row.find('td', class_='collection_plays') if plays.a is None: collection_item['plays'] = 0 else: collection_item['plays']", "{file_name} page from web') response = req.get(url) with open(collection_file, 'w', encoding='utf-8') as fp:", "import requests as req # noinspection PyUnresolvedReferences from bs4 import BeautifulSoup, Tag config", "tex_or_none(collection_row.find('div', class_='ratingtext')) geek_rating = collection_row.find('td', class_='collection_bggrating').text.strip() if geek_rating == 'N/A': collection_item['geek_rating'] = None", "we don't care about this if first: first = False continue # Append", "of vote topic with recommendations by the community :param check: Checking function to", "poll_data is None: return None else: poll = dict() results = poll_data.find_all('results') for", "if nothing is recommended or a list of recommended player numbers. 
\"\"\" try:", "except KeyError: return [None] if len(recommended) == 0: return [None] else: return recommended", ":param check: Checking function to validate against :return: None if nothing is recommended", "game['recommended_numplayers'] = map_poll(suggested_numplayers, is_recommended) # Finally dump data as JSON print(f'\\nWriting result to", "= collection_row.find('td', class_='collection_status').text.strip() plays = collection_row.find('td', class_='collection_plays') if plays.a is None: collection_item['plays'] =", "indent=2) print(f'JSON file written to cache folder') def parse_collection_row(collection_row): \"\"\" Parse a single", "in game_data.find_all('boardgamesubdomain')] game['image'] = tex_or_none(game_data.find('image')).strip() game['minplayers'] = tex_or_none(game_data.find('minplayers')) game['maxplayers'] = tex_or_none(game_data.find('maxplayers')) game['playingtime'] =", "# Finally dump data as JSON print(f'\\nWriting result to JSON:') with open(os.path.join(config['fetch']['RESULT_DIRECTORY'], f'{collection_file_key}.json'),", "None else: collection_item['geek_rating'] = geek_rating collection_item['status'] = collection_row.find('td', class_='collection_status').text.strip() plays = collection_row.find('td', class_='collection_plays')", "collection_file_key = config['fetch']['COLLECTION_FILE_KEY'] collection_table = load_data(config['fetch']['URL'], file_name=f'{collection_file_key}.html').find(id='collectionitems') collection = list() # Iterate over", "that pass the check :param poll: The voting poll consisting of vote topic", "result.children if isinstance(child, Tag) } return poll if __name__ == '__main__': config.read(\"config.ini\") get_collection()", "items, writing JSON file') print(f'\\nCollecting game data:') for game in collection: game_id =", "def parse_collection_row(collection_row): \"\"\" Parse a single collection table row into a dict :param", "Recommended']) def tex_or_none(tag): if tag is None: return None else: return 
tag.text def", "the row to parse :return: a dictionary containing row values \"\"\" collection_item =", "into a dict :param collection_row: the row to parse :return: a dictionary containing", "\"\"\" Load data either from web or cache if already present :param url:", "is recommended or a list of recommended player numbers. \"\"\" try: recommended =", "fp: json.dump(collection, fp, indent=2) print(f'JSON file written to cache folder') def parse_collection_row(collection_row): \"\"\"", "if plays.a is None: collection_item['plays'] = 0 else: collection_item['plays'] = int(plays.a.text) return collection_item", "a dict :param collection_row: the row to parse :return: a dictionary containing row", "'w', encoding='utf-8') as fp: fp.write(response.text) print(f'{file_name} saved to cache folder') html = response.text", "= geek_rating collection_item['status'] = collection_row.find('td', class_='collection_status').text.strip() plays = collection_row.find('td', class_='collection_plays') if plays.a is", "or cache if already present :param url: url to load :param file_name: name", "print(f'\\nWriting result to JSON:') with open(os.path.join(config['fetch']['RESULT_DIRECTORY'], f'{collection_file_key}.json'), 'w', encoding='UTF-8') as fp: json.dump(collection, fp,", "= tex_or_none(game_data.find('image')).strip() game['minplayers'] = tex_or_none(game_data.find('minplayers')) game['maxplayers'] = tex_or_none(game_data.find('maxplayers')) game['playingtime'] = tex_or_none(game_data.find('playingtime')) suggested_numplayers =", ">= int(votes['Recommended']) + int(votes['Not Recommended']) def is_recommended(votes): return int(votes['Best']) + int(votes['Recommended']) >= int(votes['Not", "vote topic with recommendations by the community :param check: Checking function to validate", "game['image'] = tex_or_none(game_data.find('image')).strip() game['minplayers'] = tex_or_none(game_data.find('minplayers')) game['maxplayers'] = tex_or_none(game_data.find('maxplayers')) 
game['playingtime'] = tex_or_none(game_data.find('playingtime')) suggested_numplayers", "list of recommended player numbers. \"\"\" try: recommended = [vote_option for (vote_option, votes)", "JSON:') with open(os.path.join(config['fetch']['RESULT_DIRECTORY'], f'{collection_file_key}.json'), 'w', encoding='UTF-8') as fp: json.dump(collection, fp, indent=2) print(f'JSON file", "not os.path.exists(config['fetch']['CACHE_DIRECTORY']): os.mkdir(config['fetch']['CACHE_DIRECTORY']) collection_file = os.path.join(config['fetch']['CACHE_DIRECTORY'], file_name) if not os.path.exists(collection_file): print(f'Reading {file_name} page", "'r', encoding='utf-8') as fp: html = fp.read() if file_name.endswith('html'): return BeautifulSoup(html, 'html.parser') if", "of cached file :return: soup to parse \"\"\" if not os.path.exists(config['fetch']['CACHE_DIRECTORY']): os.mkdir(config['fetch']['CACHE_DIRECTORY']) collection_file", "row values \"\"\" collection_item = dict() collection_item['name'] = collection_row.find('a', class_='primary').text version = collection_row.find('div',", "= map_poll(suggested_numplayers, is_best) game['recommended_numplayers'] = map_poll(suggested_numplayers, is_recommended) # Finally dump data as JSON", "game_data = load_data(f'https://boardgamegeek.com/xmlapi/boardgame/{game_id}?stats=1', f'{game_id}.xml') game['boardgamecategory'] = [category.text for category in game_data.find_all('boardgamecategory')] game['boardgamesubdomain'] =", "tex_or_none(game_data.find('minplayers')) game['maxplayers'] = tex_or_none(game_data.find('maxplayers')) game['playingtime'] = tex_or_none(game_data.find('playingtime')) suggested_numplayers = parse_poll(game_data.find('poll', attrs={\"name\": \"suggested_numplayers\"})) game['best_minplayers']", "collection_file = os.path.join(config['fetch']['CACHE_DIRECTORY'], file_name) if not os.path.exists(collection_file): print(f'Reading {file_name} page from web') response", "fp, indent=2) print(f'JSON file written to cache 
folder') def parse_collection_row(collection_row): \"\"\" Parse a", ">= int(votes['Not Recommended']) def tex_or_none(tag): if tag is None: return None else: return", "poll: The voting poll consisting of vote topic with recommendations by the community", "present :param url: url to load :param file_name: name of cached file :return:", "as fp: fp.write(response.text) print(f'{file_name} saved to cache folder') html = response.text else: print(f'Reading", "for later dumping collection.append(parse_collection_row(collection_row)) print(f'Parsed {len(collection)} items, writing JSON file') print(f'\\nCollecting game data:')", "== 0: return [None] else: return recommended def is_best(votes): return int(votes['Best']) >= int(votes['Recommended'])", "int(votes['Not Recommended']) def tex_or_none(tag): if tag is None: return None else: return tag.text", "is None: return None else: poll = dict() results = poll_data.find_all('results') for result", "is_best)[0] game['best_maxpleyers'] = map_poll(suggested_numplayers, is_best)[-1] game['best_numplayers'] = map_poll(suggested_numplayers, is_best) game['recommended_numplayers'] = map_poll(suggested_numplayers, is_recommended)", "= game.get(\"id\") game_data = load_data(f'https://boardgamegeek.com/xmlapi/boardgame/{game_id}?stats=1', f'{game_id}.xml') game['boardgamecategory'] = [category.text for category in game_data.find_all('boardgamecategory')]", ":param collection_row: the row to parse :return: a dictionary containing row values \"\"\"", "dumping collection.append(parse_collection_row(collection_row)) print(f'Parsed {len(collection)} items, writing JSON file') print(f'\\nCollecting game data:') for game", "\"\"\" # Find table containing collection collection_file_key = config['fetch']['COLLECTION_FILE_KEY'] collection_table = load_data(config['fetch']['URL'], file_name=f'{collection_file_key}.html').find(id='collectionitems')", "= tex_or_none(game_data.find('playingtime')) suggested_numplayers = 
parse_poll(game_data.find('poll', attrs={\"name\": \"suggested_numplayers\"})) game['best_minplayers'] = map_poll(suggested_numplayers, is_best)[0] game['best_maxpleyers'] =", "year = collection_row.find('span', class_='smallerfont') if year is not None: collection_item['year'] = year.text[1:-1] collection_item['id']", "== 'N/A': collection_item['geek_rating'] = None else: collection_item['geek_rating'] = geek_rating collection_item['status'] = collection_row.find('td', class_='collection_status').text.strip()", "None: collection_item['year'] = year.text[1:-1] collection_item['id'] = collection_row.find('a', class_='primary')['href'].split('/')[2] collection_item['user_rating'] = tex_or_none(collection_row.find('div', class_='ratingtext')) geek_rating", "= [vote_option for (vote_option, votes) in poll.items() if check(votes)] except KeyError: return [None]", "[None] else: return recommended def is_best(votes): return int(votes['Best']) >= int(votes['Recommended']) + int(votes['Not Recommended'])", "<reponame>doofmars/bgg-quartets # coding=utf-8 import json import os import configparser import requests as req", "def is_recommended(votes): return int(votes['Best']) + int(votes['Recommended']) >= int(votes['Not Recommended']) def tex_or_none(tag): if tag", "else: print(f'Reading {file_name} from cache') with open(collection_file, 'r', encoding='utf-8') as fp: html =", "if first: first = False continue # Append parsed collection row to collection", "return None else: return tag.text def parse_poll(poll_data): if poll_data is None: return None", "= dict() results = poll_data.find_all('results') for result in results: poll[result['numplayers']] = { str(child['value']):", "The voting poll consisting of vote topic with recommendations by the community :param", ":return: None if nothing is recommended or a list of recommended player numbers.", "for result in results: poll[result['numplayers']] = { str(child['value']): child['numvotes'] for child in result.children", 
"collection.append(parse_collection_row(collection_row)) print(f'Parsed {len(collection)} items, writing JSON file') print(f'\\nCollecting game data:') for game in", "recommended or a list of recommended player numbers. \"\"\" try: recommended = [vote_option", "to cache folder') html = response.text else: print(f'Reading {file_name} from cache') with open(collection_file,", "= os.path.join(config['fetch']['CACHE_DIRECTORY'], file_name) if not os.path.exists(collection_file): print(f'Reading {file_name} page from web') response =", "parsed collection row to collection list for later dumping collection.append(parse_collection_row(collection_row)) print(f'Parsed {len(collection)} items,", "req # noinspection PyUnresolvedReferences from bs4 import BeautifulSoup, Tag config = configparser.ConfigParser() def", "= load_data(config['fetch']['URL'], file_name=f'{collection_file_key}.html').find(id='collectionitems') collection = list() # Iterate over collection table, store results", "collection table, store results to dict first = True for collection_row in collection_table.find_all('tr'):", "for child in result.children if isinstance(child, Tag) } return poll if __name__ ==", "+ int(votes['Recommended']) >= int(votes['Not Recommended']) def tex_or_none(tag): if tag is None: return None", "configparser.ConfigParser() def load_data(url, file_name): \"\"\" Load data either from web or cache if", "dict() results = poll_data.find_all('results') for result in results: poll[result['numplayers']] = { str(child['value']): child['numvotes']", "return [None] if len(recommended) == 0: return [None] else: return recommended def is_best(votes):", "return BeautifulSoup(html, 'html.parser') if file_name.endswith('xml'): return BeautifulSoup(html, 'lxml') def get_collection(): \"\"\" Get the", "None if nothing is recommended or a list of recommended player numbers. 
\"\"\"", "print(f'JSON file written to cache folder') def parse_collection_row(collection_row): \"\"\" Parse a single collection", "the check :param poll: The voting poll consisting of vote topic with recommendations", "from web or cache if already present :param url: url to load :param", "file written to cache folder') def parse_collection_row(collection_row): \"\"\" Parse a single collection table", "collection_item['year'] = year.text[1:-1] collection_item['id'] = collection_row.find('a', class_='primary')['href'].split('/')[2] collection_item['user_rating'] = tex_or_none(collection_row.find('div', class_='ratingtext')) geek_rating =", "to cache folder') def parse_collection_row(collection_row): \"\"\" Parse a single collection table row into", "Parse a single collection table row into a dict :param collection_row: the row", "True for collection_row in collection_table.find_all('tr'): # Skip header, we don't care about this", "poll consisting of vote topic with recommendations by the community :param check: Checking", "to json \"\"\" # Find table containing collection collection_file_key = config['fetch']['COLLECTION_FILE_KEY'] collection_table =", "to validate against :return: None if nothing is recommended or a list of", "tag.text def parse_poll(poll_data): if poll_data is None: return None else: poll = dict()", "first = False continue # Append parsed collection row to collection list for", "{ str(child['value']): child['numvotes'] for child in result.children if isinstance(child, Tag) } return poll", "is not None: collection_item['year'] = year.text[1:-1] collection_item['id'] = collection_row.find('a', class_='primary')['href'].split('/')[2] collection_item['user_rating'] = tex_or_none(collection_row.find('div',", "game['boardgamecategory'] = [category.text for category in game_data.find_all('boardgamecategory')] game['boardgamesubdomain'] = [domain.text for domain in", "dict() collection_item['name'] = collection_row.find('a', class_='primary').text version 
= collection_row.find('div', class_='geekitem_name') if version is not", "table row into a dict :param collection_row: the row to parse :return: a", "Append parsed collection row to collection list for later dumping collection.append(parse_collection_row(collection_row)) print(f'Parsed {len(collection)}", "else: return recommended def is_best(votes): return int(votes['Best']) >= int(votes['Recommended']) + int(votes['Not Recommended']) def", "response.text else: print(f'Reading {file_name} from cache') with open(collection_file, 'r', encoding='utf-8') as fp: html", "the community :param check: Checking function to validate against :return: None if nothing", "voting poll consisting of vote topic with recommendations by the community :param check:", "from web') response = req.get(url) with open(collection_file, 'w', encoding='utf-8') as fp: fp.write(response.text) print(f'{file_name}", "player numbers. \"\"\" try: recommended = [vote_option for (vote_option, votes) in poll.items() if", "for game in collection: game_id = game.get(\"id\") game_data = load_data(f'https://boardgamegeek.com/xmlapi/boardgame/{game_id}?stats=1', f'{game_id}.xml') game['boardgamecategory'] =", "= parse_poll(game_data.find('poll', attrs={\"name\": \"suggested_numplayers\"})) game['best_minplayers'] = map_poll(suggested_numplayers, is_best)[0] game['best_maxpleyers'] = map_poll(suggested_numplayers, is_best)[-1] game['best_numplayers']", "class_='primary')['href'].split('/')[2] collection_item['user_rating'] = tex_or_none(collection_row.find('div', class_='ratingtext')) geek_rating = collection_row.find('td', class_='collection_bggrating').text.strip() if geek_rating == 'N/A':", "collection_item def map_poll(poll, check): \"\"\" Map the voting poll results dict to a", "is_recommended(votes): return int(votes['Best']) + int(votes['Recommended']) >= int(votes['Not Recommended']) def tex_or_none(tag): if tag is", "result in results: poll[result['numplayers']] = { str(child['value']): 
child['numvotes'] for child in result.children if", "page from web') response = req.get(url) with open(collection_file, 'w', encoding='utf-8') as fp: fp.write(response.text)", "load :param file_name: name of cached file :return: soup to parse \"\"\" if", "is_best(votes): return int(votes['Best']) >= int(votes['Recommended']) + int(votes['Not Recommended']) def is_recommended(votes): return int(votes['Best']) +", "file_name) if not os.path.exists(collection_file): print(f'Reading {file_name} page from web') response = req.get(url) with", "year is not None: collection_item['year'] = year.text[1:-1] collection_item['id'] = collection_row.find('a', class_='primary')['href'].split('/')[2] collection_item['user_rating'] =", "class_='ratingtext')) geek_rating = collection_row.find('td', class_='collection_bggrating').text.strip() if geek_rating == 'N/A': collection_item['geek_rating'] = None else:", "collection collection_file_key = config['fetch']['COLLECTION_FILE_KEY'] collection_table = load_data(config['fetch']['URL'], file_name=f'{collection_file_key}.html').find(id='collectionitems') collection = list() # Iterate", "\"\"\" Map the voting poll results dict to a list containing only voting", "row into a dict :param collection_row: the row to parse :return: a dictionary", "if geek_rating == 'N/A': collection_item['geek_rating'] = None else: collection_item['geek_rating'] = geek_rating collection_item['status'] =", "int(votes['Not Recommended']) def is_recommended(votes): return int(votes['Best']) + int(votes['Recommended']) >= int(votes['Not Recommended']) def tex_or_none(tag):", "voting options that pass the check :param poll: The voting poll consisting of", "open(collection_file, 'r', encoding='utf-8') as fp: html = fp.read() if file_name.endswith('html'): return BeautifulSoup(html, 'html.parser')", "folder') html = response.text else: print(f'Reading {file_name} from cache') with open(collection_file, 'r', encoding='utf-8')", "of recommended player numbers. 
\"\"\" try: recommended = [vote_option for (vote_option, votes) in", "tag is None: return None else: return tag.text def parse_poll(poll_data): if poll_data is", "parse_poll(game_data.find('poll', attrs={\"name\": \"suggested_numplayers\"})) game['best_minplayers'] = map_poll(suggested_numplayers, is_best)[0] game['best_maxpleyers'] = map_poll(suggested_numplayers, is_best)[-1] game['best_numplayers'] =", "collection_table = load_data(config['fetch']['URL'], file_name=f'{collection_file_key}.html').find(id='collectionitems') collection = list() # Iterate over collection table, store", "collection_row.find('a', class_='primary').text version = collection_row.find('div', class_='geekitem_name') if version is not None: collection_item['version'] =", "= collection_row.find('div', class_='geekitem_name') if version is not None: collection_item['version'] = version.text.strip() year =", "containing only voting options that pass the check :param poll: The voting poll", "child in result.children if isinstance(child, Tag) } return poll if __name__ == '__main__':", "if poll_data is None: return None else: poll = dict() results = poll_data.find_all('results')", "suggested_numplayers = parse_poll(game_data.find('poll', attrs={\"name\": \"suggested_numplayers\"})) game['best_minplayers'] = map_poll(suggested_numplayers, is_best)[0] game['best_maxpleyers'] = map_poll(suggested_numplayers, is_best)[-1]", "map_poll(suggested_numplayers, is_best) game['recommended_numplayers'] = map_poll(suggested_numplayers, is_recommended) # Finally dump data as JSON print(f'\\nWriting", "return tag.text def parse_poll(poll_data): if poll_data is None: return None else: poll =", "= False continue # Append parsed collection row to collection list for later", "= { str(child['value']): child['numvotes'] for child in result.children if isinstance(child, Tag) } return", "fp: html = fp.read() if file_name.endswith('html'): return BeautifulSoup(html, 'html.parser') if file_name.endswith('xml'): return 
BeautifulSoup(html,", "configparser import requests as req # noinspection PyUnresolvedReferences from bs4 import BeautifulSoup, Tag", "parse \"\"\" if not os.path.exists(config['fetch']['CACHE_DIRECTORY']): os.mkdir(config['fetch']['CACHE_DIRECTORY']) collection_file = os.path.join(config['fetch']['CACHE_DIRECTORY'], file_name) if not os.path.exists(collection_file):", "saved to cache folder') html = response.text else: print(f'Reading {file_name} from cache') with", "coding=utf-8 import json import os import configparser import requests as req # noinspection", "= version.text.strip() year = collection_row.find('span', class_='smallerfont') if year is not None: collection_item['year'] =", "class_='smallerfont') if year is not None: collection_item['year'] = year.text[1:-1] collection_item['id'] = collection_row.find('a', class_='primary')['href'].split('/')[2]", "not None: collection_item['version'] = version.text.strip() year = collection_row.find('span', class_='smallerfont') if year is not", "= configparser.ConfigParser() def load_data(url, file_name): \"\"\" Load data either from web or cache", "consisting of vote topic with recommendations by the community :param check: Checking function", "if file_name.endswith('xml'): return BeautifulSoup(html, 'lxml') def get_collection(): \"\"\" Get the collection and convert", "load_data(config['fetch']['URL'], file_name=f'{collection_file_key}.html').find(id='collectionitems') collection = list() # Iterate over collection table, store results to", "fp.write(response.text) print(f'{file_name} saved to cache folder') html = response.text else: print(f'Reading {file_name} from", "print(f'\\nCollecting game data:') for game in collection: game_id = game.get(\"id\") game_data = load_data(f'https://boardgamegeek.com/xmlapi/boardgame/{game_id}?stats=1',", "dump data as JSON print(f'\\nWriting result to JSON:') with open(os.path.join(config['fetch']['RESULT_DIRECTORY'], f'{collection_file_key}.json'), 'w', encoding='UTF-8')", 
"tex_or_none(game_data.find('image')).strip() game['minplayers'] = tex_or_none(game_data.find('minplayers')) game['maxplayers'] = tex_or_none(game_data.find('maxplayers')) game['playingtime'] = tex_or_none(game_data.find('playingtime')) suggested_numplayers = parse_poll(game_data.find('poll',", "collection_item['plays'] = int(plays.a.text) return collection_item def map_poll(poll, check): \"\"\" Map the voting poll", "game_data.find_all('boardgamesubdomain')] game['image'] = tex_or_none(game_data.find('image')).strip() game['minplayers'] = tex_or_none(game_data.find('minplayers')) game['maxplayers'] = tex_or_none(game_data.find('maxplayers')) game['playingtime'] = tex_or_none(game_data.find('playingtime'))", "= collection_row.find('span', class_='smallerfont') if year is not None: collection_item['year'] = year.text[1:-1] collection_item['id'] =", "def tex_or_none(tag): if tag is None: return None else: return tag.text def parse_poll(poll_data):", "game_data.find_all('boardgamecategory')] game['boardgamesubdomain'] = [domain.text for domain in game_data.find_all('boardgamesubdomain')] game['image'] = tex_or_none(game_data.find('image')).strip() game['minplayers'] =", "map_poll(poll, check): \"\"\" Map the voting poll results dict to a list containing", "results = poll_data.find_all('results') for result in results: poll[result['numplayers']] = { str(child['value']): child['numvotes'] for", "fp.read() if file_name.endswith('html'): return BeautifulSoup(html, 'html.parser') if file_name.endswith('xml'): return BeautifulSoup(html, 'lxml') def get_collection():", "name of cached file :return: soup to parse \"\"\" if not os.path.exists(config['fetch']['CACHE_DIRECTORY']): os.mkdir(config['fetch']['CACHE_DIRECTORY'])", "game['best_maxpleyers'] = map_poll(suggested_numplayers, is_best)[-1] game['best_numplayers'] = map_poll(suggested_numplayers, is_best) game['recommended_numplayers'] = map_poll(suggested_numplayers, is_recommended) #", "later dumping 
collection.append(parse_collection_row(collection_row)) print(f'Parsed {len(collection)} items, writing JSON file') print(f'\\nCollecting game data:') for", "'html.parser') if file_name.endswith('xml'): return BeautifulSoup(html, 'lxml') def get_collection(): \"\"\" Get the collection and", "to JSON:') with open(os.path.join(config['fetch']['RESULT_DIRECTORY'], f'{collection_file_key}.json'), 'w', encoding='UTF-8') as fp: json.dump(collection, fp, indent=2) print(f'JSON", "return recommended def is_best(votes): return int(votes['Best']) >= int(votes['Recommended']) + int(votes['Not Recommended']) def is_recommended(votes):", "collection_row.find('td', class_='collection_status').text.strip() plays = collection_row.find('td', class_='collection_plays') if plays.a is None: collection_item['plays'] = 0", "None else: return tag.text def parse_poll(poll_data): if poll_data is None: return None else:", "class_='collection_bggrating').text.strip() if geek_rating == 'N/A': collection_item['geek_rating'] = None else: collection_item['geek_rating'] = geek_rating collection_item['status']", "file_name): \"\"\" Load data either from web or cache if already present :param", "to dict first = True for collection_row in collection_table.find_all('tr'): # Skip header, we", "BeautifulSoup, Tag config = configparser.ConfigParser() def load_data(url, file_name): \"\"\" Load data either from", "table containing collection collection_file_key = config['fetch']['COLLECTION_FILE_KEY'] collection_table = load_data(config['fetch']['URL'], file_name=f'{collection_file_key}.html').find(id='collectionitems') collection = list()", "int(votes['Best']) >= int(votes['Recommended']) + int(votes['Not Recommended']) def is_recommended(votes): return int(votes['Best']) + int(votes['Recommended']) >=", "web') response = req.get(url) with open(collection_file, 'w', encoding='utf-8') as fp: fp.write(response.text) print(f'{file_name} saved", "= 0 else: collection_item['plays'] = int(plays.a.text) return 
collection_item def map_poll(poll, check): \"\"\" Map", "False continue # Append parsed collection row to collection list for later dumping", "file_name: name of cached file :return: soup to parse \"\"\" if not os.path.exists(config['fetch']['CACHE_DIRECTORY']):", "list containing only voting options that pass the check :param poll: The voting", "containing collection collection_file_key = config['fetch']['COLLECTION_FILE_KEY'] collection_table = load_data(config['fetch']['URL'], file_name=f'{collection_file_key}.html').find(id='collectionitems') collection = list() #", "0: return [None] else: return recommended def is_best(votes): return int(votes['Best']) >= int(votes['Recommended']) +", "response = req.get(url) with open(collection_file, 'w', encoding='utf-8') as fp: fp.write(response.text) print(f'{file_name} saved to", "results to dict first = True for collection_row in collection_table.find_all('tr'): # Skip header,", "a single collection table row into a dict :param collection_row: the row to", "def parse_poll(poll_data): if poll_data is None: return None else: poll = dict() results", "encoding='UTF-8') as fp: json.dump(collection, fp, indent=2) print(f'JSON file written to cache folder') def", "row to parse :return: a dictionary containing row values \"\"\" collection_item = dict()", "collection row to collection list for later dumping collection.append(parse_collection_row(collection_row)) print(f'Parsed {len(collection)} items, writing", "only voting options that pass the check :param poll: The voting poll consisting", "table, store results to dict first = True for collection_row in collection_table.find_all('tr'): #", "header, we don't care about this if first: first = False continue #", "poll.items() if check(votes)] except KeyError: return [None] if len(recommended) == 0: return [None]", "= year.text[1:-1] collection_item['id'] = collection_row.find('a', class_='primary')['href'].split('/')[2] collection_item['user_rating'] = 
tex_or_none(collection_row.find('div', class_='ratingtext')) geek_rating = collection_row.find('td',", "data either from web or cache if already present :param url: url to", "values \"\"\" collection_item = dict() collection_item['name'] = collection_row.find('a', class_='primary').text version = collection_row.find('div', class_='geekitem_name')", "= tex_or_none(game_data.find('maxplayers')) game['playingtime'] = tex_or_none(game_data.find('playingtime')) suggested_numplayers = parse_poll(game_data.find('poll', attrs={\"name\": \"suggested_numplayers\"})) game['best_minplayers'] = map_poll(suggested_numplayers,", "= response.text else: print(f'Reading {file_name} from cache') with open(collection_file, 'r', encoding='utf-8') as fp:", "geek_rating == 'N/A': collection_item['geek_rating'] = None else: collection_item['geek_rating'] = geek_rating collection_item['status'] = collection_row.find('td',", "Finally dump data as JSON print(f'\\nWriting result to JSON:') with open(os.path.join(config['fetch']['RESULT_DIRECTORY'], f'{collection_file_key}.json'), 'w',", "a list containing only voting options that pass the check :param poll: The", "poll_data.find_all('results') for result in results: poll[result['numplayers']] = { str(child['value']): child['numvotes'] for child in", "game data:') for game in collection: game_id = game.get(\"id\") game_data = load_data(f'https://boardgamegeek.com/xmlapi/boardgame/{game_id}?stats=1', f'{game_id}.xml')", "= tex_or_none(collection_row.find('div', class_='ratingtext')) geek_rating = collection_row.find('td', class_='collection_bggrating').text.strip() if geek_rating == 'N/A': collection_item['geek_rating'] =", "int(votes['Best']) + int(votes['Recommended']) >= int(votes['Not Recommended']) def tex_or_none(tag): if tag is None: return", "if version is not None: collection_item['version'] = version.text.strip() year = collection_row.find('span', class_='smallerfont') if", "else: return tag.text def parse_poll(poll_data): if 
poll_data is None: return None else: poll", "voting poll results dict to a list containing only voting options that pass", "check: Checking function to validate against :return: None if nothing is recommended or", "as req # noinspection PyUnresolvedReferences from bs4 import BeautifulSoup, Tag config = configparser.ConfigParser()", "if check(votes)] except KeyError: return [None] if len(recommended) == 0: return [None] else:", "= poll_data.find_all('results') for result in results: poll[result['numplayers']] = { str(child['value']): child['numvotes'] for child", "for domain in game_data.find_all('boardgamesubdomain')] game['image'] = tex_or_none(game_data.find('image')).strip() game['minplayers'] = tex_or_none(game_data.find('minplayers')) game['maxplayers'] = tex_or_none(game_data.find('maxplayers'))", "results dict to a list containing only voting options that pass the check", "in game_data.find_all('boardgamecategory')] game['boardgamesubdomain'] = [domain.text for domain in game_data.find_all('boardgamesubdomain')] game['image'] = tex_or_none(game_data.find('image')).strip() game['minplayers']", "not None: collection_item['year'] = year.text[1:-1] collection_item['id'] = collection_row.find('a', class_='primary')['href'].split('/')[2] collection_item['user_rating'] = tex_or_none(collection_row.find('div', class_='ratingtext'))", "from bs4 import BeautifulSoup, Tag config = configparser.ConfigParser() def load_data(url, file_name): \"\"\" Load", "if file_name.endswith('html'): return BeautifulSoup(html, 'html.parser') if file_name.endswith('xml'): return BeautifulSoup(html, 'lxml') def get_collection(): \"\"\"", "if len(recommended) == 0: return [None] else: return recommended def is_best(votes): return int(votes['Best'])", "PyUnresolvedReferences from bs4 import BeautifulSoup, Tag config = configparser.ConfigParser() def load_data(url, file_name): \"\"\"", "in results: poll[result['numplayers']] = { str(child['value']): child['numvotes'] for child in 
result.children if isinstance(child,", ":return: soup to parse \"\"\" if not os.path.exists(config['fetch']['CACHE_DIRECTORY']): os.mkdir(config['fetch']['CACHE_DIRECTORY']) collection_file = os.path.join(config['fetch']['CACHE_DIRECTORY'], file_name)", "is_best) game['recommended_numplayers'] = map_poll(suggested_numplayers, is_recommended) # Finally dump data as JSON print(f'\\nWriting result", "if year is not None: collection_item['year'] = year.text[1:-1] collection_item['id'] = collection_row.find('a', class_='primary')['href'].split('/')[2] collection_item['user_rating']", "web or cache if already present :param url: url to load :param file_name:", "bs4 import BeautifulSoup, Tag config = configparser.ConfigParser() def load_data(url, file_name): \"\"\" Load data", "open(os.path.join(config['fetch']['RESULT_DIRECTORY'], f'{collection_file_key}.json'), 'w', encoding='UTF-8') as fp: json.dump(collection, fp, indent=2) print(f'JSON file written to", "# Iterate over collection table, store results to dict first = True for", "pass the check :param poll: The voting poll consisting of vote topic with", "return None else: poll = dict() results = poll_data.find_all('results') for result in results:", "is_recommended) # Finally dump data as JSON print(f'\\nWriting result to JSON:') with open(os.path.join(config['fetch']['RESULT_DIRECTORY'],", "json import os import configparser import requests as req # noinspection PyUnresolvedReferences from", "child['numvotes'] for child in result.children if isinstance(child, Tag) } return poll if __name__", "int(votes['Recommended']) >= int(votes['Not Recommended']) def tex_or_none(tag): if tag is None: return None else:", "Find table containing collection collection_file_key = config['fetch']['COLLECTION_FILE_KEY'] collection_table = load_data(config['fetch']['URL'], file_name=f'{collection_file_key}.html').find(id='collectionitems') collection =", "or a list of recommended player numbers. 
\"\"\" try: recommended = [vote_option for", "def is_best(votes): return int(votes['Best']) >= int(votes['Recommended']) + int(votes['Not Recommended']) def is_recommended(votes): return int(votes['Best'])", "first = True for collection_row in collection_table.find_all('tr'): # Skip header, we don't care", "import BeautifulSoup, Tag config = configparser.ConfigParser() def load_data(url, file_name): \"\"\" Load data either", "tex_or_none(tag): if tag is None: return None else: return tag.text def parse_poll(poll_data): if", "str(child['value']): child['numvotes'] for child in result.children if isinstance(child, Tag) } return poll if", "collection and convert to json \"\"\" # Find table containing collection collection_file_key =", "collection_item['name'] = collection_row.find('a', class_='primary').text version = collection_row.find('div', class_='geekitem_name') if version is not None:", "None: return None else: poll = dict() results = poll_data.find_all('results') for result in", "return int(votes['Best']) + int(votes['Recommended']) >= int(votes['Not Recommended']) def tex_or_none(tag): if tag is None:", "= None else: collection_item['geek_rating'] = geek_rating collection_item['status'] = collection_row.find('td', class_='collection_status').text.strip() plays = collection_row.find('td',", "int(plays.a.text) return collection_item def map_poll(poll, check): \"\"\" Map the voting poll results dict", "= config['fetch']['COLLECTION_FILE_KEY'] collection_table = load_data(config['fetch']['URL'], file_name=f'{collection_file_key}.html').find(id='collectionitems') collection = list() # Iterate over collection", "class_='primary').text version = collection_row.find('div', class_='geekitem_name') if version is not None: collection_item['version'] = version.text.strip()", "requests as req # noinspection PyUnresolvedReferences from bs4 import BeautifulSoup, Tag config =", "(vote_option, votes) in poll.items() if check(votes)] except KeyError: return [None] if 
len(recommended) ==", "os.mkdir(config['fetch']['CACHE_DIRECTORY']) collection_file = os.path.join(config['fetch']['CACHE_DIRECTORY'], file_name) if not os.path.exists(collection_file): print(f'Reading {file_name} page from web')", "os.path.join(config['fetch']['CACHE_DIRECTORY'], file_name) if not os.path.exists(collection_file): print(f'Reading {file_name} page from web') response = req.get(url)", "first: first = False continue # Append parsed collection row to collection list", "version.text.strip() year = collection_row.find('span', class_='smallerfont') if year is not None: collection_item['year'] = year.text[1:-1]", "else: poll = dict() results = poll_data.find_all('results') for result in results: poll[result['numplayers']] =", "in collection_table.find_all('tr'): # Skip header, we don't care about this if first: first", "as fp: json.dump(collection, fp, indent=2) print(f'JSON file written to cache folder') def parse_collection_row(collection_row):", "[domain.text for domain in game_data.find_all('boardgamesubdomain')] game['image'] = tex_or_none(game_data.find('image')).strip() game['minplayers'] = tex_or_none(game_data.find('minplayers')) game['maxplayers'] =", "writing JSON file') print(f'\\nCollecting game data:') for game in collection: game_id = game.get(\"id\")", "is not None: collection_item['version'] = version.text.strip() year = collection_row.find('span', class_='smallerfont') if year is", "a list of recommended player numbers. 
\"\"\" try: recommended = [vote_option for (vote_option,", "Map the voting poll results dict to a list containing only voting options", "validate against :return: None if nothing is recommended or a list of recommended", "= map_poll(suggested_numplayers, is_best)[0] game['best_maxpleyers'] = map_poll(suggested_numplayers, is_best)[-1] game['best_numplayers'] = map_poll(suggested_numplayers, is_best) game['recommended_numplayers'] =", "collection_row: the row to parse :return: a dictionary containing row values \"\"\" collection_item", "containing row values \"\"\" collection_item = dict() collection_item['name'] = collection_row.find('a', class_='primary').text version =", "load_data(url, file_name): \"\"\" Load data either from web or cache if already present", "= collection_row.find('a', class_='primary').text version = collection_row.find('div', class_='geekitem_name') if version is not None: collection_item['version']", "else: collection_item['plays'] = int(plays.a.text) return collection_item def map_poll(poll, check): \"\"\" Map the voting", "[vote_option for (vote_option, votes) in poll.items() if check(votes)] except KeyError: return [None] if", "to load :param file_name: name of cached file :return: soup to parse \"\"\"", "import json import os import configparser import requests as req # noinspection PyUnresolvedReferences", "single collection table row into a dict :param collection_row: the row to parse", "to parse :return: a dictionary containing row values \"\"\" collection_item = dict() collection_item['name']", "tex_or_none(game_data.find('playingtime')) suggested_numplayers = parse_poll(game_data.find('poll', attrs={\"name\": \"suggested_numplayers\"})) game['best_minplayers'] = map_poll(suggested_numplayers, is_best)[0] game['best_maxpleyers'] = map_poll(suggested_numplayers,", "= int(plays.a.text) return collection_item def map_poll(poll, check): \"\"\" Map the voting poll results", "0 else: collection_item['plays'] = int(plays.a.text) return 
collection_item def map_poll(poll, check): \"\"\" Map the", "class_='collection_status').text.strip() plays = collection_row.find('td', class_='collection_plays') if plays.a is None: collection_item['plays'] = 0 else:", "file_name.endswith('html'): return BeautifulSoup(html, 'html.parser') if file_name.endswith('xml'): return BeautifulSoup(html, 'lxml') def get_collection(): \"\"\" Get", "community :param check: Checking function to validate against :return: None if nothing is", "game['playingtime'] = tex_or_none(game_data.find('playingtime')) suggested_numplayers = parse_poll(game_data.find('poll', attrs={\"name\": \"suggested_numplayers\"})) game['best_minplayers'] = map_poll(suggested_numplayers, is_best)[0] game['best_maxpleyers']", "os.path.exists(config['fetch']['CACHE_DIRECTORY']): os.mkdir(config['fetch']['CACHE_DIRECTORY']) collection_file = os.path.join(config['fetch']['CACHE_DIRECTORY'], file_name) if not os.path.exists(collection_file): print(f'Reading {file_name} page from", "with open(os.path.join(config['fetch']['RESULT_DIRECTORY'], f'{collection_file_key}.json'), 'w', encoding='UTF-8') as fp: json.dump(collection, fp, indent=2) print(f'JSON file written", "soup to parse \"\"\" if not os.path.exists(config['fetch']['CACHE_DIRECTORY']): os.mkdir(config['fetch']['CACHE_DIRECTORY']) collection_file = os.path.join(config['fetch']['CACHE_DIRECTORY'], file_name) if", "function to validate against :return: None if nothing is recommended or a list", "'w', encoding='UTF-8') as fp: json.dump(collection, fp, indent=2) print(f'JSON file written to cache folder')", "JSON print(f'\\nWriting result to JSON:') with open(os.path.join(config['fetch']['RESULT_DIRECTORY'], f'{collection_file_key}.json'), 'w', encoding='UTF-8') as fp: json.dump(collection,", "already present :param url: url to load :param file_name: name of cached file", "check): \"\"\" Map the voting poll results dict to a list containing only", "with recommendations by the community :param check: 
Checking function to validate against :return:", "collection_item['status'] = collection_row.find('td', class_='collection_status').text.strip() plays = collection_row.find('td', class_='collection_plays') if plays.a is None: collection_item['plays']", "to collection list for later dumping collection.append(parse_collection_row(collection_row)) print(f'Parsed {len(collection)} items, writing JSON file')", "try: recommended = [vote_option for (vote_option, votes) in poll.items() if check(votes)] except KeyError:", "{file_name} from cache') with open(collection_file, 'r', encoding='utf-8') as fp: html = fp.read() if", "html = fp.read() if file_name.endswith('html'): return BeautifulSoup(html, 'html.parser') if file_name.endswith('xml'): return BeautifulSoup(html, 'lxml')", "collection: game_id = game.get(\"id\") game_data = load_data(f'https://boardgamegeek.com/xmlapi/boardgame/{game_id}?stats=1', f'{game_id}.xml') game['boardgamecategory'] = [category.text for category", "print(f'Reading {file_name} page from web') response = req.get(url) with open(collection_file, 'w', encoding='utf-8') as", "tex_or_none(game_data.find('maxplayers')) game['playingtime'] = tex_or_none(game_data.find('playingtime')) suggested_numplayers = parse_poll(game_data.find('poll', attrs={\"name\": \"suggested_numplayers\"})) game['best_minplayers'] = map_poll(suggested_numplayers, is_best)[0]", "as fp: html = fp.read() if file_name.endswith('html'): return BeautifulSoup(html, 'html.parser') if file_name.endswith('xml'): return", ":param file_name: name of cached file :return: soup to parse \"\"\" if not", "continue # Append parsed collection row to collection list for later dumping collection.append(parse_collection_row(collection_row))", "over collection table, store results to dict first = True for collection_row in", "data:') for game in collection: game_id = game.get(\"id\") game_data = load_data(f'https://boardgamegeek.com/xmlapi/boardgame/{game_id}?stats=1', f'{game_id}.xml') 
game['boardgamecategory']", "Checking function to validate against :return: None if nothing is recommended or a", "\"\"\" if not os.path.exists(config['fetch']['CACHE_DIRECTORY']): os.mkdir(config['fetch']['CACHE_DIRECTORY']) collection_file = os.path.join(config['fetch']['CACHE_DIRECTORY'], file_name) if not os.path.exists(collection_file): print(f'Reading", "is None: collection_item['plays'] = 0 else: collection_item['plays'] = int(plays.a.text) return collection_item def map_poll(poll,", "list() # Iterate over collection table, store results to dict first = True", "def get_collection(): \"\"\" Get the collection and convert to json \"\"\" # Find", "collection_item['version'] = version.text.strip() year = collection_row.find('span', class_='smallerfont') if year is not None: collection_item['year']", "= tex_or_none(game_data.find('minplayers')) game['maxplayers'] = tex_or_none(game_data.find('maxplayers')) game['playingtime'] = tex_or_none(game_data.find('playingtime')) suggested_numplayers = parse_poll(game_data.find('poll', attrs={\"name\": \"suggested_numplayers\"}))", "= collection_row.find('td', class_='collection_plays') if plays.a is None: collection_item['plays'] = 0 else: collection_item['plays'] =", "for category in game_data.find_all('boardgamecategory')] game['boardgamesubdomain'] = [domain.text for domain in game_data.find_all('boardgamesubdomain')] game['image'] =", "is None: return None else: return tag.text def parse_poll(poll_data): if poll_data is None:", "+ int(votes['Not Recommended']) def is_recommended(votes): return int(votes['Best']) + int(votes['Recommended']) >= int(votes['Not Recommended']) def", "req.get(url) with open(collection_file, 'w', encoding='utf-8') as fp: fp.write(response.text) print(f'{file_name} saved to cache folder')", "return [None] else: return recommended def is_best(votes): return int(votes['Best']) >= int(votes['Recommended']) + int(votes['Not", ":param poll: The voting poll consisting of vote topic with 
recommendations by the", "game['boardgamesubdomain'] = [domain.text for domain in game_data.find_all('boardgamesubdomain')] game['image'] = tex_or_none(game_data.find('image')).strip() game['minplayers'] = tex_or_none(game_data.find('minplayers'))", "not os.path.exists(collection_file): print(f'Reading {file_name} page from web') response = req.get(url) with open(collection_file, 'w',", "game['maxplayers'] = tex_or_none(game_data.find('maxplayers')) game['playingtime'] = tex_or_none(game_data.find('playingtime')) suggested_numplayers = parse_poll(game_data.find('poll', attrs={\"name\": \"suggested_numplayers\"})) game['best_minplayers'] =", "dict to a list containing only voting options that pass the check :param", "in result.children if isinstance(child, Tag) } return poll if __name__ == '__main__': config.read(\"config.ini\")", "None: collection_item['plays'] = 0 else: collection_item['plays'] = int(plays.a.text) return collection_item def map_poll(poll, check):", "# Skip header, we don't care about this if first: first = False", "collection_item['id'] = collection_row.find('a', class_='primary')['href'].split('/')[2] collection_item['user_rating'] = tex_or_none(collection_row.find('div', class_='ratingtext')) geek_rating = collection_row.find('td', class_='collection_bggrating').text.strip() if", "row to collection list for later dumping collection.append(parse_collection_row(collection_row)) print(f'Parsed {len(collection)} items, writing JSON", "Iterate over collection table, store results to dict first = True for collection_row", "= collection_row.find('td', class_='collection_bggrating').text.strip() if geek_rating == 'N/A': collection_item['geek_rating'] = None else: collection_item['geek_rating'] =", "collection = list() # Iterate over collection table, store results to dict first", "this if first: first = False continue # Append parsed collection row to", "Tag config = configparser.ConfigParser() def load_data(url, file_name): \"\"\" Load data either from 
web", "f'{game_id}.xml') game['boardgamecategory'] = [category.text for category in game_data.find_all('boardgamecategory')] game['boardgamesubdomain'] = [domain.text for domain", "game_id = game.get(\"id\") game_data = load_data(f'https://boardgamegeek.com/xmlapi/boardgame/{game_id}?stats=1', f'{game_id}.xml') game['boardgamecategory'] = [category.text for category in", "recommended player numbers. \"\"\" try: recommended = [vote_option for (vote_option, votes) in poll.items()", "= req.get(url) with open(collection_file, 'w', encoding='utf-8') as fp: fp.write(response.text) print(f'{file_name} saved to cache", "parse_collection_row(collection_row): \"\"\" Parse a single collection table row into a dict :param collection_row:", "geek_rating = collection_row.find('td', class_='collection_bggrating').text.strip() if geek_rating == 'N/A': collection_item['geek_rating'] = None else: collection_item['geek_rating']", "JSON file') print(f'\\nCollecting game data:') for game in collection: game_id = game.get(\"id\") game_data", "None: return None else: return tag.text def parse_poll(poll_data): if poll_data is None: return", "print(f'Parsed {len(collection)} items, writing JSON file') print(f'\\nCollecting game data:') for game in collection:", "collection_row.find('span', class_='smallerfont') if year is not None: collection_item['year'] = year.text[1:-1] collection_item['id'] = collection_row.find('a',", "file_name=f'{collection_file_key}.html').find(id='collectionitems') collection = list() # Iterate over collection table, store results to dict", "attrs={\"name\": \"suggested_numplayers\"})) game['best_minplayers'] = map_poll(suggested_numplayers, is_best)[0] game['best_maxpleyers'] = map_poll(suggested_numplayers, is_best)[-1] game['best_numplayers'] = map_poll(suggested_numplayers,", "os.path.exists(collection_file): print(f'Reading {file_name} page from web') response = req.get(url) with open(collection_file, 'w', encoding='utf-8')", "store results to dict first = 
True for collection_row in collection_table.find_all('tr'): # Skip", "[category.text for category in game_data.find_all('boardgamecategory')] game['boardgamesubdomain'] = [domain.text for domain in game_data.find_all('boardgamesubdomain')] game['image']", "list for later dumping collection.append(parse_collection_row(collection_row)) print(f'Parsed {len(collection)} items, writing JSON file') print(f'\\nCollecting game", "by the community :param check: Checking function to validate against :return: None if", "cache folder') html = response.text else: print(f'Reading {file_name} from cache') with open(collection_file, 'r',", "year.text[1:-1] collection_item['id'] = collection_row.find('a', class_='primary')['href'].split('/')[2] collection_item['user_rating'] = tex_or_none(collection_row.find('div', class_='ratingtext')) geek_rating = collection_row.find('td', class_='collection_bggrating').text.strip()", "to parse \"\"\" if not os.path.exists(config['fetch']['CACHE_DIRECTORY']): os.mkdir(config['fetch']['CACHE_DIRECTORY']) collection_file = os.path.join(config['fetch']['CACHE_DIRECTORY'], file_name) if not", "parse :return: a dictionary containing row values \"\"\" collection_item = dict() collection_item['name'] =", "result to JSON:') with open(os.path.join(config['fetch']['RESULT_DIRECTORY'], f'{collection_file_key}.json'), 'w', encoding='UTF-8') as fp: json.dump(collection, fp, indent=2)", "[None] if len(recommended) == 0: return [None] else: return recommended def is_best(votes): return", "to a list containing only voting options that pass the check :param poll:" ]
[ "columns = [Column(col_dict) for col_dict in board_dict[\"columns\"]] swimlanes = [Swimlane(lane_dict) for lane_dict in", "self.FULL_TASKS_URL = f\"{KFLOW_BASE_URL}/tasks\" (self.id, self.name, self.columns, self.swimlanes) = self.parse_board(self.fetch_board_json(token)) tasks_by_column = [self.fetch_tasks_by_column(column, self.name,", "for sublist in lists_of_tasks for task in sublist] def fetch_tasks_by_column(self, column, board_name, token):", "for column in self.columns] self.tasks = self.flatten_tasks(tasks_by_column) def fetch_board_json(self, token): self.log.info(\"Pulling token: %s\",", "tasks for %s column in %s board\", column.name, board_name) def fetch_tasks(column_id, next_task=None): base_url", "f\"{KFLOW_BASE_URL}/tasks\" (self.id, self.name, self.columns, self.swimlanes) = self.parse_board(self.fetch_board_json(token)) tasks_by_column = [self.fetch_tasks_by_column(column, self.name, token) for", "board_name) def fetch_tasks(column_id, next_task=None): base_url = f\"{self.FULL_TASKS_URL}?apiToken={token}&columnId={column_id}\" final_url = f\"{base_url}&startTaskId={next_task}\" if next_task else", "= f\"{base_url}&startTaskId={next_task}\" if next_task else f\"{base_url}\" resp = requests.get(final_url) self.log.info(\"Status code: %s\", resp.status_code)", "return board_dict[\"_id\"], board_dict['name'], columns, swimlanes def flatten_tasks(self, lists_of_tasks): return [task for sublist in", "from .column import Column from .swimlane import Swimlane from .task import Task class", "return fetch_tasks(column.uniqueId) def get_spent_time(self): return sum([task.spent for task in self.tasks]) def __str__(self): return", "from .swimlane import Swimlane from .task import Task class Board(object): def __init__(self, token):", "for task in self.tasks]) def __str__(self): return f\"{{ Id: {self.id}. Name: {self.name}. 
Columns:", "requests.get(final_url) self.log.info(\"Status code: %s\", resp.status_code) resp_dict = resp.json() tasks = [Task(t_dict) for t_dict", ".swimlane import Swimlane from .task import Task class Board(object): def __init__(self, token): self.log", "[task for sublist in lists_of_tasks for task in sublist] def fetch_tasks_by_column(self, column, board_name,", "final_url = f\"{base_url}&startTaskId={next_task}\" if next_task else f\"{base_url}\" resp = requests.get(final_url) self.log.info(\"Status code: %s\",", "swimlanes = [Swimlane(lane_dict) for lane_dict in board_dict[\"swimlanes\"]] return board_dict[\"_id\"], board_dict['name'], columns, swimlanes def", "resp_dict[0][\"nextTaskId\"])) return tasks return fetch_tasks(column.uniqueId) def get_spent_time(self): return sum([task.spent for task in self.tasks])", "class Board(object): def __init__(self, token): self.log = logging.getLogger() self.FULL_BOARD_URL = f\"{KFLOW_BASE_URL}/board\" self.FULL_TASKS_URL =", "resp_dict = resp.json() tasks = [Task(t_dict) for t_dict in resp_dict[0][\"tasks\"]] if resp_dict[0].get(\"tasksLimited\"): tasks.extend(fetch_tasks(column_id,", "resp.status_code) return resp.json() def parse_board(self, board_dict): columns = [Column(col_dict) for col_dict in board_dict[\"columns\"]]", "fetch_tasks(column_id, next_task=None): base_url = f\"{self.FULL_TASKS_URL}?apiToken={token}&columnId={column_id}\" final_url = f\"{base_url}&startTaskId={next_task}\" if next_task else f\"{base_url}\" resp", "token: %s\", token) resp = requests.get(f\"{self.FULL_BOARD_URL}?apiToken={token}\") self.log.info(\"Status code: %s\", resp.status_code) return resp.json() def", "token): self.log.info(\"Pulling token: %s\", token) resp = requests.get(f\"{self.FULL_BOARD_URL}?apiToken={token}\") self.log.info(\"Status code: %s\", resp.status_code) return", "= f\"{self.FULL_TASKS_URL}?apiToken={token}&columnId={column_id}\" final_url = f\"{base_url}&startTaskId={next_task}\" if next_task else 
f\"{base_url}\" resp = requests.get(final_url) self.log.info(\"Status", "self.log.info(\"Pulling token: %s\", token) resp = requests.get(f\"{self.FULL_BOARD_URL}?apiToken={token}\") self.log.info(\"Status code: %s\", resp.status_code) return resp.json()", "from .task import Task class Board(object): def __init__(self, token): self.log = logging.getLogger() self.FULL_BOARD_URL", "self.name, self.columns, self.swimlanes) = self.parse_board(self.fetch_board_json(token)) tasks_by_column = [self.fetch_tasks_by_column(column, self.name, token) for column in", "def fetch_tasks_by_column(self, column, board_name, token): self.log.info(\"Pulling tasks for %s column in %s board\",", "sublist] def fetch_tasks_by_column(self, column, board_name, token): self.log.info(\"Pulling tasks for %s column in %s", "code: %s\", resp.status_code) resp_dict = resp.json() tasks = [Task(t_dict) for t_dict in resp_dict[0][\"tasks\"]]", "= f\"{KFLOW_BASE_URL}/board\" self.FULL_TASKS_URL = f\"{KFLOW_BASE_URL}/tasks\" (self.id, self.name, self.columns, self.swimlanes) = self.parse_board(self.fetch_board_json(token)) tasks_by_column =", "[Column(col_dict) for col_dict in board_dict[\"columns\"]] swimlanes = [Swimlane(lane_dict) for lane_dict in board_dict[\"swimlanes\"]] return", "token): self.log = logging.getLogger() self.FULL_BOARD_URL = f\"{KFLOW_BASE_URL}/board\" self.FULL_TASKS_URL = f\"{KFLOW_BASE_URL}/tasks\" (self.id, self.name, self.columns,", "(self.id, self.name, self.columns, self.swimlanes) = self.parse_board(self.fetch_board_json(token)) tasks_by_column = [self.fetch_tasks_by_column(column, self.name, token) for column", "def __init__(self, token): self.log = logging.getLogger() self.FULL_BOARD_URL = f\"{KFLOW_BASE_URL}/board\" self.FULL_TASKS_URL = f\"{KFLOW_BASE_URL}/tasks\" (self.id,", "token): self.log.info(\"Pulling tasks for %s column in %s board\", column.name, board_name) def fetch_tasks(column_id,", "board_dict): columns = [Column(col_dict) for col_dict in 
board_dict[\"columns\"]] swimlanes = [Swimlane(lane_dict) for lane_dict", "board_dict[\"swimlanes\"]] return board_dict[\"_id\"], board_dict['name'], columns, swimlanes def flatten_tasks(self, lists_of_tasks): return [task for sublist", "column in %s board\", column.name, board_name) def fetch_tasks(column_id, next_task=None): base_url = f\"{self.FULL_TASKS_URL}?apiToken={token}&columnId={column_id}\" final_url", "for t_dict in resp_dict[0][\"tasks\"]] if resp_dict[0].get(\"tasksLimited\"): tasks.extend(fetch_tasks(column_id, resp_dict[0][\"nextTaskId\"])) return tasks return fetch_tasks(column.uniqueId) def", "requests.get(f\"{self.FULL_BOARD_URL}?apiToken={token}\") self.log.info(\"Status code: %s\", resp.status_code) return resp.json() def parse_board(self, board_dict): columns = [Column(col_dict)", "for col_dict in board_dict[\"columns\"]] swimlanes = [Swimlane(lane_dict) for lane_dict in board_dict[\"swimlanes\"]] return board_dict[\"_id\"],", "Name: {self.name}. Columns: {self.columns}. Swimlanes: {self.swimlanes}. 
Tasks: {self.tasks} }}\" def __repr__(self): return str(self)", ".column import Column from .swimlane import Swimlane from .task import Task class Board(object):", "return resp.json() def parse_board(self, board_dict): columns = [Column(col_dict) for col_dict in board_dict[\"columns\"]] swimlanes", "resp.json() tasks = [Task(t_dict) for t_dict in resp_dict[0][\"tasks\"]] if resp_dict[0].get(\"tasksLimited\"): tasks.extend(fetch_tasks(column_id, resp_dict[0][\"nextTaskId\"])) return", "import Task class Board(object): def __init__(self, token): self.log = logging.getLogger() self.FULL_BOARD_URL = f\"{KFLOW_BASE_URL}/board\"", "[Task(t_dict) for t_dict in resp_dict[0][\"tasks\"]] if resp_dict[0].get(\"tasksLimited\"): tasks.extend(fetch_tasks(column_id, resp_dict[0][\"nextTaskId\"])) return tasks return fetch_tasks(column.uniqueId)", "flatten_tasks(self, lists_of_tasks): return [task for sublist in lists_of_tasks for task in sublist] def", "if resp_dict[0].get(\"tasksLimited\"): tasks.extend(fetch_tasks(column_id, resp_dict[0][\"nextTaskId\"])) return tasks return fetch_tasks(column.uniqueId) def get_spent_time(self): return sum([task.spent for", "return [task for sublist in lists_of_tasks for task in sublist] def fetch_tasks_by_column(self, column,", "Task class Board(object): def __init__(self, token): self.log = logging.getLogger() self.FULL_BOARD_URL = f\"{KFLOW_BASE_URL}/board\" self.FULL_TASKS_URL", "= self.parse_board(self.fetch_board_json(token)) tasks_by_column = [self.fetch_tasks_by_column(column, self.name, token) for column in self.columns] self.tasks =", "resp.json() def parse_board(self, board_dict): columns = [Column(col_dict) for col_dict in board_dict[\"columns\"]] swimlanes =", "fetch_tasks(column.uniqueId) def get_spent_time(self): return sum([task.spent for task in self.tasks]) def __str__(self): return f\"{{", "in lists_of_tasks for task in sublist] def fetch_tasks_by_column(self, column, board_name, token): self.log.info(\"Pulling tasks", "= 
[Swimlane(lane_dict) for lane_dict in board_dict[\"swimlanes\"]] return board_dict[\"_id\"], board_dict['name'], columns, swimlanes def flatten_tasks(self,", "f\"{{ Id: {self.id}. Name: {self.name}. Columns: {self.columns}. Swimlanes: {self.swimlanes}. Tasks: {self.tasks} }}\" def", "return tasks return fetch_tasks(column.uniqueId) def get_spent_time(self): return sum([task.spent for task in self.tasks]) def", "board_name, token): self.log.info(\"Pulling tasks for %s column in %s board\", column.name, board_name) def", "board_dict[\"_id\"], board_dict['name'], columns, swimlanes def flatten_tasks(self, lists_of_tasks): return [task for sublist in lists_of_tasks", "%s column in %s board\", column.name, board_name) def fetch_tasks(column_id, next_task=None): base_url = f\"{self.FULL_TASKS_URL}?apiToken={token}&columnId={column_id}\"", "self.log.info(\"Pulling tasks for %s column in %s board\", column.name, board_name) def fetch_tasks(column_id, next_task=None):", "= [Task(t_dict) for t_dict in resp_dict[0][\"tasks\"]] if resp_dict[0].get(\"tasksLimited\"): tasks.extend(fetch_tasks(column_id, resp_dict[0][\"nextTaskId\"])) return tasks return", "%s\", token) resp = requests.get(f\"{self.FULL_BOARD_URL}?apiToken={token}\") self.log.info(\"Status code: %s\", resp.status_code) return resp.json() def parse_board(self,", "column, board_name, token): self.log.info(\"Pulling tasks for %s column in %s board\", column.name, board_name)", "%s\", resp.status_code) return resp.json() def parse_board(self, board_dict): columns = [Column(col_dict) for col_dict in", "column in self.columns] self.tasks = self.flatten_tasks(tasks_by_column) def fetch_board_json(self, token): self.log.info(\"Pulling token: %s\", token)", "lists_of_tasks): return [task for sublist in lists_of_tasks for task in sublist] def fetch_tasks_by_column(self,", "f\"{base_url}&startTaskId={next_task}\" if next_task else f\"{base_url}\" resp = requests.get(final_url) self.log.info(\"Status code: %s\", 
resp.status_code) resp_dict", "[self.fetch_tasks_by_column(column, self.name, token) for column in self.columns] self.tasks = self.flatten_tasks(tasks_by_column) def fetch_board_json(self, token):", "self.log.info(\"Status code: %s\", resp.status_code) return resp.json() def parse_board(self, board_dict): columns = [Column(col_dict) for", "def get_spent_time(self): return sum([task.spent for task in self.tasks]) def __str__(self): return f\"{{ Id:", "tasks return fetch_tasks(column.uniqueId) def get_spent_time(self): return sum([task.spent for task in self.tasks]) def __str__(self):", "f\"{base_url}\" resp = requests.get(final_url) self.log.info(\"Status code: %s\", resp.status_code) resp_dict = resp.json() tasks =", "next_task else f\"{base_url}\" resp = requests.get(final_url) self.log.info(\"Status code: %s\", resp.status_code) resp_dict = resp.json()", "tasks_by_column = [self.fetch_tasks_by_column(column, self.name, token) for column in self.columns] self.tasks = self.flatten_tasks(tasks_by_column) def", "column.name, board_name) def fetch_tasks(column_id, next_task=None): base_url = f\"{self.FULL_TASKS_URL}?apiToken={token}&columnId={column_id}\" final_url = f\"{base_url}&startTaskId={next_task}\" if next_task", "sum([task.spent for task in self.tasks]) def __str__(self): return f\"{{ Id: {self.id}. 
Name: {self.name}.", "= requests.get(final_url) self.log.info(\"Status code: %s\", resp.status_code) resp_dict = resp.json() tasks = [Task(t_dict) for", ".task import Task class Board(object): def __init__(self, token): self.log = logging.getLogger() self.FULL_BOARD_URL =", "for task in sublist] def fetch_tasks_by_column(self, column, board_name, token): self.log.info(\"Pulling tasks for %s", "tasks = [Task(t_dict) for t_dict in resp_dict[0][\"tasks\"]] if resp_dict[0].get(\"tasksLimited\"): tasks.extend(fetch_tasks(column_id, resp_dict[0][\"nextTaskId\"])) return tasks", "in %s board\", column.name, board_name) def fetch_tasks(column_id, next_task=None): base_url = f\"{self.FULL_TASKS_URL}?apiToken={token}&columnId={column_id}\" final_url =", "token) for column in self.columns] self.tasks = self.flatten_tasks(tasks_by_column) def fetch_board_json(self, token): self.log.info(\"Pulling token:", "= self.flatten_tasks(tasks_by_column) def fetch_board_json(self, token): self.log.info(\"Pulling token: %s\", token) resp = requests.get(f\"{self.FULL_BOARD_URL}?apiToken={token}\") self.log.info(\"Status", "fetch_tasks_by_column(self, column, board_name, token): self.log.info(\"Pulling tasks for %s column in %s board\", column.name,", "self.FULL_BOARD_URL = f\"{KFLOW_BASE_URL}/board\" self.FULL_TASKS_URL = f\"{KFLOW_BASE_URL}/tasks\" (self.id, self.name, self.columns, self.swimlanes) = self.parse_board(self.fetch_board_json(token)) tasks_by_column", "resp_dict[0][\"tasks\"]] if resp_dict[0].get(\"tasksLimited\"): tasks.extend(fetch_tasks(column_id, resp_dict[0][\"nextTaskId\"])) return tasks return fetch_tasks(column.uniqueId) def get_spent_time(self): return sum([task.spent", "self.flatten_tasks(tasks_by_column) def fetch_board_json(self, token): self.log.info(\"Pulling token: %s\", token) resp = requests.get(f\"{self.FULL_BOARD_URL}?apiToken={token}\") self.log.info(\"Status code:", "resp = requests.get(f\"{self.FULL_BOARD_URL}?apiToken={token}\") self.log.info(\"Status 
code: %s\", resp.status_code) return resp.json() def parse_board(self, board_dict): columns", "self.tasks]) def __str__(self): return f\"{{ Id: {self.id}. Name: {self.name}. Columns: {self.columns}. Swimlanes: {self.swimlanes}.", "import KFLOW_BASE_URL from .column import Column from .swimlane import Swimlane from .task import", "swimlanes def flatten_tasks(self, lists_of_tasks): return [task for sublist in lists_of_tasks for task in", "self.log = logging.getLogger() self.FULL_BOARD_URL = f\"{KFLOW_BASE_URL}/board\" self.FULL_TASKS_URL = f\"{KFLOW_BASE_URL}/tasks\" (self.id, self.name, self.columns, self.swimlanes)", "board_dict[\"columns\"]] swimlanes = [Swimlane(lane_dict) for lane_dict in board_dict[\"swimlanes\"]] return board_dict[\"_id\"], board_dict['name'], columns, swimlanes", "__init__(self, token): self.log = logging.getLogger() self.FULL_BOARD_URL = f\"{KFLOW_BASE_URL}/board\" self.FULL_TASKS_URL = f\"{KFLOW_BASE_URL}/tasks\" (self.id, self.name,", "lists_of_tasks for task in sublist] def fetch_tasks_by_column(self, column, board_name, token): self.log.info(\"Pulling tasks for", "lane_dict in board_dict[\"swimlanes\"]] return board_dict[\"_id\"], board_dict['name'], columns, swimlanes def flatten_tasks(self, lists_of_tasks): return [task", "in board_dict[\"swimlanes\"]] return board_dict[\"_id\"], board_dict['name'], columns, swimlanes def flatten_tasks(self, lists_of_tasks): return [task for", "f\"{self.FULL_TASKS_URL}?apiToken={token}&columnId={column_id}\" final_url = f\"{base_url}&startTaskId={next_task}\" if next_task else f\"{base_url}\" resp = requests.get(final_url) self.log.info(\"Status code:", "= [Column(col_dict) for col_dict in board_dict[\"columns\"]] swimlanes = [Swimlane(lane_dict) for lane_dict in board_dict[\"swimlanes\"]]", "for %s column in %s board\", column.name, board_name) def fetch_tasks(column_id, next_task=None): base_url =", "def fetch_tasks(column_id, next_task=None): base_url = 
f\"{self.FULL_TASKS_URL}?apiToken={token}&columnId={column_id}\" final_url = f\"{base_url}&startTaskId={next_task}\" if next_task else f\"{base_url}\"", "[Swimlane(lane_dict) for lane_dict in board_dict[\"swimlanes\"]] return board_dict[\"_id\"], board_dict['name'], columns, swimlanes def flatten_tasks(self, lists_of_tasks):", "logging from ..constants import KFLOW_BASE_URL from .column import Column from .swimlane import Swimlane", "in self.columns] self.tasks = self.flatten_tasks(tasks_by_column) def fetch_board_json(self, token): self.log.info(\"Pulling token: %s\", token) resp", "code: %s\", resp.status_code) return resp.json() def parse_board(self, board_dict): columns = [Column(col_dict) for col_dict", "requests import logging from ..constants import KFLOW_BASE_URL from .column import Column from .swimlane", "in board_dict[\"columns\"]] swimlanes = [Swimlane(lane_dict) for lane_dict in board_dict[\"swimlanes\"]] return board_dict[\"_id\"], board_dict['name'], columns,", "if next_task else f\"{base_url}\" resp = requests.get(final_url) self.log.info(\"Status code: %s\", resp.status_code) resp_dict =", "sublist in lists_of_tasks for task in sublist] def fetch_tasks_by_column(self, column, board_name, token): self.log.info(\"Pulling", "from ..constants import KFLOW_BASE_URL from .column import Column from .swimlane import Swimlane from", "import logging from ..constants import KFLOW_BASE_URL from .column import Column from .swimlane import", "{self.id}. Name: {self.name}. Columns: {self.columns}. Swimlanes: {self.swimlanes}. 
Tasks: {self.tasks} }}\" def __repr__(self): return", "self.columns, self.swimlanes) = self.parse_board(self.fetch_board_json(token)) tasks_by_column = [self.fetch_tasks_by_column(column, self.name, token) for column in self.columns]", "import requests import logging from ..constants import KFLOW_BASE_URL from .column import Column from", "Column from .swimlane import Swimlane from .task import Task class Board(object): def __init__(self,", "Board(object): def __init__(self, token): self.log = logging.getLogger() self.FULL_BOARD_URL = f\"{KFLOW_BASE_URL}/board\" self.FULL_TASKS_URL = f\"{KFLOW_BASE_URL}/tasks\"", "logging.getLogger() self.FULL_BOARD_URL = f\"{KFLOW_BASE_URL}/board\" self.FULL_TASKS_URL = f\"{KFLOW_BASE_URL}/tasks\" (self.id, self.name, self.columns, self.swimlanes) = self.parse_board(self.fetch_board_json(token))", "def flatten_tasks(self, lists_of_tasks): return [task for sublist in lists_of_tasks for task in sublist]", "in resp_dict[0][\"tasks\"]] if resp_dict[0].get(\"tasksLimited\"): tasks.extend(fetch_tasks(column_id, resp_dict[0][\"nextTaskId\"])) return tasks return fetch_tasks(column.uniqueId) def get_spent_time(self): return", "= resp.json() tasks = [Task(t_dict) for t_dict in resp_dict[0][\"tasks\"]] if resp_dict[0].get(\"tasksLimited\"): tasks.extend(fetch_tasks(column_id, resp_dict[0][\"nextTaskId\"]))", "t_dict in resp_dict[0][\"tasks\"]] if resp_dict[0].get(\"tasksLimited\"): tasks.extend(fetch_tasks(column_id, resp_dict[0][\"nextTaskId\"])) return tasks return fetch_tasks(column.uniqueId) def get_spent_time(self):", "token) resp = requests.get(f\"{self.FULL_BOARD_URL}?apiToken={token}\") self.log.info(\"Status code: %s\", resp.status_code) return resp.json() def parse_board(self, board_dict):", "def __str__(self): return f\"{{ Id: {self.id}. Name: {self.name}. Columns: {self.columns}. Swimlanes: {self.swimlanes}. 
Tasks:", "col_dict in board_dict[\"columns\"]] swimlanes = [Swimlane(lane_dict) for lane_dict in board_dict[\"swimlanes\"]] return board_dict[\"_id\"], board_dict['name'],", "%s\", resp.status_code) resp_dict = resp.json() tasks = [Task(t_dict) for t_dict in resp_dict[0][\"tasks\"]] if", "= [self.fetch_tasks_by_column(column, self.name, token) for column in self.columns] self.tasks = self.flatten_tasks(tasks_by_column) def fetch_board_json(self,", "<gh_stars>0 import requests import logging from ..constants import KFLOW_BASE_URL from .column import Column", "self.swimlanes) = self.parse_board(self.fetch_board_json(token)) tasks_by_column = [self.fetch_tasks_by_column(column, self.name, token) for column in self.columns] self.tasks", "base_url = f\"{self.FULL_TASKS_URL}?apiToken={token}&columnId={column_id}\" final_url = f\"{base_url}&startTaskId={next_task}\" if next_task else f\"{base_url}\" resp = requests.get(final_url)", "self.log.info(\"Status code: %s\", resp.status_code) resp_dict = resp.json() tasks = [Task(t_dict) for t_dict in", "f\"{KFLOW_BASE_URL}/board\" self.FULL_TASKS_URL = f\"{KFLOW_BASE_URL}/tasks\" (self.id, self.name, self.columns, self.swimlanes) = self.parse_board(self.fetch_board_json(token)) tasks_by_column = [self.fetch_tasks_by_column(column,", "= logging.getLogger() self.FULL_BOARD_URL = f\"{KFLOW_BASE_URL}/board\" self.FULL_TASKS_URL = f\"{KFLOW_BASE_URL}/tasks\" (self.id, self.name, self.columns, self.swimlanes) =", "Id: {self.id}. Name: {self.name}. Columns: {self.columns}. Swimlanes: {self.swimlanes}. 
Tasks: {self.tasks} }}\" def __repr__(self):", "def parse_board(self, board_dict): columns = [Column(col_dict) for col_dict in board_dict[\"columns\"]] swimlanes = [Swimlane(lane_dict)", "board\", column.name, board_name) def fetch_tasks(column_id, next_task=None): base_url = f\"{self.FULL_TASKS_URL}?apiToken={token}&columnId={column_id}\" final_url = f\"{base_url}&startTaskId={next_task}\" if", "self.name, token) for column in self.columns] self.tasks = self.flatten_tasks(tasks_by_column) def fetch_board_json(self, token): self.log.info(\"Pulling", "KFLOW_BASE_URL from .column import Column from .swimlane import Swimlane from .task import Task", "__str__(self): return f\"{{ Id: {self.id}. Name: {self.name}. Columns: {self.columns}. Swimlanes: {self.swimlanes}. Tasks: {self.tasks}", "task in self.tasks]) def __str__(self): return f\"{{ Id: {self.id}. Name: {self.name}. Columns: {self.columns}.", "get_spent_time(self): return sum([task.spent for task in self.tasks]) def __str__(self): return f\"{{ Id: {self.id}.", "else f\"{base_url}\" resp = requests.get(final_url) self.log.info(\"Status code: %s\", resp.status_code) resp_dict = resp.json() tasks", "def fetch_board_json(self, token): self.log.info(\"Pulling token: %s\", token) resp = requests.get(f\"{self.FULL_BOARD_URL}?apiToken={token}\") self.log.info(\"Status code: %s\",", "tasks.extend(fetch_tasks(column_id, resp_dict[0][\"nextTaskId\"])) return tasks return fetch_tasks(column.uniqueId) def get_spent_time(self): return sum([task.spent for task in", "= f\"{KFLOW_BASE_URL}/tasks\" (self.id, self.name, self.columns, self.swimlanes) = self.parse_board(self.fetch_board_json(token)) tasks_by_column = [self.fetch_tasks_by_column(column, self.name, token)", "fetch_board_json(self, token): self.log.info(\"Pulling token: %s\", token) resp = requests.get(f\"{self.FULL_BOARD_URL}?apiToken={token}\") self.log.info(\"Status code: %s\", resp.status_code)", "return sum([task.spent for task in self.tasks]) def __str__(self): 
return f\"{{ Id: {self.id}. Name:", "columns, swimlanes def flatten_tasks(self, lists_of_tasks): return [task for sublist in lists_of_tasks for task", "= requests.get(f\"{self.FULL_BOARD_URL}?apiToken={token}\") self.log.info(\"Status code: %s\", resp.status_code) return resp.json() def parse_board(self, board_dict): columns =", "return f\"{{ Id: {self.id}. Name: {self.name}. Columns: {self.columns}. Swimlanes: {self.swimlanes}. Tasks: {self.tasks} }}\"", "resp_dict[0].get(\"tasksLimited\"): tasks.extend(fetch_tasks(column_id, resp_dict[0][\"nextTaskId\"])) return tasks return fetch_tasks(column.uniqueId) def get_spent_time(self): return sum([task.spent for task", "resp = requests.get(final_url) self.log.info(\"Status code: %s\", resp.status_code) resp_dict = resp.json() tasks = [Task(t_dict)", "%s board\", column.name, board_name) def fetch_tasks(column_id, next_task=None): base_url = f\"{self.FULL_TASKS_URL}?apiToken={token}&columnId={column_id}\" final_url = f\"{base_url}&startTaskId={next_task}\"", "import Swimlane from .task import Task class Board(object): def __init__(self, token): self.log =", "Swimlane from .task import Task class Board(object): def __init__(self, token): self.log = logging.getLogger()", "import Column from .swimlane import Swimlane from .task import Task class Board(object): def", "board_dict['name'], columns, swimlanes def flatten_tasks(self, lists_of_tasks): return [task for sublist in lists_of_tasks for", "for lane_dict in board_dict[\"swimlanes\"]] return board_dict[\"_id\"], board_dict['name'], columns, swimlanes def flatten_tasks(self, lists_of_tasks): return", "parse_board(self, board_dict): columns = [Column(col_dict) for col_dict in board_dict[\"columns\"]] swimlanes = [Swimlane(lane_dict) for", "in self.tasks]) def __str__(self): return f\"{{ Id: {self.id}. Name: {self.name}. Columns: {self.columns}. 
Swimlanes:", "self.columns] self.tasks = self.flatten_tasks(tasks_by_column) def fetch_board_json(self, token): self.log.info(\"Pulling token: %s\", token) resp =", "in sublist] def fetch_tasks_by_column(self, column, board_name, token): self.log.info(\"Pulling tasks for %s column in", "self.tasks = self.flatten_tasks(tasks_by_column) def fetch_board_json(self, token): self.log.info(\"Pulling token: %s\", token) resp = requests.get(f\"{self.FULL_BOARD_URL}?apiToken={token}\")", "resp.status_code) resp_dict = resp.json() tasks = [Task(t_dict) for t_dict in resp_dict[0][\"tasks\"]] if resp_dict[0].get(\"tasksLimited\"):", "self.parse_board(self.fetch_board_json(token)) tasks_by_column = [self.fetch_tasks_by_column(column, self.name, token) for column in self.columns] self.tasks = self.flatten_tasks(tasks_by_column)", "next_task=None): base_url = f\"{self.FULL_TASKS_URL}?apiToken={token}&columnId={column_id}\" final_url = f\"{base_url}&startTaskId={next_task}\" if next_task else f\"{base_url}\" resp =", "task in sublist] def fetch_tasks_by_column(self, column, board_name, token): self.log.info(\"Pulling tasks for %s column", "..constants import KFLOW_BASE_URL from .column import Column from .swimlane import Swimlane from .task" ]
[ "hydroDL.post import axplot, figplot import matplotlib.pyplot as plt import importlib from astropy.timeseries import", "import importlib from astropy.timeseries import LombScargle import pandas as pd import numpy as", "= (t.astype('datetime64[D]') - np.datetime64('1979-01-01')).astype(np.float) y = df['00955'].values y = y-np.nanmean(y) nt = len(xx)", "ls.power(freq) prob = ls.false_alarm_probability(power) ind = np.where(prob < 0.05)[0] pd = np.unique(np.abs((1/freq[ind]).astype(int))) dictP[siteNo]", "DGSA.DGSA_light( pMat, rMat, ParametersNames=dfG.columns.tolist(), n_clsters=3) ax = dfS.sort_values(by=0).plot.barh() plt.show() dfSP = dfS.sort_values(by=0) fig,", "axM[0].set_title('weekly signal') axM[1].set_title('half yearly signal') axM[2].set_title('yearly signal') figM.show() dfG = gageII.readData(varLst=gageII.varLst, siteNoLst=siteNoLst) dfG", "dfObs.index.values xx = (tt.astype('datetime64[D]') - np.datetime64('1979-01-01')).astype(np.float) t = df.index.values x = (t.astype('datetime64[D]') -", "sum(list(dictP.values()), []) pu, pc = np.unique(np.array(pLst), return_counts=True) temp = np.stack([pu, pc]).transpose() rMat =", "for b in (dfSP[0] > 1).tolist(): cLst.append('r') if b is True else cLst.append('b')", "= np.fft.fftfreq(nt)[1:] ls = LombScargle(x, y) power = ls.power(freq) prob = ls.false_alarm_probability(power) ind", "1)] ul = np.mean(yV)+np.std(yV)*5 dfObs[dfObs['00955'] > ul] = np.nan # fourier df =", "dfS.sort_values(by=0) fig, ax = plt.subplots(1, 1) x = range(len(dfSP)) cLst = list() for", "= np.where(prob < 0.05)[0] pd = np.unique(np.abs((1/freq[ind]).astype(int))) dictP[siteNo] = pd.tolist() pLst = sum(list(dictP.values()),", "6 in temp or 7 in temp: rMat[k, 0] = 1 if 182", "pMat, rMat, ParametersNames=dfG.columns.tolist(), n_clsters=3) ax = dfS.sort_values(by=0).plot.barh() plt.show() dfSP = dfS.sort_values(by=0) fig, ax", "6)) for k in range(3): mm = axplot.mapPoint(axM[k], lat, lon, rMat[:, k], s=12)", "plt.subplots(3, 1, 
figsize=(8, 6)) for k in range(3): mm = axplot.mapPoint(axM[k], lat, lon,", "map dfCrd = gageII.readData( varLst=['LAT_GAGE', 'LNG_GAGE'], siteNoLst=siteNoLst) lat = dfCrd['LAT_GAGE'].values lon = dfCrd['LNG_GAGE'].values", "lat = dfCrd['LAT_GAGE'].values lon = dfCrd['LNG_GAGE'].values figM, axM = plt.subplots(3, 1, figsize=(8, 6))", "import kPath from hydroDL.app import waterQuality, DGSA from hydroDL.data import gageII, usgs, gridMET", "k in range(3): mm = axplot.mapPoint(axM[k], lat, lon, rMat[:, k], s=12) axM[0].set_title('weekly signal')", "in (dfSP[0] > 1).tolist(): cLst.append('r') if b is True else cLst.append('b') ax.barh(x, dfSP[0].values,", "import os import time wqData = waterQuality.DataModelWQ('Silica64') siteNoLst = wqData.siteNoLst dictP = dict()", "siteNo in enumerate(siteNoLst): temp = dictP[siteNo] if 6 in temp or 7 in", "ul = np.mean(yV)+np.std(yV)*5 dfObs[dfObs['00955'] > ul] = np.nan # fourier df = dfObs[dfObs.notna().values]", "ul] = np.nan # fourier df = dfObs[dfObs.notna().values] tt = dfObs.index.values xx =", "nt = len(xx) freq = np.fft.fftfreq(nt)[1:] ls = LombScargle(x, y) power = ls.power(freq)", "dict() for k, siteNo in enumerate(siteNoLst): print(siteNo) dfObs = waterQuality.readSiteY(siteNo, ['00955']) dfObs =", "fourier df = dfObs[dfObs.notna().values] tt = dfObs.index.values xx = (tt.astype('datetime64[D]') - np.datetime64('1979-01-01')).astype(np.float) t", "= ls.false_alarm_probability(power) ind = np.where(prob < 0.05)[0] pd = np.unique(np.abs((1/freq[ind]).astype(int))) dictP[siteNo] = pd.tolist()", "= (tt.astype('datetime64[D]') - np.datetime64('1979-01-01')).astype(np.float) t = df.index.values x = (t.astype('datetime64[D]') - np.datetime64('1979-01-01')).astype(np.float) y", "ls.false_alarm_probability(power) ind = np.where(prob < 0.05)[0] pd = np.unique(np.abs((1/freq[ind]).astype(int))) dictP[siteNo] = pd.tolist() pLst", "pu, pc = np.unique(np.array(pLst), return_counts=True) temp = np.stack([pu, pc]).transpose() rMat = 
np.zeros([len(siteNoLst), 3])", "= plt.subplots(3, 1, figsize=(8, 6)) for k in range(3): mm = axplot.mapPoint(axM[k], lat,", "wqData.siteNoLst dictP = dict() for k, siteNo in enumerate(siteNoLst): print(siteNo) dfObs = waterQuality.readSiteY(siteNo,", "plot map dfCrd = gageII.readData( varLst=['LAT_GAGE', 'LNG_GAGE'], siteNoLst=siteNoLst) lat = dfCrd['LAT_GAGE'].values lon =", "for k, siteNo in enumerate(siteNoLst): temp = dictP[siteNo] if 6 in temp or", "= waterQuality.DataModelWQ('Silica64') siteNoLst = wqData.siteNoLst dictP = dict() for k, siteNo in enumerate(siteNoLst):", "siteNoLst=siteNoLst) lat = dfCrd['LAT_GAGE'].values lon = dfCrd['LNG_GAGE'].values figM, axM = plt.subplots(3, 1, figsize=(8,", "rm outlier df = dfObs[dfObs['00955'].notna().values] y = df['00955'].values yV = y[y < np.percentile(y,", "outlier df = dfObs[dfObs['00955'].notna().values] y = df['00955'].values yV = y[y < np.percentile(y, 99)]", "signal') axM[1].set_title('half yearly signal') axM[2].set_title('yearly signal') figM.show() dfG = gageII.readData(varLst=gageII.varLst, siteNoLst=siteNoLst) dfG =", "plt.subplots(1, 1) x = range(len(dfSP)) cLst = list() for b in (dfSP[0] >", "np.mean(yV)+np.std(yV)*5 dfObs[dfObs['00955'] > ul] = np.nan # fourier df = dfObs[dfObs.notna().values] tt =", "df['00955'].values yV = y[y < np.percentile(y, 99)] yV = yV[yV > np.percentile(y, 1)]", "# fourier df = dfObs[dfObs.notna().values] tt = dfObs.index.values xx = (tt.astype('datetime64[D]') - np.datetime64('1979-01-01')).astype(np.float)", "DGSA from hydroDL.data import gageII, usgs, gridMET from hydroDL.master import basins from hydroDL.post", "return_counts=True) temp = np.stack([pu, pc]).transpose() rMat = np.zeros([len(siteNoLst), 3]) for k, siteNo in", "waterQuality.DataModelWQ('Silica64') siteNoLst = wqData.siteNoLst dictP = dict() for k, siteNo in enumerate(siteNoLst): print(siteNo)", "7 in temp: rMat[k, 0] = 1 if 182 in temp: rMat[k, 1]", "cLst = list() for b in (dfSP[0] > 1).tolist(): 
cLst.append('r') if b is", "= dictP[siteNo] if 6 in temp or 7 in temp: rMat[k, 0] =", "pMat = dfG.values dfS = DGSA.DGSA_light( pMat, rMat, ParametersNames=dfG.columns.tolist(), n_clsters=3) ax = dfS.sort_values(by=0).plot.barh()", "dfG = gageII.updateCode(dfG) pMat = dfG.values dfS = DGSA.DGSA_light( pMat, rMat, ParametersNames=dfG.columns.tolist(), n_clsters=3)", "range(3): mm = axplot.mapPoint(axM[k], lat, lon, rMat[:, k], s=12) axM[0].set_title('weekly signal') axM[1].set_title('half yearly", "axM[1].set_title('half yearly signal') axM[2].set_title('yearly signal') figM.show() dfG = gageII.readData(varLst=gageII.varLst, siteNoLst=siteNoLst) dfG = gageII.updateCode(dfG)", "= pd.tolist() pLst = sum(list(dictP.values()), []) pu, pc = np.unique(np.array(pLst), return_counts=True) temp =", "fig, ax = plt.subplots(1, 1) x = range(len(dfSP)) cLst = list() for b", "= np.unique(np.abs((1/freq[ind]).astype(int))) dictP[siteNo] = pd.tolist() pLst = sum(list(dictP.values()), []) pu, pc = np.unique(np.array(pLst),", "temp: rMat[k, 1] = 1 if 365 in temp: rMat[k, 2] = 1", "rMat[k, 0] = 1 if 182 in temp: rMat[k, 1] = 1 if", "= sum(list(dictP.values()), []) pu, pc = np.unique(np.array(pLst), return_counts=True) temp = np.stack([pu, pc]).transpose() rMat", "2] = 1 # plot map dfCrd = gageII.readData( varLst=['LAT_GAGE', 'LNG_GAGE'], siteNoLst=siteNoLst) lat", "= plt.subplots(1, 1) x = range(len(dfSP)) cLst = list() for b in (dfSP[0]", "= np.nan # fourier df = dfObs[dfObs.notna().values] tt = dfObs.index.values xx = (tt.astype('datetime64[D]')", "temp: rMat[k, 2] = 1 # plot map dfCrd = gageII.readData( varLst=['LAT_GAGE', 'LNG_GAGE'],", "rMat, ParametersNames=dfG.columns.tolist(), n_clsters=3) ax = dfS.sort_values(by=0).plot.barh() plt.show() dfSP = dfS.sort_values(by=0) fig, ax =", "mm = axplot.mapPoint(axM[k], lat, lon, rMat[:, k], s=12) axM[0].set_title('weekly signal') axM[1].set_title('half yearly signal')", "= gageII.readData(varLst=gageII.varLst, siteNoLst=siteNoLst) dfG = 
gageII.updateCode(dfG) pMat = dfG.values dfS = DGSA.DGSA_light( pMat,", "dfSP = dfS.sort_values(by=0) fig, ax = plt.subplots(1, 1) x = range(len(dfSP)) cLst =", "in enumerate(siteNoLst): temp = dictP[siteNo] if 6 in temp or 7 in temp:", "= LombScargle(x, y) power = ls.power(freq) prob = ls.false_alarm_probability(power) ind = np.where(prob <", "wqData = waterQuality.DataModelWQ('Silica64') siteNoLst = wqData.siteNoLst dictP = dict() for k, siteNo in", "= dfS.sort_values(by=0) fig, ax = plt.subplots(1, 1) x = range(len(dfSP)) cLst = list()", "1 # plot map dfCrd = gageII.readData( varLst=['LAT_GAGE', 'LNG_GAGE'], siteNoLst=siteNoLst) lat = dfCrd['LAT_GAGE'].values", "= range(len(dfSP)) cLst = list() for b in (dfSP[0] > 1).tolist(): cLst.append('r') if", "waterQuality, DGSA from hydroDL.data import gageII, usgs, gridMET from hydroDL.master import basins from", "x = (t.astype('datetime64[D]') - np.datetime64('1979-01-01')).astype(np.float) y = df['00955'].values y = y-np.nanmean(y) nt =", "axplot, figplot import matplotlib.pyplot as plt import importlib from astropy.timeseries import LombScargle import", "dfObs[dfObs['00955'].notna().values] y = df['00955'].values yV = y[y < np.percentile(y, 99)] yV = yV[yV", "freq = np.fft.fftfreq(nt)[1:] ls = LombScargle(x, y) power = ls.power(freq) prob = ls.false_alarm_probability(power)", "import axplot, figplot import matplotlib.pyplot as plt import importlib from astropy.timeseries import LombScargle", "np import os import time wqData = waterQuality.DataModelWQ('Silica64') siteNoLst = wqData.siteNoLst dictP =", "= dfObs[dfObs.notna().values] tt = dfObs.index.values xx = (tt.astype('datetime64[D]') - np.datetime64('1979-01-01')).astype(np.float) t = df.index.values", "np.fft.fftfreq(nt)[1:] ls = LombScargle(x, y) power = ls.power(freq) prob = ls.false_alarm_probability(power) ind =", "temp or 7 in temp: rMat[k, 0] = 1 if 182 in temp:", "y[y < np.percentile(y, 99)] yV = yV[yV > np.percentile(y, 1)] ul = np.mean(yV)+np.std(yV)*5", 
"yV = y[y < np.percentile(y, 99)] yV = yV[yV > np.percentile(y, 1)] ul", "lon = dfCrd['LNG_GAGE'].values figM, axM = plt.subplots(3, 1, figsize=(8, 6)) for k in", "axplot.mapPoint(axM[k], lat, lon, rMat[:, k], s=12) axM[0].set_title('weekly signal') axM[1].set_title('half yearly signal') axM[2].set_title('yearly signal')", "LombScargle(x, y) power = ls.power(freq) prob = ls.false_alarm_probability(power) ind = np.where(prob < 0.05)[0]", "= dfObs.index.values xx = (tt.astype('datetime64[D]') - np.datetime64('1979-01-01')).astype(np.float) t = df.index.values x = (t.astype('datetime64[D]')", "temp = np.stack([pu, pc]).transpose() rMat = np.zeros([len(siteNoLst), 3]) for k, siteNo in enumerate(siteNoLst):", "np.unique(np.abs((1/freq[ind]).astype(int))) dictP[siteNo] = pd.tolist() pLst = sum(list(dictP.values()), []) pu, pc = np.unique(np.array(pLst), return_counts=True)", "from hydroDL.data import gageII, usgs, gridMET from hydroDL.master import basins from hydroDL.post import", "< np.percentile(y, 99)] yV = yV[yV > np.percentile(y, 1)] ul = np.mean(yV)+np.std(yV)*5 dfObs[dfObs['00955']", "= y[y < np.percentile(y, 99)] yV = yV[yV > np.percentile(y, 1)] ul =", "= np.zeros([len(siteNoLst), 3]) for k, siteNo in enumerate(siteNoLst): temp = dictP[siteNo] if 6", "in range(3): mm = axplot.mapPoint(axM[k], lat, lon, rMat[:, k], s=12) axM[0].set_title('weekly signal') axM[1].set_title('half", "cLst.append('r') if b is True else cLst.append('b') ax.barh(x, dfSP[0].values, color=cLst) ax.set_yticks(x) ax.set_yticklabels(dfSP.index.tolist()) plt.tight_layout()", "waterQuality.readSiteY(siteNo, ['00955']) dfObs = waterQuality.readSiteY(siteNo, ['00955']) # rm outlier df = dfObs[dfObs['00955'].notna().values] y", "['00955']) dfObs = waterQuality.readSiteY(siteNo, ['00955']) # rm outlier df = dfObs[dfObs['00955'].notna().values] y =", "(dfSP[0] > 1).tolist(): cLst.append('r') if b is True else cLst.append('b') ax.barh(x, dfSP[0].values, color=cLst)", "pd.tolist() pLst = 
sum(list(dictP.values()), []) pu, pc = np.unique(np.array(pLst), return_counts=True) temp = np.stack([pu,", "range(len(dfSP)) cLst = list() for b in (dfSP[0] > 1).tolist(): cLst.append('r') if b", "ind = np.where(prob < 0.05)[0] pd = np.unique(np.abs((1/freq[ind]).astype(int))) dictP[siteNo] = pd.tolist() pLst =", "in temp: rMat[k, 2] = 1 # plot map dfCrd = gageII.readData( varLst=['LAT_GAGE',", "= waterQuality.readSiteY(siteNo, ['00955']) dfObs = waterQuality.readSiteY(siteNo, ['00955']) # rm outlier df = dfObs[dfObs['00955'].notna().values]", "as pd import numpy as np import os import time wqData = waterQuality.DataModelWQ('Silica64')", "siteNoLst = wqData.siteNoLst dictP = dict() for k, siteNo in enumerate(siteNoLst): print(siteNo) dfObs", "figsize=(8, 6)) for k in range(3): mm = axplot.mapPoint(axM[k], lat, lon, rMat[:, k],", "basins from hydroDL.post import axplot, figplot import matplotlib.pyplot as plt import importlib from", "np.percentile(y, 99)] yV = yV[yV > np.percentile(y, 1)] ul = np.mean(yV)+np.std(yV)*5 dfObs[dfObs['00955'] >", "182 in temp: rMat[k, 1] = 1 if 365 in temp: rMat[k, 2]", "t = df.index.values x = (t.astype('datetime64[D]') - np.datetime64('1979-01-01')).astype(np.float) y = df['00955'].values y =", "figplot import matplotlib.pyplot as plt import importlib from astropy.timeseries import LombScargle import pandas", "np.stack([pu, pc]).transpose() rMat = np.zeros([len(siteNoLst), 3]) for k, siteNo in enumerate(siteNoLst): temp =", "1] = 1 if 365 in temp: rMat[k, 2] = 1 # plot", "= gageII.updateCode(dfG) pMat = dfG.values dfS = DGSA.DGSA_light( pMat, rMat, ParametersNames=dfG.columns.tolist(), n_clsters=3) ax", "dfCrd = gageII.readData( varLst=['LAT_GAGE', 'LNG_GAGE'], siteNoLst=siteNoLst) lat = dfCrd['LAT_GAGE'].values lon = dfCrd['LNG_GAGE'].values figM,", "< 0.05)[0] pd = np.unique(np.abs((1/freq[ind]).astype(int))) dictP[siteNo] = pd.tolist() pLst = sum(list(dictP.values()), []) pu,", "s=12) axM[0].set_title('weekly signal') 
axM[1].set_title('half yearly signal') axM[2].set_title('yearly signal') figM.show() dfG = gageII.readData(varLst=gageII.varLst, siteNoLst=siteNoLst)", "df['00955'].values y = y-np.nanmean(y) nt = len(xx) freq = np.fft.fftfreq(nt)[1:] ls = LombScargle(x,", "signal') axM[2].set_title('yearly signal') figM.show() dfG = gageII.readData(varLst=gageII.varLst, siteNoLst=siteNoLst) dfG = gageII.updateCode(dfG) pMat =", "dfS = DGSA.DGSA_light( pMat, rMat, ParametersNames=dfG.columns.tolist(), n_clsters=3) ax = dfS.sort_values(by=0).plot.barh() plt.show() dfSP =", "# rm outlier df = dfObs[dfObs['00955'].notna().values] y = df['00955'].values yV = y[y <", "dfObs = waterQuality.readSiteY(siteNo, ['00955']) # rm outlier df = dfObs[dfObs['00955'].notna().values] y = df['00955'].values", "import LombScargle import pandas as pd import numpy as np import os import", "astropy.timeseries import LombScargle import pandas as pd import numpy as np import os", "= wqData.siteNoLst dictP = dict() for k, siteNo in enumerate(siteNoLst): print(siteNo) dfObs =", "df.index.values x = (t.astype('datetime64[D]') - np.datetime64('1979-01-01')).astype(np.float) y = df['00955'].values y = y-np.nanmean(y) nt", "hydroDL.master import basins from hydroDL.post import axplot, figplot import matplotlib.pyplot as plt import", "= waterQuality.readSiteY(siteNo, ['00955']) # rm outlier df = dfObs[dfObs['00955'].notna().values] y = df['00955'].values yV", "dfCrd['LAT_GAGE'].values lon = dfCrd['LNG_GAGE'].values figM, axM = plt.subplots(3, 1, figsize=(8, 6)) for k", "> 1).tolist(): cLst.append('r') if b is True else cLst.append('b') ax.barh(x, dfSP[0].values, color=cLst) ax.set_yticks(x)", "pc = np.unique(np.array(pLst), return_counts=True) temp = np.stack([pu, pc]).transpose() rMat = np.zeros([len(siteNoLst), 3]) for", "if b is True else cLst.append('b') ax.barh(x, dfSP[0].values, color=cLst) ax.set_yticks(x) ax.set_yticklabels(dfSP.index.tolist()) plt.tight_layout() fig.show()", "['00955']) # rm outlier df = 
dfObs[dfObs['00955'].notna().values] y = df['00955'].values yV = y[y", "in enumerate(siteNoLst): print(siteNo) dfObs = waterQuality.readSiteY(siteNo, ['00955']) dfObs = waterQuality.readSiteY(siteNo, ['00955']) # rm", "tt = dfObs.index.values xx = (tt.astype('datetime64[D]') - np.datetime64('1979-01-01')).astype(np.float) t = df.index.values x =", "= dfObs[dfObs['00955'].notna().values] y = df['00955'].values yV = y[y < np.percentile(y, 99)] yV =", "= np.unique(np.array(pLst), return_counts=True) temp = np.stack([pu, pc]).transpose() rMat = np.zeros([len(siteNoLst), 3]) for k,", "dictP[siteNo] = pd.tolist() pLst = sum(list(dictP.values()), []) pu, pc = np.unique(np.array(pLst), return_counts=True) temp", "= 1 if 365 in temp: rMat[k, 2] = 1 # plot map", "b in (dfSP[0] > 1).tolist(): cLst.append('r') if b is True else cLst.append('b') ax.barh(x,", "usgs, gridMET from hydroDL.master import basins from hydroDL.post import axplot, figplot import matplotlib.pyplot", "or 7 in temp: rMat[k, 0] = 1 if 182 in temp: rMat[k,", "= dfG.values dfS = DGSA.DGSA_light( pMat, rMat, ParametersNames=dfG.columns.tolist(), n_clsters=3) ax = dfS.sort_values(by=0).plot.barh() plt.show()", "df = dfObs[dfObs['00955'].notna().values] y = df['00955'].values yV = y[y < np.percentile(y, 99)] yV", "gageII, usgs, gridMET from hydroDL.master import basins from hydroDL.post import axplot, figplot import", "np.percentile(y, 1)] ul = np.mean(yV)+np.std(yV)*5 dfObs[dfObs['00955'] > ul] = np.nan # fourier df", "lon, rMat[:, k], s=12) axM[0].set_title('weekly signal') axM[1].set_title('half yearly signal') axM[2].set_title('yearly signal') figM.show() dfG", "siteNoLst=siteNoLst) dfG = gageII.updateCode(dfG) pMat = dfG.values dfS = DGSA.DGSA_light( pMat, rMat, ParametersNames=dfG.columns.tolist(),", "yearly signal') axM[2].set_title('yearly signal') figM.show() dfG = gageII.readData(varLst=gageII.varLst, siteNoLst=siteNoLst) dfG = gageII.updateCode(dfG) pMat", "np.zeros([len(siteNoLst), 3]) for k, siteNo 
in enumerate(siteNoLst): temp = dictP[siteNo] if 6 in", "yV[yV > np.percentile(y, 1)] ul = np.mean(yV)+np.std(yV)*5 dfObs[dfObs['00955'] > ul] = np.nan #", "figM, axM = plt.subplots(3, 1, figsize=(8, 6)) for k in range(3): mm =", "hydroDL import kPath from hydroDL.app import waterQuality, DGSA from hydroDL.data import gageII, usgs,", "dictP = dict() for k, siteNo in enumerate(siteNoLst): print(siteNo) dfObs = waterQuality.readSiteY(siteNo, ['00955'])", "[]) pu, pc = np.unique(np.array(pLst), return_counts=True) temp = np.stack([pu, pc]).transpose() rMat = np.zeros([len(siteNoLst),", "> np.percentile(y, 1)] ul = np.mean(yV)+np.std(yV)*5 dfObs[dfObs['00955'] > ul] = np.nan # fourier", "np.nan # fourier df = dfObs[dfObs.notna().values] tt = dfObs.index.values xx = (tt.astype('datetime64[D]') -", "= gageII.readData( varLst=['LAT_GAGE', 'LNG_GAGE'], siteNoLst=siteNoLst) lat = dfCrd['LAT_GAGE'].values lon = dfCrd['LNG_GAGE'].values figM, axM", "in temp or 7 in temp: rMat[k, 0] = 1 if 182 in", "siteNo in enumerate(siteNoLst): print(siteNo) dfObs = waterQuality.readSiteY(siteNo, ['00955']) dfObs = waterQuality.readSiteY(siteNo, ['00955']) #", "1) x = range(len(dfSP)) cLst = list() for b in (dfSP[0] > 1).tolist():", "= yV[yV > np.percentile(y, 1)] ul = np.mean(yV)+np.std(yV)*5 dfObs[dfObs['00955'] > ul] = np.nan", "varLst=['LAT_GAGE', 'LNG_GAGE'], siteNoLst=siteNoLst) lat = dfCrd['LAT_GAGE'].values lon = dfCrd['LNG_GAGE'].values figM, axM = plt.subplots(3,", "'LNG_GAGE'], siteNoLst=siteNoLst) lat = dfCrd['LAT_GAGE'].values lon = dfCrd['LNG_GAGE'].values figM, axM = plt.subplots(3, 1,", "1).tolist(): cLst.append('r') if b is True else cLst.append('b') ax.barh(x, dfSP[0].values, color=cLst) ax.set_yticks(x) ax.set_yticklabels(dfSP.index.tolist())", "if 365 in temp: rMat[k, 2] = 1 # plot map dfCrd =", "numpy as np import os import time wqData = waterQuality.DataModelWQ('Silica64') siteNoLst = wqData.siteNoLst", "y = y-np.nanmean(y) nt = len(xx) freq = np.fft.fftfreq(nt)[1:] ls 
= LombScargle(x, y)", "y = df['00955'].values yV = y[y < np.percentile(y, 99)] yV = yV[yV >", "> ul] = np.nan # fourier df = dfObs[dfObs.notna().values] tt = dfObs.index.values xx", "0] = 1 if 182 in temp: rMat[k, 1] = 1 if 365", "dfObs[dfObs['00955'] > ul] = np.nan # fourier df = dfObs[dfObs.notna().values] tt = dfObs.index.values", "= len(xx) freq = np.fft.fftfreq(nt)[1:] ls = LombScargle(x, y) power = ls.power(freq) prob", "= df['00955'].values yV = y[y < np.percentile(y, 99)] yV = yV[yV > np.percentile(y,", "= dfS.sort_values(by=0).plot.barh() plt.show() dfSP = dfS.sort_values(by=0) fig, ax = plt.subplots(1, 1) x =", "time wqData = waterQuality.DataModelWQ('Silica64') siteNoLst = wqData.siteNoLst dictP = dict() for k, siteNo", "1 if 365 in temp: rMat[k, 2] = 1 # plot map dfCrd", "- np.datetime64('1979-01-01')).astype(np.float) y = df['00955'].values y = y-np.nanmean(y) nt = len(xx) freq =", "np.unique(np.array(pLst), return_counts=True) temp = np.stack([pu, pc]).transpose() rMat = np.zeros([len(siteNoLst), 3]) for k, siteNo", "k, siteNo in enumerate(siteNoLst): temp = dictP[siteNo] if 6 in temp or 7", "= list() for b in (dfSP[0] > 1).tolist(): cLst.append('r') if b is True", "<gh_stars>0 from hydroDL import kPath from hydroDL.app import waterQuality, DGSA from hydroDL.data import", "dfS.sort_values(by=0).plot.barh() plt.show() dfSP = dfS.sort_values(by=0) fig, ax = plt.subplots(1, 1) x = range(len(dfSP))", "import gageII, usgs, gridMET from hydroDL.master import basins from hydroDL.post import axplot, figplot", "if 6 in temp or 7 in temp: rMat[k, 0] = 1 if", "pd = np.unique(np.abs((1/freq[ind]).astype(int))) dictP[siteNo] = pd.tolist() pLst = sum(list(dictP.values()), []) pu, pc =", "list() for b in (dfSP[0] > 1).tolist(): cLst.append('r') if b is True else", "import matplotlib.pyplot as plt import importlib from astropy.timeseries import LombScargle import pandas as", "= dfCrd['LNG_GAGE'].values figM, axM = plt.subplots(3, 1, figsize=(8, 6)) for k in 
range(3):", "from hydroDL.master import basins from hydroDL.post import axplot, figplot import matplotlib.pyplot as plt", "importlib from astropy.timeseries import LombScargle import pandas as pd import numpy as np", "xx = (tt.astype('datetime64[D]') - np.datetime64('1979-01-01')).astype(np.float) t = df.index.values x = (t.astype('datetime64[D]') - np.datetime64('1979-01-01')).astype(np.float)", "df = dfObs[dfObs.notna().values] tt = dfObs.index.values xx = (tt.astype('datetime64[D]') - np.datetime64('1979-01-01')).astype(np.float) t =", "from hydroDL.app import waterQuality, DGSA from hydroDL.data import gageII, usgs, gridMET from hydroDL.master", "from hydroDL import kPath from hydroDL.app import waterQuality, DGSA from hydroDL.data import gageII,", "gridMET from hydroDL.master import basins from hydroDL.post import axplot, figplot import matplotlib.pyplot as", "matplotlib.pyplot as plt import importlib from astropy.timeseries import LombScargle import pandas as pd", "LombScargle import pandas as pd import numpy as np import os import time", "len(xx) freq = np.fft.fftfreq(nt)[1:] ls = LombScargle(x, y) power = ls.power(freq) prob =", "ls = LombScargle(x, y) power = ls.power(freq) prob = ls.false_alarm_probability(power) ind = np.where(prob", "365 in temp: rMat[k, 2] = 1 # plot map dfCrd = gageII.readData(", "np.datetime64('1979-01-01')).astype(np.float) t = df.index.values x = (t.astype('datetime64[D]') - np.datetime64('1979-01-01')).astype(np.float) y = df['00955'].values y", "lat, lon, rMat[:, k], s=12) axM[0].set_title('weekly signal') axM[1].set_title('half yearly signal') axM[2].set_title('yearly signal') figM.show()", "pandas as pd import numpy as np import os import time wqData =", "gageII.updateCode(dfG) pMat = dfG.values dfS = DGSA.DGSA_light( pMat, rMat, ParametersNames=dfG.columns.tolist(), n_clsters=3) ax =", "import pandas as pd import numpy as np import os import time wqData", "yV = yV[yV > np.percentile(y, 1)] ul = np.mean(yV)+np.std(yV)*5 
dfObs[dfObs['00955'] > ul] =", "temp: rMat[k, 0] = 1 if 182 in temp: rMat[k, 1] = 1", "as np import os import time wqData = waterQuality.DataModelWQ('Silica64') siteNoLst = wqData.siteNoLst dictP", "ParametersNames=dfG.columns.tolist(), n_clsters=3) ax = dfS.sort_values(by=0).plot.barh() plt.show() dfSP = dfS.sort_values(by=0) fig, ax = plt.subplots(1,", "ax = dfS.sort_values(by=0).plot.barh() plt.show() dfSP = dfS.sort_values(by=0) fig, ax = plt.subplots(1, 1) x", "= df['00955'].values y = y-np.nanmean(y) nt = len(xx) freq = np.fft.fftfreq(nt)[1:] ls =", "as plt import importlib from astropy.timeseries import LombScargle import pandas as pd import", "dfCrd['LNG_GAGE'].values figM, axM = plt.subplots(3, 1, figsize=(8, 6)) for k in range(3): mm", "hydroDL.app import waterQuality, DGSA from hydroDL.data import gageII, usgs, gridMET from hydroDL.master import", "0.05)[0] pd = np.unique(np.abs((1/freq[ind]).astype(int))) dictP[siteNo] = pd.tolist() pLst = sum(list(dictP.values()), []) pu, pc", "dictP[siteNo] if 6 in temp or 7 in temp: rMat[k, 0] = 1", "print(siteNo) dfObs = waterQuality.readSiteY(siteNo, ['00955']) dfObs = waterQuality.readSiteY(siteNo, ['00955']) # rm outlier df", "power = ls.power(freq) prob = ls.false_alarm_probability(power) ind = np.where(prob < 0.05)[0] pd =", "1 if 182 in temp: rMat[k, 1] = 1 if 365 in temp:", "hydroDL.data import gageII, usgs, gridMET from hydroDL.master import basins from hydroDL.post import axplot,", "(tt.astype('datetime64[D]') - np.datetime64('1979-01-01')).astype(np.float) t = df.index.values x = (t.astype('datetime64[D]') - np.datetime64('1979-01-01')).astype(np.float) y =", "= np.stack([pu, pc]).transpose() rMat = np.zeros([len(siteNoLst), 3]) for k, siteNo in enumerate(siteNoLst): temp", "pc]).transpose() rMat = np.zeros([len(siteNoLst), 3]) for k, siteNo in enumerate(siteNoLst): temp = dictP[siteNo]", "rMat[k, 1] = 1 if 365 in temp: rMat[k, 2] = 1 #", "plt import importlib from astropy.timeseries import LombScargle 
import pandas as pd import numpy", "enumerate(siteNoLst): temp = dictP[siteNo] if 6 in temp or 7 in temp: rMat[k,", "rMat = np.zeros([len(siteNoLst), 3]) for k, siteNo in enumerate(siteNoLst): temp = dictP[siteNo] if", "if 182 in temp: rMat[k, 1] = 1 if 365 in temp: rMat[k,", "np.where(prob < 0.05)[0] pd = np.unique(np.abs((1/freq[ind]).astype(int))) dictP[siteNo] = pd.tolist() pLst = sum(list(dictP.values()), [])", "import numpy as np import os import time wqData = waterQuality.DataModelWQ('Silica64') siteNoLst =", "= 1 # plot map dfCrd = gageII.readData( varLst=['LAT_GAGE', 'LNG_GAGE'], siteNoLst=siteNoLst) lat =", "kPath from hydroDL.app import waterQuality, DGSA from hydroDL.data import gageII, usgs, gridMET from", "= ls.power(freq) prob = ls.false_alarm_probability(power) ind = np.where(prob < 0.05)[0] pd = np.unique(np.abs((1/freq[ind]).astype(int)))", "= 1 if 182 in temp: rMat[k, 1] = 1 if 365 in", "ax = plt.subplots(1, 1) x = range(len(dfSP)) cLst = list() for b in", "y = df['00955'].values y = y-np.nanmean(y) nt = len(xx) freq = np.fft.fftfreq(nt)[1:] ls", "temp = dictP[siteNo] if 6 in temp or 7 in temp: rMat[k, 0]", "99)] yV = yV[yV > np.percentile(y, 1)] ul = np.mean(yV)+np.std(yV)*5 dfObs[dfObs['00955'] > ul]", "signal') figM.show() dfG = gageII.readData(varLst=gageII.varLst, siteNoLst=siteNoLst) dfG = gageII.updateCode(dfG) pMat = dfG.values dfS", "pd import numpy as np import os import time wqData = waterQuality.DataModelWQ('Silica64') siteNoLst", "(t.astype('datetime64[D]') - np.datetime64('1979-01-01')).astype(np.float) y = df['00955'].values y = y-np.nanmean(y) nt = len(xx) freq", "np.datetime64('1979-01-01')).astype(np.float) y = df['00955'].values y = y-np.nanmean(y) nt = len(xx) freq = np.fft.fftfreq(nt)[1:]", "waterQuality.readSiteY(siteNo, ['00955']) # rm outlier df = dfObs[dfObs['00955'].notna().values] y = df['00955'].values yV =", "dfObs = waterQuality.readSiteY(siteNo, ['00955']) dfObs = waterQuality.readSiteY(siteNo, ['00955']) # rm 
outlier df =", "= axplot.mapPoint(axM[k], lat, lon, rMat[:, k], s=12) axM[0].set_title('weekly signal') axM[1].set_title('half yearly signal') axM[2].set_title('yearly", "prob = ls.false_alarm_probability(power) ind = np.where(prob < 0.05)[0] pd = np.unique(np.abs((1/freq[ind]).astype(int))) dictP[siteNo] =", "enumerate(siteNoLst): print(siteNo) dfObs = waterQuality.readSiteY(siteNo, ['00955']) dfObs = waterQuality.readSiteY(siteNo, ['00955']) # rm outlier", "axM[2].set_title('yearly signal') figM.show() dfG = gageII.readData(varLst=gageII.varLst, siteNoLst=siteNoLst) dfG = gageII.updateCode(dfG) pMat = dfG.values", "# plot map dfCrd = gageII.readData( varLst=['LAT_GAGE', 'LNG_GAGE'], siteNoLst=siteNoLst) lat = dfCrd['LAT_GAGE'].values lon", "import waterQuality, DGSA from hydroDL.data import gageII, usgs, gridMET from hydroDL.master import basins", "= np.mean(yV)+np.std(yV)*5 dfObs[dfObs['00955'] > ul] = np.nan # fourier df = dfObs[dfObs.notna().values] tt", "= y-np.nanmean(y) nt = len(xx) freq = np.fft.fftfreq(nt)[1:] ls = LombScargle(x, y) power", "k, siteNo in enumerate(siteNoLst): print(siteNo) dfObs = waterQuality.readSiteY(siteNo, ['00955']) dfObs = waterQuality.readSiteY(siteNo, ['00955'])", "y) power = ls.power(freq) prob = ls.false_alarm_probability(power) ind = np.where(prob < 0.05)[0] pd", "gageII.readData( varLst=['LAT_GAGE', 'LNG_GAGE'], siteNoLst=siteNoLst) lat = dfCrd['LAT_GAGE'].values lon = dfCrd['LNG_GAGE'].values figM, axM =", "figM.show() dfG = gageII.readData(varLst=gageII.varLst, siteNoLst=siteNoLst) dfG = gageII.updateCode(dfG) pMat = dfG.values dfS =", "dfG = gageII.readData(varLst=gageII.varLst, siteNoLst=siteNoLst) dfG = gageII.updateCode(dfG) pMat = dfG.values dfS = DGSA.DGSA_light(", "- np.datetime64('1979-01-01')).astype(np.float) t = df.index.values x = (t.astype('datetime64[D]') - np.datetime64('1979-01-01')).astype(np.float) y = df['00955'].values", "pLst = sum(list(dictP.values()), []) pu, pc = np.unique(np.array(pLst), 
return_counts=True) temp = np.stack([pu, pc]).transpose()", "from hydroDL.post import axplot, figplot import matplotlib.pyplot as plt import importlib from astropy.timeseries", "= df.index.values x = (t.astype('datetime64[D]') - np.datetime64('1979-01-01')).astype(np.float) y = df['00955'].values y = y-np.nanmean(y)", "k], s=12) axM[0].set_title('weekly signal') axM[1].set_title('half yearly signal') axM[2].set_title('yearly signal') figM.show() dfG = gageII.readData(varLst=gageII.varLst,", "import time wqData = waterQuality.DataModelWQ('Silica64') siteNoLst = wqData.siteNoLst dictP = dict() for k,", "gageII.readData(varLst=gageII.varLst, siteNoLst=siteNoLst) dfG = gageII.updateCode(dfG) pMat = dfG.values dfS = DGSA.DGSA_light( pMat, rMat,", "dfG.values dfS = DGSA.DGSA_light( pMat, rMat, ParametersNames=dfG.columns.tolist(), n_clsters=3) ax = dfS.sort_values(by=0).plot.barh() plt.show() dfSP", "1, figsize=(8, 6)) for k in range(3): mm = axplot.mapPoint(axM[k], lat, lon, rMat[:,", "in temp: rMat[k, 1] = 1 if 365 in temp: rMat[k, 2] =", "from astropy.timeseries import LombScargle import pandas as pd import numpy as np import", "n_clsters=3) ax = dfS.sort_values(by=0).plot.barh() plt.show() dfSP = dfS.sort_values(by=0) fig, ax = plt.subplots(1, 1)", "in temp: rMat[k, 0] = 1 if 182 in temp: rMat[k, 1] =", "y-np.nanmean(y) nt = len(xx) freq = np.fft.fftfreq(nt)[1:] ls = LombScargle(x, y) power =", "for k in range(3): mm = axplot.mapPoint(axM[k], lat, lon, rMat[:, k], s=12) axM[0].set_title('weekly", "plt.show() dfSP = dfS.sort_values(by=0) fig, ax = plt.subplots(1, 1) x = range(len(dfSP)) cLst", "= DGSA.DGSA_light( pMat, rMat, ParametersNames=dfG.columns.tolist(), n_clsters=3) ax = dfS.sort_values(by=0).plot.barh() plt.show() dfSP = dfS.sort_values(by=0)", "axM = plt.subplots(3, 1, figsize=(8, 6)) for k in range(3): mm = axplot.mapPoint(axM[k],", "= dict() for k, siteNo in enumerate(siteNoLst): print(siteNo) dfObs = waterQuality.readSiteY(siteNo, ['00955']) dfObs", "= 
dfCrd['LAT_GAGE'].values lon = dfCrd['LNG_GAGE'].values figM, axM = plt.subplots(3, 1, figsize=(8, 6)) for", "rMat[:, k], s=12) axM[0].set_title('weekly signal') axM[1].set_title('half yearly signal') axM[2].set_title('yearly signal') figM.show() dfG =", "import basins from hydroDL.post import axplot, figplot import matplotlib.pyplot as plt import importlib", "x = range(len(dfSP)) cLst = list() for b in (dfSP[0] > 1).tolist(): cLst.append('r')", "dfObs[dfObs.notna().values] tt = dfObs.index.values xx = (tt.astype('datetime64[D]') - np.datetime64('1979-01-01')).astype(np.float) t = df.index.values x", "rMat[k, 2] = 1 # plot map dfCrd = gageII.readData( varLst=['LAT_GAGE', 'LNG_GAGE'], siteNoLst=siteNoLst)", "for k, siteNo in enumerate(siteNoLst): print(siteNo) dfObs = waterQuality.readSiteY(siteNo, ['00955']) dfObs = waterQuality.readSiteY(siteNo,", "3]) for k, siteNo in enumerate(siteNoLst): temp = dictP[siteNo] if 6 in temp", "os import time wqData = waterQuality.DataModelWQ('Silica64') siteNoLst = wqData.siteNoLst dictP = dict() for" ]
[ "builders.util import shell_call __author__ = \"<NAME>\" __copyright__ = \"Copyright (C) Nginx, Inc. All", "\"\" __maintainer__ = \"<NAME>\" __email__ = \"<EMAIL>\" if __name__ == '__main__': package =", "from builders import deb, rpm, amazon from builders.util import shell_call __author__ = \"<NAME>\"", "__author__ = \"<NAME>\" __copyright__ = \"Copyright (C) Nginx, Inc. All rights reserved.\" __license__", "= \"\" __maintainer__ = \"<NAME>\" __email__ = \"<EMAIL>\" if __name__ == '__main__': package", "__maintainer__ = \"<NAME>\" __email__ = \"<EMAIL>\" if __name__ == '__main__': package = 'nginx-amplify-agent'", "os import sys from builders import deb, rpm, amazon from builders.util import shell_call", "len(sys.argv) == 1 else sys.argv[1] if os.path.isfile('/etc/debian_version'): deb.build(package=package) elif os.path.isfile('/etc/redhat-release'): rpm.build(package=package) else: os_release", "sys from builders import deb, rpm, amazon from builders.util import shell_call __author__ =", "'__main__': package = 'nginx-amplify-agent' if len(sys.argv) == 1 else sys.argv[1] if os.path.isfile('/etc/debian_version'): deb.build(package=package)", "import shell_call __author__ = \"<NAME>\" __copyright__ = \"Copyright (C) Nginx, Inc. 
All rights", "# -*- coding: utf-8 -*- import os import sys from builders import deb,", "All rights reserved.\" __license__ = \"\" __maintainer__ = \"<NAME>\" __email__ = \"<EMAIL>\" if", "rights reserved.\" __license__ = \"\" __maintainer__ = \"<NAME>\" __email__ = \"<EMAIL>\" if __name__", "1 else sys.argv[1] if os.path.isfile('/etc/debian_version'): deb.build(package=package) elif os.path.isfile('/etc/redhat-release'): rpm.build(package=package) else: os_release = shell_call('cat", "'nginx-amplify-agent' if len(sys.argv) == 1 else sys.argv[1] if os.path.isfile('/etc/debian_version'): deb.build(package=package) elif os.path.isfile('/etc/redhat-release'): rpm.build(package=package)", "coding: utf-8 -*- import os import sys from builders import deb, rpm, amazon", "= \"<EMAIL>\" if __name__ == '__main__': package = 'nginx-amplify-agent' if len(sys.argv) == 1", "deb, rpm, amazon from builders.util import shell_call __author__ = \"<NAME>\" __copyright__ = \"Copyright", "== '__main__': package = 'nginx-amplify-agent' if len(sys.argv) == 1 else sys.argv[1] if os.path.isfile('/etc/debian_version'):", "package = 'nginx-amplify-agent' if len(sys.argv) == 1 else sys.argv[1] if os.path.isfile('/etc/debian_version'): deb.build(package=package) elif", "else sys.argv[1] if os.path.isfile('/etc/debian_version'): deb.build(package=package) elif os.path.isfile('/etc/redhat-release'): rpm.build(package=package) else: os_release = shell_call('cat /etc/os-release',", "important=False) if 'amazon linux' in os_release.lower(): amazon.build(package=package) else: print(\"sorry, it will be done", "from builders.util import shell_call __author__ = \"<NAME>\" __copyright__ = \"Copyright (C) Nginx, Inc.", "= \"Copyright (C) Nginx, Inc. All rights reserved.\" __license__ = \"\" __maintainer__ =", "\"Copyright (C) Nginx, Inc. 
All rights reserved.\" __license__ = \"\" __maintainer__ = \"<NAME>\"", "rpm.build(package=package) else: os_release = shell_call('cat /etc/os-release', important=False) if 'amazon linux' in os_release.lower(): amazon.build(package=package)", "amazon from builders.util import shell_call __author__ = \"<NAME>\" __copyright__ = \"Copyright (C) Nginx,", "import sys from builders import deb, rpm, amazon from builders.util import shell_call __author__", "os.path.isfile('/etc/debian_version'): deb.build(package=package) elif os.path.isfile('/etc/redhat-release'): rpm.build(package=package) else: os_release = shell_call('cat /etc/os-release', important=False) if 'amazon", "utf-8 -*- import os import sys from builders import deb, rpm, amazon from", "__copyright__ = \"Copyright (C) Nginx, Inc. All rights reserved.\" __license__ = \"\" __maintainer__", "if 'amazon linux' in os_release.lower(): amazon.build(package=package) else: print(\"sorry, it will be done later\\n\")", "Inc. All rights reserved.\" __license__ = \"\" __maintainer__ = \"<NAME>\" __email__ = \"<EMAIL>\"", "reserved.\" __license__ = \"\" __maintainer__ = \"<NAME>\" __email__ = \"<EMAIL>\" if __name__ ==", "\"<NAME>\" __email__ = \"<EMAIL>\" if __name__ == '__main__': package = 'nginx-amplify-agent' if len(sys.argv)", "import deb, rpm, amazon from builders.util import shell_call __author__ = \"<NAME>\" __copyright__ =", "#!/usr/bin/python # -*- coding: utf-8 -*- import os import sys from builders import", "shell_call __author__ = \"<NAME>\" __copyright__ = \"Copyright (C) Nginx, Inc. 
All rights reserved.\"", "deb.build(package=package) elif os.path.isfile('/etc/redhat-release'): rpm.build(package=package) else: os_release = shell_call('cat /etc/os-release', important=False) if 'amazon linux'", "else: os_release = shell_call('cat /etc/os-release', important=False) if 'amazon linux' in os_release.lower(): amazon.build(package=package) else:", "__email__ = \"<EMAIL>\" if __name__ == '__main__': package = 'nginx-amplify-agent' if len(sys.argv) ==", "if len(sys.argv) == 1 else sys.argv[1] if os.path.isfile('/etc/debian_version'): deb.build(package=package) elif os.path.isfile('/etc/redhat-release'): rpm.build(package=package) else:", "sys.argv[1] if os.path.isfile('/etc/debian_version'): deb.build(package=package) elif os.path.isfile('/etc/redhat-release'): rpm.build(package=package) else: os_release = shell_call('cat /etc/os-release', important=False)", "shell_call('cat /etc/os-release', important=False) if 'amazon linux' in os_release.lower(): amazon.build(package=package) else: print(\"sorry, it will", "elif os.path.isfile('/etc/redhat-release'): rpm.build(package=package) else: os_release = shell_call('cat /etc/os-release', important=False) if 'amazon linux' in", "(C) Nginx, Inc. All rights reserved.\" __license__ = \"\" __maintainer__ = \"<NAME>\" __email__", "= \"<NAME>\" __copyright__ = \"Copyright (C) Nginx, Inc. All rights reserved.\" __license__ =", "rpm, amazon from builders.util import shell_call __author__ = \"<NAME>\" __copyright__ = \"Copyright (C)", "Nginx, Inc. 
All rights reserved.\" __license__ = \"\" __maintainer__ = \"<NAME>\" __email__ =", "= 'nginx-amplify-agent' if len(sys.argv) == 1 else sys.argv[1] if os.path.isfile('/etc/debian_version'): deb.build(package=package) elif os.path.isfile('/etc/redhat-release'):", "\"<EMAIL>\" if __name__ == '__main__': package = 'nginx-amplify-agent' if len(sys.argv) == 1 else", "__name__ == '__main__': package = 'nginx-amplify-agent' if len(sys.argv) == 1 else sys.argv[1] if", "os_release = shell_call('cat /etc/os-release', important=False) if 'amazon linux' in os_release.lower(): amazon.build(package=package) else: print(\"sorry,", "/etc/os-release', important=False) if 'amazon linux' in os_release.lower(): amazon.build(package=package) else: print(\"sorry, it will be", "= shell_call('cat /etc/os-release', important=False) if 'amazon linux' in os_release.lower(): amazon.build(package=package) else: print(\"sorry, it", "-*- coding: utf-8 -*- import os import sys from builders import deb, rpm,", "__license__ = \"\" __maintainer__ = \"<NAME>\" __email__ = \"<EMAIL>\" if __name__ == '__main__':", "if os.path.isfile('/etc/debian_version'): deb.build(package=package) elif os.path.isfile('/etc/redhat-release'): rpm.build(package=package) else: os_release = shell_call('cat /etc/os-release', important=False) if", "os.path.isfile('/etc/redhat-release'): rpm.build(package=package) else: os_release = shell_call('cat /etc/os-release', important=False) if 'amazon linux' in os_release.lower():", "\"<NAME>\" __copyright__ = \"Copyright (C) Nginx, Inc. 
All rights reserved.\" __license__ = \"\"", "if __name__ == '__main__': package = 'nginx-amplify-agent' if len(sys.argv) == 1 else sys.argv[1]", "import os import sys from builders import deb, rpm, amazon from builders.util import", "== 1 else sys.argv[1] if os.path.isfile('/etc/debian_version'): deb.build(package=package) elif os.path.isfile('/etc/redhat-release'): rpm.build(package=package) else: os_release =", "builders import deb, rpm, amazon from builders.util import shell_call __author__ = \"<NAME>\" __copyright__", "= \"<NAME>\" __email__ = \"<EMAIL>\" if __name__ == '__main__': package = 'nginx-amplify-agent' if", "-*- import os import sys from builders import deb, rpm, amazon from builders.util" ]
[ "% addr # got message from a client else: # in Windows, when", "> 0: # echo back the client message sock.send(data) else: # client disconnected", "self.active_sockets: sock.close() # clear all connections list self.active_sockets[:] = [] self.clients_info.clear() # close", "signal_handler = SimpleSignalHandler() server = Server(\"0.0.0.0\", PORT) server.start(MAX_CLIENTS) print \"Server started on port", "= None def __init__(self, addr): self.addr = addr class Server: active_sockets = []", "self.addr = addr class Server: active_sockets = [] clients_info = {} is_running =", "socket.socket(socket.AF_INET, socket.SOCK_STREAM) self.server_socket.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1) self.server_socket.bind((self.host, self.port)) self.server_socket.listen(max_clients) self.active_sockets.append(self.server_socket) self.is_running = True def", "peer\" exception will be thrown try: data = sock.recv(RECV_BUFFER) # if client socket", "will be thrown try: data = sock.recv(RECV_BUFFER) # if client socket has been", "False def __init__(self, host, port): self.host = host self.port = port def start(self,", "import socket, select import signal class SimpleSignalHandler: should_exit = False def __init__(self): signal.signal(signal.SIGINT,", "self.exit_gracefully) signal.signal(signal.SIGTERM, self.exit_gracefully) def exit_gracefully(self,signum, frame): self.should_exit = True class Client: addr =", "self.should_exit = True class Client: addr = None def __init__(self, addr): self.addr =", "read read_sockets,write_sockets,error_sockets = select.select(self.active_sockets,[] , self.active_sockets, timeout) for sock in read_sockets: # new", "when a TCP program closes abruptly, # a \"Connection reset by peer\" exception", "True def shutdown(self): for sock in self.active_sockets: sock.close() # clear all connections list", "= 8000 MAX_CLIENTS = 10 signal_handler = SimpleSignalHandler() server = Server(\"0.0.0.0\", PORT) server.start(MAX_CLIENTS)", "buffer size 
PORT = 8000 MAX_CLIENTS = 10 signal_handler = SimpleSignalHandler() server =", "select.select(self.active_sockets,[] , self.active_sockets, timeout) for sock in read_sockets: # new connection if sock", "self.is_running = False def disconnect_client(self, sockfd): sockfd.close() self.active_sockets.remove(sockfd) client = self.clients_info.pop(sockfd) print \"Client", "= select.select(self.active_sockets,[] , self.active_sockets, timeout) for sock in read_sockets: # new connection if", "self.exit_gracefully) def exit_gracefully(self,signum, frame): self.should_exit = True class Client: addr = None def", "is_running = False def __init__(self, host, port): self.host = host self.port = port", "be read read_sockets,write_sockets,error_sockets = select.select(self.active_sockets,[] , self.active_sockets, timeout) for sock in read_sockets: #", "in which there is a new connection recieved through server_socket sockfd, addr =", "\"Server started on port \" + str(PORT) timeout = 1 while server.is_running: server.select(timeout)", "self.active_sockets.remove(sockfd) client = self.clients_info.pop(sockfd) print \"Client (%s, %s) is offline\" % client.addr def", "client.addr def select(self, timeout=1): try: # get the list of sockets which are", "a \"Connection reset by peer\" exception will be thrown try: data = sock.recv(RECV_BUFFER)", "has been closed - the received buffer will be empty if len(data) >", "KeyboardInterrupt) as e: self.shutdown() if __name__ == \"__main__\": RECV_BUFFER = 1024 # buffer", "self.disconnect_client(sock) # client disconnected (Windows) except: self.disconnect_client(sock) continue for sock in error_sockets: self.disconnect_client(sock)", "class SimpleSignalHandler: should_exit = False def __init__(self): signal.signal(signal.SIGINT, self.exit_gracefully) signal.signal(signal.SIGTERM, self.exit_gracefully) def exit_gracefully(self,signum,", "Server: active_sockets = [] clients_info = {} is_running = False def __init__(self, host,", "program closes 
abruptly, # a \"Connection reset by peer\" exception will be thrown", "signal.signal(signal.SIGTERM, self.exit_gracefully) def exit_gracefully(self,signum, frame): self.should_exit = True class Client: addr = None", "timeout=1): try: # get the list of sockets which are ready to be", "been closed - the received buffer will be empty if len(data) > 0:", "Client(addr) print \"Client (%s, %s) connected\" % addr # got message from a", "port def start(self, max_clients=10): self.server_socket = socket.socket(socket.AF_INET, socket.SOCK_STREAM) self.server_socket.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1) self.server_socket.bind((self.host, self.port))", "# a \"Connection reset by peer\" exception will be thrown try: data =", "(Windows) except: self.disconnect_client(sock) continue for sock in error_sockets: self.disconnect_client(sock) except (select.error, KeyboardInterrupt) as", "in error_sockets: self.disconnect_client(sock) except (select.error, KeyboardInterrupt) as e: self.shutdown() if __name__ == \"__main__\":", "self.server_socket.listen(max_clients) self.active_sockets.append(self.server_socket) self.is_running = True def shutdown(self): for sock in self.active_sockets: sock.close() #", "\"__main__\": RECV_BUFFER = 1024 # buffer size PORT = 8000 MAX_CLIENTS = 10", "in Windows, when a TCP program closes abruptly, # a \"Connection reset by", "e: self.shutdown() if __name__ == \"__main__\": RECV_BUFFER = 1024 # buffer size PORT", "None def __init__(self, addr): self.addr = addr class Server: active_sockets = [] clients_info", "port): self.host = host self.port = port def start(self, max_clients=10): self.server_socket = socket.socket(socket.AF_INET,", "socket self.server_socket.close() self.is_running = False def disconnect_client(self, sockfd): sockfd.close() self.active_sockets.remove(sockfd) client = self.clients_info.pop(sockfd)", "a client else: # in Windows, when a TCP program closes abruptly, #", "data = sock.recv(RECV_BUFFER) # if client socket 
has been closed - the received", "frame): self.should_exit = True class Client: addr = None def __init__(self, addr): self.addr", "if client socket has been closed - the received buffer will be empty", "%s) connected\" % addr # got message from a client else: # in", "(%s, %s) is offline\" % client.addr def select(self, timeout=1): try: # get the", "select(self, timeout=1): try: # get the list of sockets which are ready to", "sock.recv(RECV_BUFFER) # if client socket has been closed - the received buffer will", "client = self.clients_info.pop(sockfd) print \"Client (%s, %s) is offline\" % client.addr def select(self,", "should_exit = False def __init__(self): signal.signal(signal.SIGINT, self.exit_gracefully) signal.signal(signal.SIGTERM, self.exit_gracefully) def exit_gracefully(self,signum, frame): self.should_exit", "the case in which there is a new connection recieved through server_socket sockfd,", "def disconnect_client(self, sockfd): sockfd.close() self.active_sockets.remove(sockfd) client = self.clients_info.pop(sockfd) print \"Client (%s, %s) is", "close listening socket self.server_socket.close() self.is_running = False def disconnect_client(self, sockfd): sockfd.close() self.active_sockets.remove(sockfd) client", "connections list self.active_sockets[:] = [] self.clients_info.clear() # close listening socket self.server_socket.close() self.is_running =", "case in which there is a new connection recieved through server_socket sockfd, addr", "if len(data) > 0: # echo back the client message sock.send(data) else: #", "clients_info = {} is_running = False def __init__(self, host, port): self.host = host", "try: # get the list of sockets which are ready to be read", "= socket.socket(socket.AF_INET, socket.SOCK_STREAM) self.server_socket.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1) self.server_socket.bind((self.host, self.port)) self.server_socket.listen(max_clients) self.active_sockets.append(self.server_socket) self.is_running = True", "host, port): 
self.host = host self.port = port def start(self, max_clients=10): self.server_socket =", "abruptly, # a \"Connection reset by peer\" exception will be thrown try: data", "except: self.disconnect_client(sock) continue for sock in error_sockets: self.disconnect_client(sock) except (select.error, KeyboardInterrupt) as e:", "# get the list of sockets which are ready to be read read_sockets,write_sockets,error_sockets", "except (select.error, KeyboardInterrupt) as e: self.shutdown() if __name__ == \"__main__\": RECV_BUFFER = 1024", "received buffer will be empty if len(data) > 0: # echo back the", "[] clients_info = {} is_running = False def __init__(self, host, port): self.host =", "# echo back the client message sock.send(data) else: # client disconnected self.disconnect_client(sock) #", "sockfd): sockfd.close() self.active_sockets.remove(sockfd) client = self.clients_info.pop(sockfd) print \"Client (%s, %s) is offline\" %", "self.server_socket.close() self.is_running = False def disconnect_client(self, sockfd): sockfd.close() self.active_sockets.remove(sockfd) client = self.clients_info.pop(sockfd) print", "if sock == self.server_socket: # Handle the case in which there is a", "0: # echo back the client message sock.send(data) else: # client disconnected self.disconnect_client(sock)", "client disconnected (Windows) except: self.disconnect_client(sock) continue for sock in error_sockets: self.disconnect_client(sock) except (select.error,", "1) self.server_socket.bind((self.host, self.port)) self.server_socket.listen(max_clients) self.active_sockets.append(self.server_socket) self.is_running = True def shutdown(self): for sock in", "Handle the case in which there is a new connection recieved through server_socket", "addr # got message from a client else: # in Windows, when a", "= [] clients_info = {} is_running = False def __init__(self, host, port): self.host", "False def __init__(self): signal.signal(signal.SIGINT, self.exit_gracefully) signal.signal(signal.SIGTERM, 
self.exit_gracefully) def exit_gracefully(self,signum, frame): self.should_exit = True", "False def disconnect_client(self, sockfd): sockfd.close() self.active_sockets.remove(sockfd) client = self.clients_info.pop(sockfd) print \"Client (%s, %s)", "self.is_running = True def shutdown(self): for sock in self.active_sockets: sock.close() # clear all", "start(self, max_clients=10): self.server_socket = socket.socket(socket.AF_INET, socket.SOCK_STREAM) self.server_socket.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1) self.server_socket.bind((self.host, self.port)) self.server_socket.listen(max_clients) self.active_sockets.append(self.server_socket)", "shutdown(self): for sock in self.active_sockets: sock.close() # clear all connections list self.active_sockets[:] =", "sock == self.server_socket: # Handle the case in which there is a new", "are ready to be read read_sockets,write_sockets,error_sockets = select.select(self.active_sockets,[] , self.active_sockets, timeout) for sock", "if __name__ == \"__main__\": RECV_BUFFER = 1024 # buffer size PORT = 8000", "\" + str(PORT) timeout = 1 while server.is_running: server.select(timeout) if signal_handler.should_exit: server.shutdown() print", "exit_gracefully(self,signum, frame): self.should_exit = True class Client: addr = None def __init__(self, addr):", "import signal class SimpleSignalHandler: should_exit = False def __init__(self): signal.signal(signal.SIGINT, self.exit_gracefully) signal.signal(signal.SIGTERM, self.exit_gracefully)", "socket, select import signal class SimpleSignalHandler: should_exit = False def __init__(self): signal.signal(signal.SIGINT, self.exit_gracefully)", "self.server_socket.bind((self.host, self.port)) self.server_socket.listen(max_clients) self.active_sockets.append(self.server_socket) self.is_running = True def shutdown(self): for sock in self.active_sockets:", "# clear all connections list self.active_sockets[:] = [] self.clients_info.clear() # close listening socket", "sockets 
which are ready to be read read_sockets,write_sockets,error_sockets = select.select(self.active_sockets,[] , self.active_sockets, timeout)", "= port def start(self, max_clients=10): self.server_socket = socket.socket(socket.AF_INET, socket.SOCK_STREAM) self.server_socket.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1) self.server_socket.bind((self.host,", "- the received buffer will be empty if len(data) > 0: # echo", "# client disconnected (Windows) except: self.disconnect_client(sock) continue for sock in error_sockets: self.disconnect_client(sock) except", "on port \" + str(PORT) timeout = 1 while server.is_running: server.select(timeout) if signal_handler.should_exit:", "self.server_socket = socket.socket(socket.AF_INET, socket.SOCK_STREAM) self.server_socket.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1) self.server_socket.bind((self.host, self.port)) self.server_socket.listen(max_clients) self.active_sockets.append(self.server_socket) self.is_running =", "# in Windows, when a TCP program closes abruptly, # a \"Connection reset", "try: data = sock.recv(RECV_BUFFER) # if client socket has been closed - the", "== \"__main__\": RECV_BUFFER = 1024 # buffer size PORT = 8000 MAX_CLIENTS =", "self.active_sockets.append(self.server_socket) self.is_running = True def shutdown(self): for sock in self.active_sockets: sock.close() # clear", "# got message from a client else: # in Windows, when a TCP", "buffer will be empty if len(data) > 0: # echo back the client", "signal.signal(signal.SIGINT, self.exit_gracefully) signal.signal(signal.SIGTERM, self.exit_gracefully) def exit_gracefully(self,signum, frame): self.should_exit = True class Client: addr", "client socket has been closed - the received buffer will be empty if", "= False def __init__(self): signal.signal(signal.SIGINT, self.exit_gracefully) signal.signal(signal.SIGTERM, self.exit_gracefully) def exit_gracefully(self,signum, frame): self.should_exit =", "new connection recieved through server_socket 
sockfd, addr = self.server_socket.accept() self.active_sockets.append(sockfd) self.clients_info[sockfd] = Client(addr)", "self.clients_info.clear() # close listening socket self.server_socket.close() self.is_running = False def disconnect_client(self, sockfd): sockfd.close()", "def __init__(self, host, port): self.host = host self.port = port def start(self, max_clients=10):", "connection if sock == self.server_socket: # Handle the case in which there is", "error_sockets: self.disconnect_client(sock) except (select.error, KeyboardInterrupt) as e: self.shutdown() if __name__ == \"__main__\": RECV_BUFFER", "Client: addr = None def __init__(self, addr): self.addr = addr class Server: active_sockets", "self.shutdown() if __name__ == \"__main__\": RECV_BUFFER = 1024 # buffer size PORT =", "a new connection recieved through server_socket sockfd, addr = self.server_socket.accept() self.active_sockets.append(sockfd) self.clients_info[sockfd] =", "# if client socket has been closed - the received buffer will be", "sock.close() # clear all connections list self.active_sockets[:] = [] self.clients_info.clear() # close listening", "(select.error, KeyboardInterrupt) as e: self.shutdown() if __name__ == \"__main__\": RECV_BUFFER = 1024 #", "= Server(\"0.0.0.0\", PORT) server.start(MAX_CLIENTS) print \"Server started on port \" + str(PORT) timeout", "list of sockets which are ready to be read read_sockets,write_sockets,error_sockets = select.select(self.active_sockets,[] ,", "= {} is_running = False def __init__(self, host, port): self.host = host self.port", "signal class SimpleSignalHandler: should_exit = False def __init__(self): signal.signal(signal.SIGINT, self.exit_gracefully) signal.signal(signal.SIGTERM, self.exit_gracefully) def", "host self.port = port def start(self, max_clients=10): self.server_socket = socket.socket(socket.AF_INET, socket.SOCK_STREAM) self.server_socket.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR,", "= self.clients_info.pop(sockfd) print 
\"Client (%s, %s) is offline\" % client.addr def select(self, timeout=1):", "thrown try: data = sock.recv(RECV_BUFFER) # if client socket has been closed -", "the received buffer will be empty if len(data) > 0: # echo back", "self.port = port def start(self, max_clients=10): self.server_socket = socket.socket(socket.AF_INET, socket.SOCK_STREAM) self.server_socket.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)", "(%s, %s) connected\" % addr # got message from a client else: #", "sock in error_sockets: self.disconnect_client(sock) except (select.error, KeyboardInterrupt) as e: self.shutdown() if __name__ ==", "def start(self, max_clients=10): self.server_socket = socket.socket(socket.AF_INET, socket.SOCK_STREAM) self.server_socket.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1) self.server_socket.bind((self.host, self.port)) self.server_socket.listen(max_clients)", "= [] self.clients_info.clear() # close listening socket self.server_socket.close() self.is_running = False def disconnect_client(self,", "= Client(addr) print \"Client (%s, %s) connected\" % addr # got message from", "= sock.recv(RECV_BUFFER) # if client socket has been closed - the received buffer", "self.port)) self.server_socket.listen(max_clients) self.active_sockets.append(self.server_socket) self.is_running = True def shutdown(self): for sock in self.active_sockets: sock.close()", "+ str(PORT) timeout = 1 while server.is_running: server.select(timeout) if signal_handler.should_exit: server.shutdown() print \"Server", "def exit_gracefully(self,signum, frame): self.should_exit = True class Client: addr = None def __init__(self,", "RECV_BUFFER = 1024 # buffer size PORT = 8000 MAX_CLIENTS = 10 signal_handler", "in self.active_sockets: sock.close() # clear all connections list self.active_sockets[:] = [] self.clients_info.clear() #", "new connection if sock == self.server_socket: # Handle the case in which there", "read_sockets,write_sockets,error_sockets = 
select.select(self.active_sockets,[] , self.active_sockets, timeout) for sock in read_sockets: # new connection", "be empty if len(data) > 0: # echo back the client message sock.send(data)", "= 10 signal_handler = SimpleSignalHandler() server = Server(\"0.0.0.0\", PORT) server.start(MAX_CLIENTS) print \"Server started", "10 signal_handler = SimpleSignalHandler() server = Server(\"0.0.0.0\", PORT) server.start(MAX_CLIENTS) print \"Server started on", "read_sockets: # new connection if sock == self.server_socket: # Handle the case in", "sock in self.active_sockets: sock.close() # clear all connections list self.active_sockets[:] = [] self.clients_info.clear()", "client else: # in Windows, when a TCP program closes abruptly, # a", "empty if len(data) > 0: # echo back the client message sock.send(data) else:", "self.disconnect_client(sock) except (select.error, KeyboardInterrupt) as e: self.shutdown() if __name__ == \"__main__\": RECV_BUFFER =", "as e: self.shutdown() if __name__ == \"__main__\": RECV_BUFFER = 1024 # buffer size", "1024 # buffer size PORT = 8000 MAX_CLIENTS = 10 signal_handler = SimpleSignalHandler()", "listening socket self.server_socket.close() self.is_running = False def disconnect_client(self, sockfd): sockfd.close() self.active_sockets.remove(sockfd) client =", "= SimpleSignalHandler() server = Server(\"0.0.0.0\", PORT) server.start(MAX_CLIENTS) print \"Server started on port \"", "print \"Client (%s, %s) connected\" % addr # got message from a client", "8000 MAX_CLIENTS = 10 signal_handler = SimpleSignalHandler() server = Server(\"0.0.0.0\", PORT) server.start(MAX_CLIENTS) print", "self.server_socket.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1) self.server_socket.bind((self.host, self.port)) self.server_socket.listen(max_clients) self.active_sockets.append(self.server_socket) self.is_running = True def shutdown(self): for", "all connections list self.active_sockets[:] = [] self.clients_info.clear() # close listening socket 
self.server_socket.close() self.is_running", "list self.active_sockets[:] = [] self.clients_info.clear() # close listening socket self.server_socket.close() self.is_running = False", "is offline\" % client.addr def select(self, timeout=1): try: # get the list of", "def select(self, timeout=1): try: # get the list of sockets which are ready", "self.server_socket: # Handle the case in which there is a new connection recieved", "addr): self.addr = addr class Server: active_sockets = [] clients_info = {} is_running", "connected\" % addr # got message from a client else: # in Windows,", "def __init__(self, addr): self.addr = addr class Server: active_sockets = [] clients_info =", "started on port \" + str(PORT) timeout = 1 while server.is_running: server.select(timeout) if", "get the list of sockets which are ready to be read read_sockets,write_sockets,error_sockets =", "% client.addr def select(self, timeout=1): try: # get the list of sockets which", "server_socket sockfd, addr = self.server_socket.accept() self.active_sockets.append(sockfd) self.clients_info[sockfd] = Client(addr) print \"Client (%s, %s)", "the list of sockets which are ready to be read read_sockets,write_sockets,error_sockets = select.select(self.active_sockets,[]", "True class Client: addr = None def __init__(self, addr): self.addr = addr class", "# close listening socket self.server_socket.close() self.is_running = False def disconnect_client(self, sockfd): sockfd.close() self.active_sockets.remove(sockfd)", "# client disconnected self.disconnect_client(sock) # client disconnected (Windows) except: self.disconnect_client(sock) continue for sock", "__init__(self, host, port): self.host = host self.port = port def start(self, max_clients=10): self.server_socket", "ready to be read read_sockets,write_sockets,error_sockets = select.select(self.active_sockets,[] , self.active_sockets, timeout) for sock in", "__init__(self, addr): self.addr = addr class Server: active_sockets = [] clients_info = {}", ", 
self.active_sockets, timeout) for sock in read_sockets: # new connection if sock ==", "client disconnected self.disconnect_client(sock) # client disconnected (Windows) except: self.disconnect_client(sock) continue for sock in", "self.active_sockets, timeout) for sock in read_sockets: # new connection if sock == self.server_socket:", "select import signal class SimpleSignalHandler: should_exit = False def __init__(self): signal.signal(signal.SIGINT, self.exit_gracefully) signal.signal(signal.SIGTERM,", "max_clients=10): self.server_socket = socket.socket(socket.AF_INET, socket.SOCK_STREAM) self.server_socket.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1) self.server_socket.bind((self.host, self.port)) self.server_socket.listen(max_clients) self.active_sockets.append(self.server_socket) self.is_running", "{} is_running = False def __init__(self, host, port): self.host = host self.port =", "from a client else: # in Windows, when a TCP program closes abruptly,", "by peer\" exception will be thrown try: data = sock.recv(RECV_BUFFER) # if client", "self.clients_info.pop(sockfd) print \"Client (%s, %s) is offline\" % client.addr def select(self, timeout=1): try:", "\"Client (%s, %s) is offline\" % client.addr def select(self, timeout=1): try: # get", "a TCP program closes abruptly, # a \"Connection reset by peer\" exception will", "disconnected self.disconnect_client(sock) # client disconnected (Windows) except: self.disconnect_client(sock) continue for sock in error_sockets:", "self.host = host self.port = port def start(self, max_clients=10): self.server_socket = socket.socket(socket.AF_INET, socket.SOCK_STREAM)", "sockfd, addr = self.server_socket.accept() self.active_sockets.append(sockfd) self.clients_info[sockfd] = Client(addr) print \"Client (%s, %s) connected\"", "in read_sockets: # new connection if sock == self.server_socket: # Handle the case", "= addr class Server: active_sockets = [] clients_info = {} is_running = False", "exception will be thrown try: 
data = sock.recv(RECV_BUFFER) # if client socket has", "connection recieved through server_socket sockfd, addr = self.server_socket.accept() self.active_sockets.append(sockfd) self.clients_info[sockfd] = Client(addr) print", "active_sockets = [] clients_info = {} is_running = False def __init__(self, host, port):", "__init__(self): signal.signal(signal.SIGINT, self.exit_gracefully) signal.signal(signal.SIGTERM, self.exit_gracefully) def exit_gracefully(self,signum, frame): self.should_exit = True class Client:", "clear all connections list self.active_sockets[:] = [] self.clients_info.clear() # close listening socket self.server_socket.close()", "is a new connection recieved through server_socket sockfd, addr = self.server_socket.accept() self.active_sockets.append(sockfd) self.clients_info[sockfd]", "= True def shutdown(self): for sock in self.active_sockets: sock.close() # clear all connections", "for sock in error_sockets: self.disconnect_client(sock) except (select.error, KeyboardInterrupt) as e: self.shutdown() if __name__", "sockfd.close() self.active_sockets.remove(sockfd) client = self.clients_info.pop(sockfd) print \"Client (%s, %s) is offline\" % client.addr", "= True class Client: addr = None def __init__(self, addr): self.addr = addr", "closes abruptly, # a \"Connection reset by peer\" exception will be thrown try:", "self.active_sockets.append(sockfd) self.clients_info[sockfd] = Client(addr) print \"Client (%s, %s) connected\" % addr # got", "len(data) > 0: # echo back the client message sock.send(data) else: # client", "self.disconnect_client(sock) continue for sock in error_sockets: self.disconnect_client(sock) except (select.error, KeyboardInterrupt) as e: self.shutdown()", "MAX_CLIENTS = 10 signal_handler = SimpleSignalHandler() server = Server(\"0.0.0.0\", PORT) server.start(MAX_CLIENTS) print \"Server", "TCP program closes abruptly, # a \"Connection reset by peer\" exception will be", "__name__ == \"__main__\": RECV_BUFFER = 1024 # buffer size 
PORT = 8000 MAX_CLIENTS", "[] self.clients_info.clear() # close listening socket self.server_socket.close() self.is_running = False def disconnect_client(self, sockfd):", "addr = self.server_socket.accept() self.active_sockets.append(sockfd) self.clients_info[sockfd] = Client(addr) print \"Client (%s, %s) connected\" %", "socket.SOCK_STREAM) self.server_socket.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1) self.server_socket.bind((self.host, self.port)) self.server_socket.listen(max_clients) self.active_sockets.append(self.server_socket) self.is_running = True def shutdown(self):", "there is a new connection recieved through server_socket sockfd, addr = self.server_socket.accept() self.active_sockets.append(sockfd)", "\"Client (%s, %s) connected\" % addr # got message from a client else:", "addr class Server: active_sockets = [] clients_info = {} is_running = False def", "which there is a new connection recieved through server_socket sockfd, addr = self.server_socket.accept()", "through server_socket sockfd, addr = self.server_socket.accept() self.active_sockets.append(sockfd) self.clients_info[sockfd] = Client(addr) print \"Client (%s,", "addr = None def __init__(self, addr): self.addr = addr class Server: active_sockets =", "recieved through server_socket sockfd, addr = self.server_socket.accept() self.active_sockets.append(sockfd) self.clients_info[sockfd] = Client(addr) print \"Client", "class Server: active_sockets = [] clients_info = {} is_running = False def __init__(self,", "continue for sock in error_sockets: self.disconnect_client(sock) except (select.error, KeyboardInterrupt) as e: self.shutdown() if", "else: # in Windows, when a TCP program closes abruptly, # a \"Connection", "else: # client disconnected self.disconnect_client(sock) # client disconnected (Windows) except: self.disconnect_client(sock) continue for", "the client message sock.send(data) else: # client disconnected self.disconnect_client(sock) # client disconnected (Windows)", "print 
\"Client (%s, %s) is offline\" % client.addr def select(self, timeout=1): try: #", "class Client: addr = None def __init__(self, addr): self.addr = addr class Server:", "for sock in self.active_sockets: sock.close() # clear all connections list self.active_sockets[:] = []", "to be read read_sockets,write_sockets,error_sockets = select.select(self.active_sockets,[] , self.active_sockets, timeout) for sock in read_sockets:", "# new connection if sock == self.server_socket: # Handle the case in which", "def shutdown(self): for sock in self.active_sockets: sock.close() # clear all connections list self.active_sockets[:]", "got message from a client else: # in Windows, when a TCP program", "be thrown try: data = sock.recv(RECV_BUFFER) # if client socket has been closed", "disconnect_client(self, sockfd): sockfd.close() self.active_sockets.remove(sockfd) client = self.clients_info.pop(sockfd) print \"Client (%s, %s) is offline\"", "\"Connection reset by peer\" exception will be thrown try: data = sock.recv(RECV_BUFFER) #", "socket has been closed - the received buffer will be empty if len(data)", "client message sock.send(data) else: # client disconnected self.disconnect_client(sock) # client disconnected (Windows) except:", "size PORT = 8000 MAX_CLIENTS = 10 signal_handler = SimpleSignalHandler() server = Server(\"0.0.0.0\",", "server = Server(\"0.0.0.0\", PORT) server.start(MAX_CLIENTS) print \"Server started on port \" + str(PORT)", "Server(\"0.0.0.0\", PORT) server.start(MAX_CLIENTS) print \"Server started on port \" + str(PORT) timeout =", "echo back the client message sock.send(data) else: # client disconnected self.disconnect_client(sock) # client", "= False def disconnect_client(self, sockfd): sockfd.close() self.active_sockets.remove(sockfd) client = self.clients_info.pop(sockfd) print \"Client (%s,", "will be empty if len(data) > 0: # echo back the client message", "sock in read_sockets: # new connection if sock == self.server_socket: # Handle the", "PORT = 
8000 MAX_CLIENTS = 10 signal_handler = SimpleSignalHandler() server = Server(\"0.0.0.0\", PORT)", "which are ready to be read read_sockets,write_sockets,error_sockets = select.select(self.active_sockets,[] , self.active_sockets, timeout) for", "# buffer size PORT = 8000 MAX_CLIENTS = 10 signal_handler = SimpleSignalHandler() server", "print \"Server started on port \" + str(PORT) timeout = 1 while server.is_running:", "self.active_sockets[:] = [] self.clients_info.clear() # close listening socket self.server_socket.close() self.is_running = False def", "= 1024 # buffer size PORT = 8000 MAX_CLIENTS = 10 signal_handler =", "offline\" % client.addr def select(self, timeout=1): try: # get the list of sockets", "# Handle the case in which there is a new connection recieved through", "SimpleSignalHandler() server = Server(\"0.0.0.0\", PORT) server.start(MAX_CLIENTS) print \"Server started on port \" +", "= False def __init__(self, host, port): self.host = host self.port = port def", "self.server_socket.accept() self.active_sockets.append(sockfd) self.clients_info[sockfd] = Client(addr) print \"Client (%s, %s) connected\" % addr #", "sock.send(data) else: # client disconnected self.disconnect_client(sock) # client disconnected (Windows) except: self.disconnect_client(sock) continue", "= host self.port = port def start(self, max_clients=10): self.server_socket = socket.socket(socket.AF_INET, socket.SOCK_STREAM) self.server_socket.setsockopt(socket.SOL_SOCKET,", "= self.server_socket.accept() self.active_sockets.append(sockfd) self.clients_info[sockfd] = Client(addr) print \"Client (%s, %s) connected\" % addr", "PORT) server.start(MAX_CLIENTS) print \"Server started on port \" + str(PORT) timeout = 1", "reset by peer\" exception will be thrown try: data = sock.recv(RECV_BUFFER) # if", "def __init__(self): signal.signal(signal.SIGINT, self.exit_gracefully) signal.signal(signal.SIGTERM, self.exit_gracefully) def exit_gracefully(self,signum, frame): self.should_exit = True 
class", "socket.SO_REUSEADDR, 1) self.server_socket.bind((self.host, self.port)) self.server_socket.listen(max_clients) self.active_sockets.append(self.server_socket) self.is_running = True def shutdown(self): for sock", "self.clients_info[sockfd] = Client(addr) print \"Client (%s, %s) connected\" % addr # got message", "timeout) for sock in read_sockets: # new connection if sock == self.server_socket: #", "SimpleSignalHandler: should_exit = False def __init__(self): signal.signal(signal.SIGINT, self.exit_gracefully) signal.signal(signal.SIGTERM, self.exit_gracefully) def exit_gracefully(self,signum, frame):", "%s) is offline\" % client.addr def select(self, timeout=1): try: # get the list", "Windows, when a TCP program closes abruptly, # a \"Connection reset by peer\"", "message from a client else: # in Windows, when a TCP program closes", "back the client message sock.send(data) else: # client disconnected self.disconnect_client(sock) # client disconnected", "== self.server_socket: # Handle the case in which there is a new connection", "closed - the received buffer will be empty if len(data) > 0: #", "for sock in read_sockets: # new connection if sock == self.server_socket: # Handle", "disconnected (Windows) except: self.disconnect_client(sock) continue for sock in error_sockets: self.disconnect_client(sock) except (select.error, KeyboardInterrupt)", "server.start(MAX_CLIENTS) print \"Server started on port \" + str(PORT) timeout = 1 while", "str(PORT) timeout = 1 while server.is_running: server.select(timeout) if signal_handler.should_exit: server.shutdown() print \"Server exited\"", "message sock.send(data) else: # client disconnected self.disconnect_client(sock) # client disconnected (Windows) except: self.disconnect_client(sock)", "port \" + str(PORT) timeout = 1 while server.is_running: server.select(timeout) if signal_handler.should_exit: server.shutdown()", "of sockets which are ready to be read read_sockets,write_sockets,error_sockets = 
select.select(self.active_sockets,[] , self.active_sockets," ]
[ "3.0.8 on 2020-12-19 21:33 from django.db import migrations, models import django.db.models.deletion class Migration(migrations.Migration):", "migrations, models import django.db.models.deletion class Migration(migrations.Migration): dependencies = [ ('category', '0001_initial'), ('product', '0001_initial'),", "= [ ('category', '0001_initial'), ('product', '0001_initial'), ] operations = [ migrations.AddField( model_name='product', name='category',", "migrations.AddField( model_name='product', name='category', field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, to='category.Category'), ), migrations.AlterField( model_name='product', name='image', field=models.ImageField(blank=True, null=True,", "= [ migrations.AddField( model_name='product', name='category', field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, to='category.Category'), ), migrations.AlterField( model_name='product', name='image',", "django.db import migrations, models import django.db.models.deletion class Migration(migrations.Migration): dependencies = [ ('category', '0001_initial'),", "to='category.Category'), ), migrations.AlterField( model_name='product', name='image', field=models.ImageField(blank=True, null=True, upload_to='images/'), ), migrations.AlterField( model_name='product', name='is_active', field=models.BooleanField(blank=True,", "21:33 from django.db import migrations, models import django.db.models.deletion class Migration(migrations.Migration): dependencies = [", "class Migration(migrations.Migration): dependencies = [ ('category', '0001_initial'), ('product', '0001_initial'), ] operations = [", "Django 3.0.8 on 2020-12-19 21:33 from django.db import migrations, models import django.db.models.deletion class", "import migrations, models import django.db.models.deletion class Migration(migrations.Migration): dependencies = [ ('category', '0001_initial'), ('product',", "import django.db.models.deletion 
class Migration(migrations.Migration): dependencies = [ ('category', '0001_initial'), ('product', '0001_initial'), ] operations", "name='category', field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, to='category.Category'), ), migrations.AlterField( model_name='product', name='image', field=models.ImageField(blank=True, null=True, upload_to='images/'), ),", "Migration(migrations.Migration): dependencies = [ ('category', '0001_initial'), ('product', '0001_initial'), ] operations = [ migrations.AddField(", "null=True, on_delete=django.db.models.deletion.SET_NULL, to='category.Category'), ), migrations.AlterField( model_name='product', name='image', field=models.ImageField(blank=True, null=True, upload_to='images/'), ), migrations.AlterField( model_name='product',", "django.db.models.deletion class Migration(migrations.Migration): dependencies = [ ('category', '0001_initial'), ('product', '0001_initial'), ] operations =", "] operations = [ migrations.AddField( model_name='product', name='category', field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, to='category.Category'), ), migrations.AlterField(", "by Django 3.0.8 on 2020-12-19 21:33 from django.db import migrations, models import django.db.models.deletion", "'0001_initial'), ] operations = [ migrations.AddField( model_name='product', name='category', field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, to='category.Category'), ),", "[ migrations.AddField( model_name='product', name='category', field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, to='category.Category'), ), migrations.AlterField( model_name='product', name='image', field=models.ImageField(blank=True,", "'0001_initial'), ('product', '0001_initial'), ] operations = [ migrations.AddField( model_name='product', name='category', field=models.ForeignKey(blank=True, null=True, 
on_delete=django.db.models.deletion.SET_NULL,", "('product', '0001_initial'), ] operations = [ migrations.AddField( model_name='product', name='category', field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, to='category.Category'),", "model_name='product', name='image', field=models.ImageField(blank=True, null=True, upload_to='images/'), ), migrations.AlterField( model_name='product', name='is_active', field=models.BooleanField(blank=True, default=True), ), ]", "on 2020-12-19 21:33 from django.db import migrations, models import django.db.models.deletion class Migration(migrations.Migration): dependencies", "field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, to='category.Category'), ), migrations.AlterField( model_name='product', name='image', field=models.ImageField(blank=True, null=True, upload_to='images/'), ), migrations.AlterField(", "operations = [ migrations.AddField( model_name='product', name='category', field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, to='category.Category'), ), migrations.AlterField( model_name='product',", "), migrations.AlterField( model_name='product', name='image', field=models.ImageField(blank=True, null=True, upload_to='images/'), ), migrations.AlterField( model_name='product', name='is_active', field=models.BooleanField(blank=True, default=True),", "from django.db import migrations, models import django.db.models.deletion class Migration(migrations.Migration): dependencies = [ ('category',", "migrations.AlterField( model_name='product', name='image', field=models.ImageField(blank=True, null=True, upload_to='images/'), ), migrations.AlterField( model_name='product', name='is_active', field=models.BooleanField(blank=True, default=True), ),", "Generated by Django 3.0.8 on 2020-12-19 21:33 from django.db import migrations, models import", "# Generated by Django 3.0.8 on 2020-12-19 21:33 from django.db import 
migrations, models", "[ ('category', '0001_initial'), ('product', '0001_initial'), ] operations = [ migrations.AddField( model_name='product', name='category', field=models.ForeignKey(blank=True,", "('category', '0001_initial'), ('product', '0001_initial'), ] operations = [ migrations.AddField( model_name='product', name='category', field=models.ForeignKey(blank=True, null=True,", "dependencies = [ ('category', '0001_initial'), ('product', '0001_initial'), ] operations = [ migrations.AddField( model_name='product',", "model_name='product', name='category', field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, to='category.Category'), ), migrations.AlterField( model_name='product', name='image', field=models.ImageField(blank=True, null=True, upload_to='images/'),", "models import django.db.models.deletion class Migration(migrations.Migration): dependencies = [ ('category', '0001_initial'), ('product', '0001_initial'), ]", "2020-12-19 21:33 from django.db import migrations, models import django.db.models.deletion class Migration(migrations.Migration): dependencies =", "on_delete=django.db.models.deletion.SET_NULL, to='category.Category'), ), migrations.AlterField( model_name='product', name='image', field=models.ImageField(blank=True, null=True, upload_to='images/'), ), migrations.AlterField( model_name='product', name='is_active'," ]
[ "lambda x: x[\"division\"].value_counts(normalize=True) ) for df, name in zip([glass_meta_sc, companies_sc], [\"glass\", \"ch\"]) ],", "# Functions def make_glass_meta(companies, score=60): \"\"\"Makes the glass metadata table\"\"\" logging.info(\"Making glass metadata\")", ".assign(lad_name=lambda x: x[\"laua\"].map(_LAD_NAME_DICT)) ) corr_list = [] for x in set(lad_sector_shares[\"laua\"]): sel =", ".properties(height=300, width=200) ) glass_validation = alt.hconcat(sector_comparison_chart, glass_share_map) glass_validation save_altair(glass_validation, \"glass_place_validation\", driver, path=FIG_PATH) export_chart(glass_validation,", "driver=driver, path=FIG_PATH ) export_chart(sector_comparison_chart, \"glass_sector_validation\") # Chart comparing geo distributions sh = read_shape()", "= extract_sic_code_description(load_sic_taxonomy(), \"Division\") _SECTION_DIVISION_LOOKUP, _SECTION_NAME_LOOKUP = make_section_division_lookup() _LAD_NUTS1_LOOKUP = read_lad_nuts1_lookup() _LAD_NAME_DICT = make_lad_lookup()", "\"ch\"]) ], axis=1, ) ).fillna(0) lad_sector_shares.columns = [\"glass\", \"ch\"] lad_sector_shares = ( lad_sector_shares.assign(share_norm=lambda", "nspl[\"nuts1\"] = nspl[geo].apply(assign_nuts1_to_lad) return nspl def make_shares_comparison(glass, ch, variable): \"\"\"Compare distributions between Glass", "sg_covid_impact.project_dir FIG_PATH = f\"{project_dir}/figures/scotland\" driver = google_chrome_driver_setup() nspl_target = f\"{project_dir}/data/raw/nspl\" nspl_location = os.path.join(nspl_target,", "= ( alt.Chart(lad_sector_shares) .transform_filter(alt.datum.share_norm > 0) .mark_rect() .encode( y=alt.Y(\"lad_name\", sort=lads_sorted, title=\"Local Authority\"), x=alt.X(\"division\",", "import sg_covid_impact project_dir = sg_covid_impact.project_dir FIG_PATH = f\"{project_dir}/figures/scotland\" driver = google_chrome_driver_setup() nspl_target =", "import logging import requests from zipfile import ZipFile from io import BytesIO import", 
"legend=alt.Legend(orient=\"bottom\"), ), tooltip=[\"lad_name\", \"division_name\", \"share_norm\"], ) ).properties(width=400, height=300) corr_chart = ( alt.Chart(lads_corr_df) .mark_point(filled=True,", "score=60): \"\"\"Makes the glass metadata table\"\"\" logging.info(\"Making glass metadata\") glass_house = get_glass_house() glass_ch_meta", "\"laua\") lad_shares[[\"glass\", \"companies\"]].corr() merged = sh.merge( lad_shares.reset_index(drop=False), left_on=\"lad19cd\", right_on=\"laua\" ) merged_json = json.loads(merged.to_json())", "\"\"\"Read and tag NSPL\"\"\" logging.info(\"Reading NSPL\") nspl = pd.read_csv(nspl_location, usecols=[\"pcds\", geo]).dropna( axis=0, subset=[geo]", "# Scot glass_meta_sc, companies_sc = [ df.query(\"nuts1=='Scotland'\").reset_index(drop=True) for df in [glass_meta, companies] ]", "glass_ch_meta def make_companies(): \"\"\"Make the companies house table\"\"\" logging.info(\"Making CH\") companies_address = get_address()", ").properties(height=300, width=150) sector_comparison_chart save_altair( sector_comparison_chart, \"glass_sector_validation\", driver=driver, path=FIG_PATH ) export_chart(sector_comparison_chart, \"glass_sector_validation\") # Chart", "sorted(corr_list, key=lambda x: x[1], reverse=True)] lads_corr_df = pd.DataFrame(corr_list, columns=[\"lad_name\", \"glass_ch_correlation\"]) # Plot rep_chart", "Chart comparing sector distributions sector_comparison_chart = ( alt.Chart(sector_shares) .mark_bar() .encode( y=alt.Y(\"division\", sort=sorted_divs, axis=alt.Axis(labels=False,", "Read everything nspl = read_nspl() companies = make_companies() glass_meta = make_glass_meta(companies) # Focus", "coverage lad_sector_shares = ( pd.concat( [ df.groupby(\"laua\").apply( lambda x: x[\"division\"].value_counts(normalize=True) ) for df,", ") return out fetch_nspl() # Lookups _DIV_NAME_LOOKUP = extract_sic_code_description(load_sic_taxonomy(), \"Division\") _SECTION_DIVISION_LOOKUP, _SECTION_NAME_LOOKUP =", "title=\"Glass vs 
CH share\", scale=alt.Scale(scheme=\"Spectral\", type=\"log\"), legend=alt.Legend(orient=\"bottom\"), ), tooltip=[\"lad_name\", \"division_name\", \"share_norm\"], ) ).properties(width=400,", "def make_glass_meta(companies, score=60): \"\"\"Makes the glass metadata table\"\"\" logging.info(\"Making glass metadata\") glass_house =", "is True: logging.info(\"Already collected NSPL\") else: os.makedirs(nspl_target, exist_ok=True) req = requests.get(nspl_url) zipf =", ") from sg_covid_impact.utils.altair_s3 import export_chart import sg_covid_impact project_dir = sg_covid_impact.project_dir FIG_PATH = f\"{project_dir}/figures/scotland\"", "- 1) ) return out fetch_nspl() # Lookups _DIV_NAME_LOOKUP = extract_sic_code_description(load_sic_taxonomy(), \"Division\") _SECTION_DIVISION_LOOKUP,", ".configure_view(strokeWidth=0) .properties(height=300, width=200) ) glass_validation = alt.hconcat(sector_comparison_chart, glass_share_map) glass_validation save_altair(glass_validation, \"glass_place_validation\", driver, path=FIG_PATH)", "\"lad19nm\", scale_type=\"linear\" ) # .configure_view(strokeWidth=0) .properties(height=300, width=200) ) glass_validation = alt.hconcat(sector_comparison_chart, glass_share_map) glass_validation", ".assign(division_name=lambda x: x[\"division\"].map(_DIV_NAME_LOOKUP)) .merge(nspl, left_on=\"postcode\", right_on=\"pcds\") ) return companies def fetch_nspl(): \"\"\"Fetch NSPL", "glass_share_map) glass_validation save_altair(glass_validation, \"glass_place_validation\", driver, path=FIG_PATH) export_chart(glass_validation, \"glass_place_validation\") # LAD by division coverage", ".mark_rect() .encode( y=alt.Y(\"lad_name\", sort=lads_sorted, title=\"Local Authority\"), x=alt.X(\"division\", axis=alt.Axis(labels=False, ticks=False)), color=alt.Color( \"share_norm\", sort=\"descending\", title=\"Glass", "and codes UK as at 04_20.csv\", geo_var_name=\"LAD20\" ): \"\"\"Read and tag NSPL\"\"\" logging.info(\"Reading", "\"LA_UA names and codes UK as at 
04_20.csv\") ) name_dict = name_lu.set_index(f\"{geo_var_name}CD\")[f\"{geo_var_name}NM\"].to_dict() return", "\"Division\") _SECTION_DIVISION_LOOKUP, _SECTION_NAME_LOOKUP = make_section_division_lookup() _LAD_NUTS1_LOOKUP = read_lad_nuts1_lookup() _LAD_NAME_DICT = make_lad_lookup() # Read", "os.makedirs(nspl_target, exist_ok=True) req = requests.get(nspl_url) zipf = ZipFile(BytesIO(req.content)).extractall(nspl_target) def make_lad_lookup(geo_var_name=\"LAD20\"): \"\"\"Lookup between LAD", "if os.path.exists(nspl_target) is True: logging.info(\"Already collected NSPL\") else: os.makedirs(nspl_target, exist_ok=True) req = requests.get(nspl_url)", ") from sg_covid_impact.utils.altair_save_utils import ( google_chrome_driver_setup, save_altair, ) from sg_covid_impact.utils.altair_s3 import export_chart import", "width=150) sector_comparison_chart save_altair( sector_comparison_chart, \"glass_sector_validation\", driver=driver, path=FIG_PATH ) export_chart(sector_comparison_chart, \"glass_sector_validation\") # Chart comparing", "lad_sector_shares.query(f\"laua=='{x}'\") corr = np.float(sel[[\"glass\", \"ch\"]].corr().iloc[0, 1]) corr_list.append([x, corr]) lads_corr_dict = {k[0]: k[1] for", "Functions def make_glass_meta(companies, score=60): \"\"\"Makes the glass metadata table\"\"\" logging.info(\"Making glass metadata\") glass_house", "read_nspl() companies = make_companies() glass_meta = make_glass_meta(companies) # Focus on Scotland # Scot", "= alt.hconcat(rep_chart, corr_chart, spacing=1).resolve_scale( color=\"independent\" ) save_altair( lad_share_comparison, \"glass_sector_place_validation\", driver=driver, path=FIG_PATH ) export_chart(lad_share_comparison,", "io import BytesIO import altair as alt from sg_covid_impact.getters.glass_house import get_glass_house from sg_covid_impact.getters.companies_house", "CH share\", scale=alt.Scale(scheme=\"Spectral\", type=\"log\"), legend=alt.Legend(orient=\"bottom\"), ), tooltip=[\"lad_name\", \"division_name\", 
\"share_norm\"], ) ).properties(width=400, height=300) corr_chart", ") ).fillna(0) lad_sector_shares.columns = [\"glass\", \"ch\"] lad_sector_shares = ( lad_sector_shares.assign(share_norm=lambda x: x[\"glass\"] /", "( plot_choro( merged_json, \"share_norm\", \"Glass vs CH share\", \"lad19nm\", scale_type=\"linear\" ) # .configure_view(strokeWidth=0)", "df.query(\"nuts1=='Scotland'\").reset_index(drop=True) for df in [glass_meta, companies] ] sector_shares = ( make_shares_comparison(glass_meta_sc, companies_sc, \"division\")", "name_dict = name_lu.set_index(f\"{geo_var_name}CD\")[f\"{geo_var_name}NM\"].to_dict() return name_dict def read_nspl( geo=\"laua\", names=\"LA_UA names and codes UK", "between Glass and CH\"\"\" out = ( pd.concat( [df[[variable]].value_counts(normalize=True) for df in [glass,", "ticks=False, grid=True), ), x=alt.X(\"glass_ch_correlation\", title=[\"Glass-CH sector\", \"share correlation\"]), color=alt.Color(\"glass_ch_correlation\", legend=None), ) ).properties(width=100, height=300)", "x[\"laua\"].map(_LAD_NAME_DICT)) ) corr_list = [] for x in set(lad_sector_shares[\"laua\"]): sel = lad_sector_shares.query(f\"laua=='{x}'\") corr", "stroke=\"black\", strokeWidth=0.2) .encode( y=alt.Y( \"lad_name\", title=None, sort=lads_sorted, axis=alt.Axis(labels=False, ticks=False, grid=True), ), x=alt.X(\"glass_ch_correlation\", title=[\"Glass-CH", "lads_corr_df = pd.DataFrame(corr_list, columns=[\"lad_name\", \"glass_ch_correlation\"]) # Plot rep_chart = ( alt.Chart(lad_sector_shares) .transform_filter(alt.datum.share_norm >", "os import logging import requests from zipfile import ZipFile from io import BytesIO", "out = ( pd.concat( [df[[variable]].value_counts(normalize=True) for df in [glass, ch]], axis=1 ) .rename(columns={0:", "1: \"companies\"}) .assign(share_norm=lambda x: (x[\"glass\"] / x[\"companies\"]) - 1) ) return out fetch_nspl()", "lads_corr_dict = {k[0]: k[1] for k in corr_list} lads_sorted = [x[0] for x", "nspl_location = 
os.path.join(nspl_target, \"Data\", \"NSPL_NOV_2020_UK.csv\") meta_location = os.path.join(nspl_target, \"Documents\") # Functions def make_glass_meta(companies,", "requests from zipfile import ZipFile from io import BytesIO import altair as alt", "Sorted divisions sorted_divs = sector_shares.sort_values( [\"section_name\", \"share_norm\"], ascending=[True, False] )[\"division\"].to_list() sector_shares[\"division_name\"] = sector_shares[\"division\"].map(_DIV_NAME_LOOKUP)", "= ( pd.concat( [df[[variable]].value_counts(normalize=True) for df in [glass, ch]], axis=1 ) .rename(columns={0: \"glass\",", "for df, name in zip([glass_meta_sc, companies_sc], [\"glass\", \"ch\"]) ], axis=1, ) ).fillna(0) lad_sector_shares.columns", "= ( alt.Chart(sector_shares) .mark_bar() .encode( y=alt.Y(\"division\", sort=sorted_divs, axis=alt.Axis(labels=False, ticks=False)), x=alt.X(\"share_norm\", title=\"Glass vs CH", "# Sorted divisions sorted_divs = sector_shares.sort_values( [\"section_name\", \"share_norm\"], ascending=[True, False] )[\"division\"].to_list() sector_shares[\"division_name\"] =", "glass_meta = make_glass_meta(companies) # Focus on Scotland # Scot glass_meta_sc, companies_sc = [", "\"companies\"]].corr() merged = sh.merge( lad_shares.reset_index(drop=False), left_on=\"lad19cd\", right_on=\"laua\" ) merged_json = json.loads(merged.to_json()) glass_share_map =", "json import numpy as np import os import logging import requests from zipfile", "import get_address, get_sector from sg_covid_impact.make_sic_division import ( load_sic_taxonomy, extract_sic_code_description, ) from sg_covid_impact.descriptive import", "x[1], reverse=True)] lads_corr_df = pd.DataFrame(corr_list, columns=[\"lad_name\", \"glass_ch_correlation\"]) # Plot rep_chart = ( alt.Chart(lad_sector_shares)", "sg_covid_impact.utils.altair_save_utils import ( google_chrome_driver_setup, save_altair, ) from sg_covid_impact.utils.altair_s3 import export_chart import sg_covid_impact project_dir", 
"_LAD_NUTS1_LOOKUP = read_lad_nuts1_lookup() _LAD_NAME_DICT = make_lad_lookup() # Read everything nspl = read_nspl() companies", "altair as alt from sg_covid_impact.getters.glass_house import get_glass_house from sg_covid_impact.getters.companies_house import get_address, get_sector from", "\"share_norm\", sort=\"descending\", title=\"Glass vs CH share\", scale=alt.Scale(scheme=\"Spectral\", type=\"log\"), legend=alt.Legend(orient=\"bottom\"), ), tooltip=[\"lad_name\", \"division_name\", \"share_norm\"],", "driver, path=FIG_PATH) export_chart(glass_validation, \"glass_place_validation\") # LAD by division coverage lad_sector_shares = ( pd.concat(", "y=alt.Y(\"lad_name\", sort=lads_sorted, title=\"Local Authority\"), x=alt.X(\"division\", axis=alt.Axis(labels=False, ticks=False)), color=alt.Color( \"share_norm\", sort=\"descending\", title=\"Glass vs CH", "merged_json = json.loads(merged.to_json()) glass_share_map = ( plot_choro( merged_json, \"share_norm\", \"Glass vs CH share\",", "companies def fetch_nspl(): \"\"\"Fetch NSPL (if needed)\"\"\" nspl_url = \"https://www.arcgis.com/sharing/rest/content/items/4df8a1a188e74542aebee164525d7ca9/data\" if os.path.exists(nspl_target) is", "names and codes UK as at 04_20.csv\", geo_var_name=\"LAD20\" ): \"\"\"Read and tag NSPL\"\"\"", "house table\"\"\" logging.info(\"Making CH\") companies_address = get_address() companies_sector = get_sector() companies = (", "exist_ok=True) req = requests.get(nspl_url) zipf = ZipFile(BytesIO(req.content)).extractall(nspl_target) def make_lad_lookup(geo_var_name=\"LAD20\"): \"\"\"Lookup between LAD names", ".reset_index(drop=False) .rename(columns={\"level_1\": \"division\"}) .assign(division_name=lambda x: x[\"division\"].map(_DIV_NAME_LOOKUP)) .assign(lad_name=lambda x: x[\"laua\"].map(_LAD_NAME_DICT)) ) corr_list = []", "( alt.Chart(lads_corr_df) .mark_point(filled=True, stroke=\"black\", strokeWidth=0.2) .encode( y=alt.Y( \"lad_name\", title=None, sort=lads_sorted, 
axis=alt.Axis(labels=False, ticks=False, grid=True),", "get_address, get_sector from sg_covid_impact.make_sic_division import ( load_sic_taxonomy, extract_sic_code_description, ) from sg_covid_impact.descriptive import (", "return glass_ch_meta def make_companies(): \"\"\"Make the companies house table\"\"\" logging.info(\"Making CH\") companies_address =", "key=lambda x: x[1], reverse=True)] lads_corr_df = pd.DataFrame(corr_list, columns=[\"lad_name\", \"glass_ch_correlation\"]) # Plot rep_chart =", "( pd.concat( [ df.groupby(\"laua\").apply( lambda x: x[\"division\"].value_counts(normalize=True) ) for df, name in zip([glass_meta_sc,", "get_address() companies_sector = get_sector() companies = ( companies_address[[\"company_number\", \"postcode\"]] .merge( companies_sector.query(\"rank==1\")[[\"company_number\", \"SIC4_code\"]], on=\"company_number\",", "_SECTION_DIVISION_LOOKUP, _SECTION_NAME_LOOKUP = make_section_division_lookup() _LAD_NUTS1_LOOKUP = read_lad_nuts1_lookup() _LAD_NAME_DICT = make_lad_lookup() # Read everything", "make_lad_lookup() # Read everything nspl = read_nspl() companies = make_companies() glass_meta = make_glass_meta(companies)", "sg_covid_impact project_dir = sg_covid_impact.project_dir FIG_PATH = f\"{project_dir}/figures/scotland\" driver = google_chrome_driver_setup() nspl_target = f\"{project_dir}/data/raw/nspl\"", "tag NSPL\"\"\" logging.info(\"Reading NSPL\") nspl = pd.read_csv(nspl_location, usecols=[\"pcds\", geo]).dropna( axis=0, subset=[geo] ) name_lu", "= pd.read_csv(os.path.join(meta_location, names)) name_dict = name_lu.set_index(f\"{geo_var_name}CD\")[f\"{geo_var_name}NM\"].to_dict() nspl[f\"{geo}_name\"] = nspl[geo].map(name_dict) nspl[\"nuts1\"] = nspl[geo].apply(assign_nuts1_to_lad) return", "sg_covid_impact.utils.altair_s3 import export_chart import sg_covid_impact project_dir = sg_covid_impact.project_dir FIG_PATH = f\"{project_dir}/figures/scotland\" driver =", "distributions sector_comparison_chart = ( 
alt.Chart(sector_shares) .mark_bar() .encode( y=alt.Y(\"division\", sort=sorted_divs, axis=alt.Axis(labels=False, ticks=False)), x=alt.X(\"share_norm\", title=\"Glass", "corr]) lads_corr_dict = {k[0]: k[1] for k in corr_list} lads_sorted = [x[0] for", "import ( google_chrome_driver_setup, save_altair, ) from sg_covid_impact.utils.altair_s3 import export_chart import sg_covid_impact project_dir =", "zipf = ZipFile(BytesIO(req.content)).extractall(nspl_target) def make_lad_lookup(geo_var_name=\"LAD20\"): \"\"\"Lookup between LAD names and codes 2020\"\"\" name_lu", ".rename(columns={\"level_1\": \"division\"}) .assign(division_name=lambda x: x[\"division\"].map(_DIV_NAME_LOOKUP)) .assign(lad_name=lambda x: x[\"laua\"].map(_LAD_NAME_DICT)) ) corr_list = [] for", "distributions sh = read_shape() lad_shares = make_shares_comparison(glass_meta_sc, companies_sc, \"laua\") lad_shares[[\"glass\", \"companies\"]].corr() merged =", "share\", \"lad19nm\", scale_type=\"linear\" ) # .configure_view(strokeWidth=0) .properties(height=300, width=200) ) glass_validation = alt.hconcat(sector_comparison_chart, glass_share_map)", "[c[:2] for c in x[\"SIC4_code\"]]) .assign(division_name=lambda x: x[\"division\"].map(_DIV_NAME_LOOKUP)) .merge(nspl, left_on=\"postcode\", right_on=\"pcds\") ) return", "for x in sorted(corr_list, key=lambda x: x[1], reverse=True)] lads_corr_df = pd.DataFrame(corr_list, columns=[\"lad_name\", \"glass_ch_correlation\"])", "path=FIG_PATH) export_chart(glass_validation, \"glass_place_validation\") # LAD by division coverage lad_sector_shares = ( pd.concat( [", "x[\"division\"] .map(_SECTION_DIVISION_LOOKUP) .map(_SECTION_NAME_LOOKUP) ) .dropna(axis=0) ) # Calculate correlations sector_shares[[\"glass\", \"companies\"]].corr() # Sorted", "and codes 2020\"\"\" name_lu = pd.read_csv( os.path.join(meta_location, \"LA_UA names and codes UK as", "validate glass data import pandas as pd import json import numpy as np", "title=\"Section\"), 
tooltip=[\"division_name\"], ) ).properties(height=300, width=150) sector_comparison_chart save_altair( sector_comparison_chart, \"glass_sector_validation\", driver=driver, path=FIG_PATH ) export_chart(sector_comparison_chart,", "on=\"company_number\", ) .assign(division=lambda x: [c[:2] for c in x[\"SIC4_code\"]]) .assign(division_name=lambda x: x[\"division\"].map(_DIV_NAME_LOOKUP)) .merge(nspl,", "data import pandas as pd import json import numpy as np import os", ".assign( section_name=lambda x: x[\"division\"] .map(_SECTION_DIVISION_LOOKUP) .map(_SECTION_NAME_LOOKUP) ) .dropna(axis=0) ) # Calculate correlations sector_shares[[\"glass\",", "type=\"log\"), legend=alt.Legend(orient=\"bottom\"), ), tooltip=[\"lad_name\", \"division_name\", \"share_norm\"], ) ).properties(width=400, height=300) corr_chart = ( alt.Chart(lads_corr_df)", "os.path.join(nspl_target, \"Documents\") # Functions def make_glass_meta(companies, score=60): \"\"\"Makes the glass metadata table\"\"\" logging.info(\"Making", "requests.get(nspl_url) zipf = ZipFile(BytesIO(req.content)).extractall(nspl_target) def make_lad_lookup(geo_var_name=\"LAD20\"): \"\"\"Lookup between LAD names and codes 2020\"\"\"", "save_altair, ) from sg_covid_impact.utils.altair_s3 import export_chart import sg_covid_impact project_dir = sg_covid_impact.project_dir FIG_PATH =", "name_lu.set_index(f\"{geo_var_name}CD\")[f\"{geo_var_name}NM\"].to_dict() return name_dict def read_nspl( geo=\"laua\", names=\"LA_UA names and codes UK as at", "x: (x[\"glass\"] / x[\"companies\"]) - 1) ) return out fetch_nspl() # Lookups _DIV_NAME_LOOKUP", "k[1] for k in corr_list} lads_sorted = [x[0] for x in sorted(corr_list, key=lambda", "= google_chrome_driver_setup() nspl_target = f\"{project_dir}/data/raw/nspl\" nspl_location = os.path.join(nspl_target, \"Data\", \"NSPL_NOV_2020_UK.csv\") meta_location = os.path.join(nspl_target,", "\"\"\"Compare distributions between Glass and CH\"\"\" out = ( pd.concat( 
[df[[variable]].value_counts(normalize=True) for df", "\"share_norm\"], ) ).properties(width=400, height=300) corr_chart = ( alt.Chart(lads_corr_df) .mark_point(filled=True, stroke=\"black\", strokeWidth=0.2) .encode( y=alt.Y(", ").properties(width=100, height=300) lad_share_comparison = alt.hconcat(rep_chart, corr_chart, spacing=1).resolve_scale( color=\"independent\" ) save_altair( lad_share_comparison, \"glass_sector_place_validation\", driver=driver,", "as pd import json import numpy as np import os import logging import", "codes UK as at 04_20.csv\", geo_var_name=\"LAD20\" ): \"\"\"Read and tag NSPL\"\"\" logging.info(\"Reading NSPL\")", "glass metadata\") glass_house = get_glass_house() glass_ch_meta = glass_house.query(f\"score>{score}\").merge( companies, on=\"company_number\" ) return glass_ch_meta", ".encode( y=alt.Y(\"lad_name\", sort=lads_sorted, title=\"Local Authority\"), x=alt.X(\"division\", axis=alt.Axis(labels=False, ticks=False)), color=alt.Color( \"share_norm\", sort=\"descending\", title=\"Glass vs", "\"division_name\", \"share_norm\"], ) ).properties(width=400, height=300) corr_chart = ( alt.Chart(lads_corr_df) .mark_point(filled=True, stroke=\"black\", strokeWidth=0.2) .encode(", "alt from sg_covid_impact.getters.glass_house import get_glass_house from sg_covid_impact.getters.companies_house import get_address, get_sector from sg_covid_impact.make_sic_division import", "height=300) lad_share_comparison = alt.hconcat(rep_chart, corr_chart, spacing=1).resolve_scale( color=\"independent\" ) save_altair( lad_share_comparison, \"glass_sector_place_validation\", driver=driver, path=FIG_PATH", "NSPL\") nspl = pd.read_csv(nspl_location, usecols=[\"pcds\", geo]).dropna( axis=0, subset=[geo] ) name_lu = pd.read_csv(os.path.join(meta_location, names))", "\"division\"}) .assign(division_name=lambda x: x[\"division\"].map(_DIV_NAME_LOOKUP)) .assign(lad_name=lambda x: x[\"laua\"].map(_LAD_NAME_DICT)) ) corr_list = [] for x", 
"color=alt.Color(\"glass_ch_correlation\", legend=None), ) ).properties(width=100, height=300) lad_share_comparison = alt.hconcat(rep_chart, corr_chart, spacing=1).resolve_scale( color=\"independent\" ) save_altair(", "= ( pd.concat( [ df.groupby(\"laua\").apply( lambda x: x[\"division\"].value_counts(normalize=True) ) for df, name in", "# LAD by division coverage lad_sector_shares = ( pd.concat( [ df.groupby(\"laua\").apply( lambda x:", "): \"\"\"Read and tag NSPL\"\"\" logging.info(\"Reading NSPL\") nspl = pd.read_csv(nspl_location, usecols=[\"pcds\", geo]).dropna( axis=0,", "to validate glass data import pandas as pd import json import numpy as", "save_altair(glass_validation, \"glass_place_validation\", driver, path=FIG_PATH) export_chart(glass_validation, \"glass_place_validation\") # LAD by division coverage lad_sector_shares =", "glass data import pandas as pd import json import numpy as np import", "CH share\", \"lad19nm\", scale_type=\"linear\" ) # .configure_view(strokeWidth=0) .properties(height=300, width=200) ) glass_validation = alt.hconcat(sector_comparison_chart,", "# .configure_view(strokeWidth=0) .properties(height=300, width=200) ) glass_validation = alt.hconcat(sector_comparison_chart, glass_share_map) glass_validation save_altair(glass_validation, \"glass_place_validation\", driver,", "= ( alt.Chart(lads_corr_df) .mark_point(filled=True, stroke=\"black\", strokeWidth=0.2) .encode( y=alt.Y( \"lad_name\", title=None, sort=lads_sorted, axis=alt.Axis(labels=False, ticks=False,", "sh.merge( lad_shares.reset_index(drop=False), left_on=\"lad19cd\", right_on=\"laua\" ) merged_json = json.loads(merged.to_json()) glass_share_map = ( plot_choro( merged_json,", "out fetch_nspl() # Lookups _DIV_NAME_LOOKUP = extract_sic_code_description(load_sic_taxonomy(), \"Division\") _SECTION_DIVISION_LOOKUP, _SECTION_NAME_LOOKUP = make_section_division_lookup() _LAD_NUTS1_LOOKUP", "sector_shares.sort_values( [\"section_name\", \"share_norm\"], ascending=[True, False] 
)[\"division\"].to_list() sector_shares[\"division_name\"] = sector_shares[\"division\"].map(_DIV_NAME_LOOKUP) # Chart comparing sector", "# Lookups _DIV_NAME_LOOKUP = extract_sic_code_description(load_sic_taxonomy(), \"Division\") _SECTION_DIVISION_LOOKUP, _SECTION_NAME_LOOKUP = make_section_division_lookup() _LAD_NUTS1_LOOKUP = read_lad_nuts1_lookup()", "return name_dict def read_nspl( geo=\"laua\", names=\"LA_UA names and codes UK as at 04_20.csv\",", "at 04_20.csv\", geo_var_name=\"LAD20\" ): \"\"\"Read and tag NSPL\"\"\" logging.info(\"Reading NSPL\") nspl = pd.read_csv(nspl_location,", "\"companies\"]].corr() # Sorted divisions sorted_divs = sector_shares.sort_values( [\"section_name\", \"share_norm\"], ascending=[True, False] )[\"division\"].to_list() sector_shares[\"division_name\"]", "axis=alt.Axis(labels=False, ticks=False)), color=alt.Color( \"share_norm\", sort=\"descending\", title=\"Glass vs CH share\", scale=alt.Scale(scheme=\"Spectral\", type=\"log\"), legend=alt.Legend(orient=\"bottom\"), ),", "x: x[\"division\"].map(_DIV_NAME_LOOKUP)) .merge(nspl, left_on=\"postcode\", right_on=\"pcds\") ) return companies def fetch_nspl(): \"\"\"Fetch NSPL (if", "companies_sc = [ df.query(\"nuts1=='Scotland'\").reset_index(drop=True) for df in [glass_meta, companies] ] sector_shares = (", "( lad_sector_shares.assign(share_norm=lambda x: x[\"glass\"] / x[\"ch\"]) .reset_index(drop=False) .rename(columns={\"level_1\": \"division\"}) .assign(division_name=lambda x: x[\"division\"].map(_DIV_NAME_LOOKUP)) .assign(lad_name=lambda", "import json import numpy as np import os import logging import requests from", "import BytesIO import altair as alt from sg_covid_impact.getters.glass_house import get_glass_house from sg_covid_impact.getters.companies_house import", "export_chart(sector_comparison_chart, \"glass_sector_validation\") # Chart comparing geo distributions sh = read_shape() lad_shares = make_shares_comparison(glass_meta_sc,", "x[\"glass\"] / x[\"ch\"]) 
.reset_index(drop=False) .rename(columns={\"level_1\": \"division\"}) .assign(division_name=lambda x: x[\"division\"].map(_DIV_NAME_LOOKUP)) .assign(lad_name=lambda x: x[\"laua\"].map(_LAD_NAME_DICT)) )", "logging import requests from zipfile import ZipFile from io import BytesIO import altair", "= os.path.join(nspl_target, \"Documents\") # Functions def make_glass_meta(companies, score=60): \"\"\"Makes the glass metadata table\"\"\"", "glass_house.query(f\"score>{score}\").merge( companies, on=\"company_number\" ) return glass_ch_meta def make_companies(): \"\"\"Make the companies house table\"\"\"", "\"glass_place_validation\") # LAD by division coverage lad_sector_shares = ( pd.concat( [ df.groupby(\"laua\").apply( lambda", "= f\"{project_dir}/data/raw/nspl\" nspl_location = os.path.join(nspl_target, \"Data\", \"NSPL_NOV_2020_UK.csv\") meta_location = os.path.join(nspl_target, \"Documents\") # Functions", ") ).properties(width=100, height=300) lad_share_comparison = alt.hconcat(rep_chart, corr_chart, spacing=1).resolve_scale( color=\"independent\" ) save_altair( lad_share_comparison, \"glass_sector_place_validation\",", "), x=alt.X(\"glass_ch_correlation\", title=[\"Glass-CH sector\", \"share correlation\"]), color=alt.Color(\"glass_ch_correlation\", legend=None), ) ).properties(width=100, height=300) lad_share_comparison =", "= [\"glass\", \"ch\"] lad_sector_shares = ( lad_sector_shares.assign(share_norm=lambda x: x[\"glass\"] / x[\"ch\"]) .reset_index(drop=False) .rename(columns={\"level_1\":", "= json.loads(merged.to_json()) glass_share_map = ( plot_choro( merged_json, \"share_norm\", \"Glass vs CH share\", \"lad19nm\",", "], axis=1, ) ).fillna(0) lad_sector_shares.columns = [\"glass\", \"ch\"] lad_sector_shares = ( lad_sector_shares.assign(share_norm=lambda x:", "# Scrip to validate glass data import pandas as pd import json import", "alt.hconcat(rep_chart, corr_chart, spacing=1).resolve_scale( color=\"independent\" ) save_altair( lad_share_comparison, 
\"glass_sector_place_validation\", driver=driver, path=FIG_PATH ) export_chart(lad_share_comparison, \"glass_sector_place_validation\")", ".mark_bar() .encode( y=alt.Y(\"division\", sort=sorted_divs, axis=alt.Axis(labels=False, ticks=False)), x=alt.X(\"share_norm\", title=\"Glass vs CH share\"), color=alt.Color(\"section_name\", title=\"Section\"),", "sg_covid_impact.getters.glass_house import get_glass_house from sg_covid_impact.getters.companies_house import get_address, get_sector from sg_covid_impact.make_sic_division import ( load_sic_taxonomy,", "> 0) .mark_rect() .encode( y=alt.Y(\"lad_name\", sort=lads_sorted, title=\"Local Authority\"), x=alt.X(\"division\", axis=alt.Axis(labels=False, ticks=False)), color=alt.Color( \"share_norm\",", "the glass metadata table\"\"\" logging.info(\"Making glass metadata\") glass_house = get_glass_house() glass_ch_meta = glass_house.query(f\"score>{score}\").merge(", "as alt from sg_covid_impact.getters.glass_house import get_glass_house from sg_covid_impact.getters.companies_house import get_address, get_sector from sg_covid_impact.make_sic_division", "plot_choro, read_lad_nuts1_lookup, make_section_division_lookup, ) from sg_covid_impact.utils.altair_save_utils import ( google_chrome_driver_setup, save_altair, ) from sg_covid_impact.utils.altair_s3", "pd.read_csv(nspl_location, usecols=[\"pcds\", geo]).dropna( axis=0, subset=[geo] ) name_lu = pd.read_csv(os.path.join(meta_location, names)) name_dict = name_lu.set_index(f\"{geo_var_name}CD\")[f\"{geo_var_name}NM\"].to_dict()", "plot_choro( merged_json, \"share_norm\", \"Glass vs CH share\", \"lad19nm\", scale_type=\"linear\" ) # .configure_view(strokeWidth=0) .properties(height=300,", "lad_shares[[\"glass\", \"companies\"]].corr() merged = sh.merge( lad_shares.reset_index(drop=False), left_on=\"lad19cd\", right_on=\"laua\" ) merged_json = json.loads(merged.to_json()) glass_share_map", "= make_glass_meta(companies) # Focus on Scotland # Scot glass_meta_sc, companies_sc = [ 
df.query(\"nuts1=='Scotland'\").reset_index(drop=True)", "x: x[1], reverse=True)] lads_corr_df = pd.DataFrame(corr_list, columns=[\"lad_name\", \"glass_ch_correlation\"]) # Plot rep_chart = (", "\"Glass vs CH share\", \"lad19nm\", scale_type=\"linear\" ) # .configure_view(strokeWidth=0) .properties(height=300, width=200) ) glass_validation", ".assign(share_norm=lambda x: (x[\"glass\"] / x[\"companies\"]) - 1) ) return out fetch_nspl() # Lookups", "companies_address[[\"company_number\", \"postcode\"]] .merge( companies_sector.query(\"rank==1\")[[\"company_number\", \"SIC4_code\"]], on=\"company_number\", ) .assign(division=lambda x: [c[:2] for c in", "_SECTION_NAME_LOOKUP = make_section_division_lookup() _LAD_NUTS1_LOOKUP = read_lad_nuts1_lookup() _LAD_NAME_DICT = make_lad_lookup() # Read everything nspl", "x[\"division\"].value_counts(normalize=True) ) for df, name in zip([glass_meta_sc, companies_sc], [\"glass\", \"ch\"]) ], axis=1, )", "glass_house = get_glass_house() glass_ch_meta = glass_house.query(f\"score>{score}\").merge( companies, on=\"company_number\" ) return glass_ch_meta def make_companies():", "= make_section_division_lookup() _LAD_NUTS1_LOOKUP = read_lad_nuts1_lookup() _LAD_NAME_DICT = make_lad_lookup() # Read everything nspl =", "lad_shares = make_shares_comparison(glass_meta_sc, companies_sc, \"laua\") lad_shares[[\"glass\", \"companies\"]].corr() merged = sh.merge( lad_shares.reset_index(drop=False), left_on=\"lad19cd\", right_on=\"laua\"", "logging.info(\"Making glass metadata\") glass_house = get_glass_house() glass_ch_meta = glass_house.query(f\"score>{score}\").merge( companies, on=\"company_number\" ) return", "codes 2020\"\"\" name_lu = pd.read_csv( os.path.join(meta_location, \"LA_UA names and codes UK as at", "Scot glass_meta_sc, companies_sc = [ df.query(\"nuts1=='Scotland'\").reset_index(drop=True) for df in [glass_meta, companies] ] sector_shares", "# Focus on Scotland # Scot glass_meta_sc, companies_sc = [ 
df.query(\"nuts1=='Scotland'\").reset_index(drop=True) for df", "import requests from zipfile import ZipFile from io import BytesIO import altair as", "variable): \"\"\"Compare distributions between Glass and CH\"\"\" out = ( pd.concat( [df[[variable]].value_counts(normalize=True) for", "import pandas as pd import json import numpy as np import os import", "CH share\"), color=alt.Color(\"section_name\", title=\"Section\"), tooltip=[\"division_name\"], ) ).properties(height=300, width=150) sector_comparison_chart save_altair( sector_comparison_chart, \"glass_sector_validation\", driver=driver,", "# Plot rep_chart = ( alt.Chart(lad_sector_shares) .transform_filter(alt.datum.share_norm > 0) .mark_rect() .encode( y=alt.Y(\"lad_name\", sort=lads_sorted,", "pandas as pd import json import numpy as np import os import logging", "/ x[\"ch\"]) .reset_index(drop=False) .rename(columns={\"level_1\": \"division\"}) .assign(division_name=lambda x: x[\"division\"].map(_DIV_NAME_LOOKUP)) .assign(lad_name=lambda x: x[\"laua\"].map(_LAD_NAME_DICT)) ) corr_list", "= f\"{project_dir}/figures/scotland\" driver = google_chrome_driver_setup() nspl_target = f\"{project_dir}/data/raw/nspl\" nspl_location = os.path.join(nspl_target, \"Data\", \"NSPL_NOV_2020_UK.csv\")", "x=alt.X(\"glass_ch_correlation\", title=[\"Glass-CH sector\", \"share correlation\"]), color=alt.Color(\"glass_ch_correlation\", legend=None), ) ).properties(width=100, height=300) lad_share_comparison = alt.hconcat(rep_chart,", "as at 04_20.csv\", geo_var_name=\"LAD20\" ): \"\"\"Read and tag NSPL\"\"\" logging.info(\"Reading NSPL\") nspl =", "\"\"\"Make the companies house table\"\"\" logging.info(\"Making CH\") companies_address = get_address() companies_sector = get_sector()", "def make_shares_comparison(glass, ch, variable): \"\"\"Compare distributions between Glass and CH\"\"\" out = (", "import ZipFile from io import BytesIO import altair as alt from sg_covid_impact.getters.glass_house import", 
"make_glass_meta(companies, score=60): \"\"\"Makes the glass metadata table\"\"\" logging.info(\"Making glass metadata\") glass_house = get_glass_house()", "= nspl[geo].map(name_dict) nspl[\"nuts1\"] = nspl[geo].apply(assign_nuts1_to_lad) return nspl def make_shares_comparison(glass, ch, variable): \"\"\"Compare distributions", "( alt.Chart(sector_shares) .mark_bar() .encode( y=alt.Y(\"division\", sort=sorted_divs, axis=alt.Axis(labels=False, ticks=False)), x=alt.X(\"share_norm\", title=\"Glass vs CH share\"),", "in sorted(corr_list, key=lambda x: x[1], reverse=True)] lads_corr_df = pd.DataFrame(corr_list, columns=[\"lad_name\", \"glass_ch_correlation\"]) # Plot", "Scotland # Scot glass_meta_sc, companies_sc = [ df.query(\"nuts1=='Scotland'\").reset_index(drop=True) for df in [glass_meta, companies]", "and tag NSPL\"\"\" logging.info(\"Reading NSPL\") nspl = pd.read_csv(nspl_location, usecols=[\"pcds\", geo]).dropna( axis=0, subset=[geo] )", ".mark_point(filled=True, stroke=\"black\", strokeWidth=0.2) .encode( y=alt.Y( \"lad_name\", title=None, sort=lads_sorted, axis=alt.Axis(labels=False, ticks=False, grid=True), ), x=alt.X(\"glass_ch_correlation\",", "Authority\"), x=alt.X(\"division\", axis=alt.Axis(labels=False, ticks=False)), color=alt.Color( \"share_norm\", sort=\"descending\", title=\"Glass vs CH share\", scale=alt.Scale(scheme=\"Spectral\", type=\"log\"),", "sector_shares[[\"glass\", \"companies\"]].corr() # Sorted divisions sorted_divs = sector_shares.sort_values( [\"section_name\", \"share_norm\"], ascending=[True, False] )[\"division\"].to_list()", "json.loads(merged.to_json()) glass_share_map = ( plot_choro( merged_json, \"share_norm\", \"Glass vs CH share\", \"lad19nm\", scale_type=\"linear\"", "grid=True), ), x=alt.X(\"glass_ch_correlation\", title=[\"Glass-CH sector\", \"share correlation\"]), color=alt.Color(\"glass_ch_correlation\", legend=None), ) ).properties(width=100, height=300) lad_share_comparison", "= sg_covid_impact.project_dir FIG_PATH = 
f\"{project_dir}/figures/scotland\" driver = google_chrome_driver_setup() nspl_target = f\"{project_dir}/data/raw/nspl\" nspl_location =", "\"Documents\") # Functions def make_glass_meta(companies, score=60): \"\"\"Makes the glass metadata table\"\"\" logging.info(\"Making glass", "f\"{project_dir}/data/raw/nspl\" nspl_location = os.path.join(nspl_target, \"Data\", \"NSPL_NOV_2020_UK.csv\") meta_location = os.path.join(nspl_target, \"Documents\") # Functions def", ") .rename(columns={0: \"glass\", 1: \"companies\"}) .assign(share_norm=lambda x: (x[\"glass\"] / x[\"companies\"]) - 1) )", "<reponame>nestauk/sg_covid_impact # Scrip to validate glass data import pandas as pd import json", "\"glass_ch_correlation\"]) # Plot rep_chart = ( alt.Chart(lad_sector_shares) .transform_filter(alt.datum.share_norm > 0) .mark_rect() .encode( y=alt.Y(\"lad_name\",", "nspl_target = f\"{project_dir}/data/raw/nspl\" nspl_location = os.path.join(nspl_target, \"Data\", \"NSPL_NOV_2020_UK.csv\") meta_location = os.path.join(nspl_target, \"Documents\") #", "y=alt.Y(\"division\", sort=sorted_divs, axis=alt.Axis(labels=False, ticks=False)), x=alt.X(\"share_norm\", title=\"Glass vs CH share\"), color=alt.Color(\"section_name\", title=\"Section\"), tooltip=[\"division_name\"], )", "scale=alt.Scale(scheme=\"Spectral\", type=\"log\"), legend=alt.Legend(orient=\"bottom\"), ), tooltip=[\"lad_name\", \"division_name\", \"share_norm\"], ) ).properties(width=400, height=300) corr_chart = (", "numpy as np import os import logging import requests from zipfile import ZipFile", "= [x[0] for x in sorted(corr_list, key=lambda x: x[1], reverse=True)] lads_corr_df = pd.DataFrame(corr_list,", "sort=lads_sorted, title=\"Local Authority\"), x=alt.X(\"division\", axis=alt.Axis(labels=False, ticks=False)), color=alt.Color( \"share_norm\", sort=\"descending\", title=\"Glass vs CH share\",", ".merge(nspl, left_on=\"postcode\", right_on=\"pcds\") ) return companies def fetch_nspl(): \"\"\"Fetch NSPL (if 
needed)\"\"\" nspl_url", "assign_nuts1_to_lad, read_shape, plot_choro, read_lad_nuts1_lookup, make_section_division_lookup, ) from sg_covid_impact.utils.altair_save_utils import ( google_chrome_driver_setup, save_altair, )", "[\"glass\", \"ch\"]) ], axis=1, ) ).fillna(0) lad_sector_shares.columns = [\"glass\", \"ch\"] lad_sector_shares = (", "def read_nspl( geo=\"laua\", names=\"LA_UA names and codes UK as at 04_20.csv\", geo_var_name=\"LAD20\" ):", "LAD by division coverage lad_sector_shares = ( pd.concat( [ df.groupby(\"laua\").apply( lambda x: x[\"division\"].value_counts(normalize=True)", "sort=\"descending\", title=\"Glass vs CH share\", scale=alt.Scale(scheme=\"Spectral\", type=\"log\"), legend=alt.Legend(orient=\"bottom\"), ), tooltip=[\"lad_name\", \"division_name\", \"share_norm\"], )", "companies_sc, \"division\") .reset_index(drop=False) .assign( section_name=lambda x: x[\"division\"] .map(_SECTION_DIVISION_LOOKUP) .map(_SECTION_NAME_LOOKUP) ) .dropna(axis=0) ) #", "df, name in zip([glass_meta_sc, companies_sc], [\"glass\", \"ch\"]) ], axis=1, ) ).fillna(0) lad_sector_shares.columns =", ") ).properties(width=400, height=300) corr_chart = ( alt.Chart(lads_corr_df) .mark_point(filled=True, stroke=\"black\", strokeWidth=0.2) .encode( y=alt.Y( \"lad_name\",", "title=[\"Glass-CH sector\", \"share correlation\"]), color=alt.Color(\"glass_ch_correlation\", legend=None), ) ).properties(width=100, height=300) lad_share_comparison = alt.hconcat(rep_chart, corr_chart,", "axis=1 ) .rename(columns={0: \"glass\", 1: \"companies\"}) .assign(share_norm=lambda x: (x[\"glass\"] / x[\"companies\"]) - 1)", "x in sorted(corr_list, key=lambda x: x[1], reverse=True)] lads_corr_df = pd.DataFrame(corr_list, columns=[\"lad_name\", \"glass_ch_correlation\"]) #", "needed)\"\"\" nspl_url = \"https://www.arcgis.com/sharing/rest/content/items/4df8a1a188e74542aebee164525d7ca9/data\" if os.path.exists(nspl_target) is True: logging.info(\"Already collected NSPL\") else: 
os.makedirs(nspl_target,", "name in zip([glass_meta_sc, companies_sc], [\"glass\", \"ch\"]) ], axis=1, ) ).fillna(0) lad_sector_shares.columns = [\"glass\",", ".dropna(axis=0) ) # Calculate correlations sector_shares[[\"glass\", \"companies\"]].corr() # Sorted divisions sorted_divs = sector_shares.sort_values(", "from sg_covid_impact.make_sic_division import ( load_sic_taxonomy, extract_sic_code_description, ) from sg_covid_impact.descriptive import ( assign_nuts1_to_lad, read_shape,", "geo_var_name=\"LAD20\" ): \"\"\"Read and tag NSPL\"\"\" logging.info(\"Reading NSPL\") nspl = pd.read_csv(nspl_location, usecols=[\"pcds\", geo]).dropna(", "right_on=\"laua\" ) merged_json = json.loads(merged.to_json()) glass_share_map = ( plot_choro( merged_json, \"share_norm\", \"Glass vs", "fetch_nspl() # Lookups _DIV_NAME_LOOKUP = extract_sic_code_description(load_sic_taxonomy(), \"Division\") _SECTION_DIVISION_LOOKUP, _SECTION_NAME_LOOKUP = make_section_division_lookup() _LAD_NUTS1_LOOKUP =", "the companies house table\"\"\" logging.info(\"Making CH\") companies_address = get_address() companies_sector = get_sector() companies", "_DIV_NAME_LOOKUP = extract_sic_code_description(load_sic_taxonomy(), \"Division\") _SECTION_DIVISION_LOOKUP, _SECTION_NAME_LOOKUP = make_section_division_lookup() _LAD_NUTS1_LOOKUP = read_lad_nuts1_lookup() _LAD_NAME_DICT =", "x=alt.X(\"share_norm\", title=\"Glass vs CH share\"), color=alt.Color(\"section_name\", title=\"Section\"), tooltip=[\"division_name\"], ) ).properties(height=300, width=150) sector_comparison_chart save_altair(", "c in x[\"SIC4_code\"]]) .assign(division_name=lambda x: x[\"division\"].map(_DIV_NAME_LOOKUP)) .merge(nspl, left_on=\"postcode\", right_on=\"pcds\") ) return companies def", ".transform_filter(alt.datum.share_norm > 0) .mark_rect() .encode( y=alt.Y(\"lad_name\", sort=lads_sorted, title=\"Local Authority\"), x=alt.X(\"division\", axis=alt.Axis(labels=False, ticks=False)), color=alt.Color(", "ascending=[True, False] 
)[\"division\"].to_list() sector_shares[\"division_name\"] = sector_shares[\"division\"].map(_DIV_NAME_LOOKUP) # Chart comparing sector distributions sector_comparison_chart =", "UK as at 04_20.csv\") ) name_dict = name_lu.set_index(f\"{geo_var_name}CD\")[f\"{geo_var_name}NM\"].to_dict() return name_dict def read_nspl( geo=\"laua\",", "as at 04_20.csv\") ) name_dict = name_lu.set_index(f\"{geo_var_name}CD\")[f\"{geo_var_name}NM\"].to_dict() return name_dict def read_nspl( geo=\"laua\", names=\"LA_UA", "( google_chrome_driver_setup, save_altair, ) from sg_covid_impact.utils.altair_s3 import export_chart import sg_covid_impact project_dir = sg_covid_impact.project_dir", "x: x[\"division\"].value_counts(normalize=True) ) for df, name in zip([glass_meta_sc, companies_sc], [\"glass\", \"ch\"]) ], axis=1,", "companies_sc], [\"glass\", \"ch\"]) ], axis=1, ) ).fillna(0) lad_sector_shares.columns = [\"glass\", \"ch\"] lad_sector_shares =", "= name_lu.set_index(f\"{geo_var_name}CD\")[f\"{geo_var_name}NM\"].to_dict() return name_dict def read_nspl( geo=\"laua\", names=\"LA_UA names and codes UK as", "path=FIG_PATH ) export_chart(sector_comparison_chart, \"glass_sector_validation\") # Chart comparing geo distributions sh = read_shape() lad_shares", "= sector_shares[\"division\"].map(_DIV_NAME_LOOKUP) # Chart comparing sector distributions sector_comparison_chart = ( alt.Chart(sector_shares) .mark_bar() .encode(", "glass_validation save_altair(glass_validation, \"glass_place_validation\", driver, path=FIG_PATH) export_chart(glass_validation, \"glass_place_validation\") # LAD by division coverage lad_sector_shares", "at 04_20.csv\") ) name_dict = name_lu.set_index(f\"{geo_var_name}CD\")[f\"{geo_var_name}NM\"].to_dict() return name_dict def read_nspl( geo=\"laua\", names=\"LA_UA names", ") ).properties(height=300, width=150) sector_comparison_chart save_altair( sector_comparison_chart, \"glass_sector_validation\", driver=driver, path=FIG_PATH ) 
export_chart(sector_comparison_chart, \"glass_sector_validation\") #", "rep_chart = ( alt.Chart(lad_sector_shares) .transform_filter(alt.datum.share_norm > 0) .mark_rect() .encode( y=alt.Y(\"lad_name\", sort=lads_sorted, title=\"Local Authority\"),", "as np import os import logging import requests from zipfile import ZipFile from", "sel = lad_sector_shares.query(f\"laua=='{x}'\") corr = np.float(sel[[\"glass\", \"ch\"]].corr().iloc[0, 1]) corr_list.append([x, corr]) lads_corr_dict = {k[0]:", "corr_list = [] for x in set(lad_sector_shares[\"laua\"]): sel = lad_sector_shares.query(f\"laua=='{x}'\") corr = np.float(sel[[\"glass\",", "2020\"\"\" name_lu = pd.read_csv( os.path.join(meta_location, \"LA_UA names and codes UK as at 04_20.csv\")", "for df in [glass_meta, companies] ] sector_shares = ( make_shares_comparison(glass_meta_sc, companies_sc, \"division\") .reset_index(drop=False)", "LAD names and codes 2020\"\"\" name_lu = pd.read_csv( os.path.join(meta_location, \"LA_UA names and codes", ").properties(width=400, height=300) corr_chart = ( alt.Chart(lads_corr_df) .mark_point(filled=True, stroke=\"black\", strokeWidth=0.2) .encode( y=alt.Y( \"lad_name\", title=None,", "corr_chart = ( alt.Chart(lads_corr_df) .mark_point(filled=True, stroke=\"black\", strokeWidth=0.2) .encode( y=alt.Y( \"lad_name\", title=None, sort=lads_sorted, axis=alt.Axis(labels=False,", "names)) name_dict = name_lu.set_index(f\"{geo_var_name}CD\")[f\"{geo_var_name}NM\"].to_dict() nspl[f\"{geo}_name\"] = nspl[geo].map(name_dict) nspl[\"nuts1\"] = nspl[geo].apply(assign_nuts1_to_lad) return nspl def", "x[\"companies\"]) - 1) ) return out fetch_nspl() # Lookups _DIV_NAME_LOOKUP = extract_sic_code_description(load_sic_taxonomy(), \"Division\")", "axis=alt.Axis(labels=False, ticks=False)), x=alt.X(\"share_norm\", title=\"Glass vs CH share\"), color=alt.Color(\"section_name\", title=\"Section\"), tooltip=[\"division_name\"], ) ).properties(height=300, width=150)", "export_chart(glass_validation, 
\"glass_place_validation\") # LAD by division coverage lad_sector_shares = ( pd.concat( [ df.groupby(\"laua\").apply(", "distributions between Glass and CH\"\"\" out = ( pd.concat( [df[[variable]].value_counts(normalize=True) for df in", "lad_sector_shares = ( lad_sector_shares.assign(share_norm=lambda x: x[\"glass\"] / x[\"ch\"]) .reset_index(drop=False) .rename(columns={\"level_1\": \"division\"}) .assign(division_name=lambda x:", "Plot rep_chart = ( alt.Chart(lad_sector_shares) .transform_filter(alt.datum.share_norm > 0) .mark_rect() .encode( y=alt.Y(\"lad_name\", sort=lads_sorted, title=\"Local", "get_glass_house() glass_ch_meta = glass_house.query(f\"score>{score}\").merge( companies, on=\"company_number\" ) return glass_ch_meta def make_companies(): \"\"\"Make the", "os.path.exists(nspl_target) is True: logging.info(\"Already collected NSPL\") else: os.makedirs(nspl_target, exist_ok=True) req = requests.get(nspl_url) zipf", "comparing sector distributions sector_comparison_chart = ( alt.Chart(sector_shares) .mark_bar() .encode( y=alt.Y(\"division\", sort=sorted_divs, axis=alt.Axis(labels=False, ticks=False)),", "x[\"division\"].map(_DIV_NAME_LOOKUP)) .assign(lad_name=lambda x: x[\"laua\"].map(_LAD_NAME_DICT)) ) corr_list = [] for x in set(lad_sector_shares[\"laua\"]): sel", "df in [glass, ch]], axis=1 ) .rename(columns={0: \"glass\", 1: \"companies\"}) .assign(share_norm=lambda x: (x[\"glass\"]", "[x[0] for x in sorted(corr_list, key=lambda x: x[1], reverse=True)] lads_corr_df = pd.DataFrame(corr_list, columns=[\"lad_name\",", "x[\"division\"].map(_DIV_NAME_LOOKUP)) .merge(nspl, left_on=\"postcode\", right_on=\"pcds\") ) return companies def fetch_nspl(): \"\"\"Fetch NSPL (if needed)\"\"\"", "NSPL\") else: os.makedirs(nspl_target, exist_ok=True) req = requests.get(nspl_url) zipf = ZipFile(BytesIO(req.content)).extractall(nspl_target) def make_lad_lookup(geo_var_name=\"LAD20\"): \"\"\"Lookup", "comparing geo distributions sh = read_shape() lad_shares = 
make_shares_comparison(glass_meta_sc, companies_sc, \"laua\") lad_shares[[\"glass\", \"companies\"]].corr()", "# Chart comparing sector distributions sector_comparison_chart = ( alt.Chart(sector_shares) .mark_bar() .encode( y=alt.Y(\"division\", sort=sorted_divs,", "sort=lads_sorted, axis=alt.Axis(labels=False, ticks=False, grid=True), ), x=alt.X(\"glass_ch_correlation\", title=[\"Glass-CH sector\", \"share correlation\"]), color=alt.Color(\"glass_ch_correlation\", legend=None), )", "[ df.query(\"nuts1=='Scotland'\").reset_index(drop=True) for df in [glass_meta, companies] ] sector_shares = ( make_shares_comparison(glass_meta_sc, companies_sc,", "geo distributions sh = read_shape() lad_shares = make_shares_comparison(glass_meta_sc, companies_sc, \"laua\") lad_shares[[\"glass\", \"companies\"]].corr() merged", "table\"\"\" logging.info(\"Making glass metadata\") glass_house = get_glass_house() glass_ch_meta = glass_house.query(f\"score>{score}\").merge( companies, on=\"company_number\" )", "os.path.join(meta_location, \"LA_UA names and codes UK as at 04_20.csv\") ) name_dict = name_lu.set_index(f\"{geo_var_name}CD\")[f\"{geo_var_name}NM\"].to_dict()", "ticks=False)), x=alt.X(\"share_norm\", title=\"Glass vs CH share\"), color=alt.Color(\"section_name\", title=\"Section\"), tooltip=[\"division_name\"], ) ).properties(height=300, width=150) sector_comparison_chart", ") merged_json = json.loads(merged.to_json()) glass_share_map = ( plot_choro( merged_json, \"share_norm\", \"Glass vs CH", ") name_dict = name_lu.set_index(f\"{geo_var_name}CD\")[f\"{geo_var_name}NM\"].to_dict() return name_dict def read_nspl( geo=\"laua\", names=\"LA_UA names and codes", "ZipFile from io import BytesIO import altair as alt from sg_covid_impact.getters.glass_house import get_glass_house", "for c in x[\"SIC4_code\"]]) .assign(division_name=lambda x: x[\"division\"].map(_DIV_NAME_LOOKUP)) .merge(nspl, left_on=\"postcode\", right_on=\"pcds\") ) return companies", 
"name_lu.set_index(f\"{geo_var_name}CD\")[f\"{geo_var_name}NM\"].to_dict() nspl[f\"{geo}_name\"] = nspl[geo].map(name_dict) nspl[\"nuts1\"] = nspl[geo].apply(assign_nuts1_to_lad) return nspl def make_shares_comparison(glass, ch, variable):", "pd.read_csv( os.path.join(meta_location, \"LA_UA names and codes UK as at 04_20.csv\") ) name_dict =", "( pd.concat( [df[[variable]].value_counts(normalize=True) for df in [glass, ch]], axis=1 ) .rename(columns={0: \"glass\", 1:", "from zipfile import ZipFile from io import BytesIO import altair as alt from", ".assign(division=lambda x: [c[:2] for c in x[\"SIC4_code\"]]) .assign(division_name=lambda x: x[\"division\"].map(_DIV_NAME_LOOKUP)) .merge(nspl, left_on=\"postcode\", right_on=\"pcds\")", "\"glass_sector_validation\") # Chart comparing geo distributions sh = read_shape() lad_shares = make_shares_comparison(glass_meta_sc, companies_sc,", "columns=[\"lad_name\", \"glass_ch_correlation\"]) # Plot rep_chart = ( alt.Chart(lad_sector_shares) .transform_filter(alt.datum.share_norm > 0) .mark_rect() .encode(", "scale_type=\"linear\" ) # .configure_view(strokeWidth=0) .properties(height=300, width=200) ) glass_validation = alt.hconcat(sector_comparison_chart, glass_share_map) glass_validation save_altair(glass_validation,", "companies] ] sector_shares = ( make_shares_comparison(glass_meta_sc, companies_sc, \"division\") .reset_index(drop=False) .assign( section_name=lambda x: x[\"division\"]", "make_section_division_lookup() _LAD_NUTS1_LOOKUP = read_lad_nuts1_lookup() _LAD_NAME_DICT = make_lad_lookup() # Read everything nspl = read_nspl()", "make_shares_comparison(glass_meta_sc, companies_sc, \"laua\") lad_shares[[\"glass\", \"companies\"]].corr() merged = sh.merge( lad_shares.reset_index(drop=False), left_on=\"lad19cd\", right_on=\"laua\" ) merged_json", ").fillna(0) lad_sector_shares.columns = [\"glass\", \"ch\"] lad_sector_shares = ( lad_sector_shares.assign(share_norm=lambda x: x[\"glass\"] / x[\"ch\"])", 
"df.groupby(\"laua\").apply( lambda x: x[\"division\"].value_counts(normalize=True) ) for df, name in zip([glass_meta_sc, companies_sc], [\"glass\", \"ch\"])", "\"https://www.arcgis.com/sharing/rest/content/items/4df8a1a188e74542aebee164525d7ca9/data\" if os.path.exists(nspl_target) is True: logging.info(\"Already collected NSPL\") else: os.makedirs(nspl_target, exist_ok=True) req =", "sg_covid_impact.make_sic_division import ( load_sic_taxonomy, extract_sic_code_description, ) from sg_covid_impact.descriptive import ( assign_nuts1_to_lad, read_shape, plot_choro,", "logging.info(\"Making CH\") companies_address = get_address() companies_sector = get_sector() companies = ( companies_address[[\"company_number\", \"postcode\"]]", "] sector_shares = ( make_shares_comparison(glass_meta_sc, companies_sc, \"division\") .reset_index(drop=False) .assign( section_name=lambda x: x[\"division\"] .map(_SECTION_DIVISION_LOOKUP)", "companies_sector = get_sector() companies = ( companies_address[[\"company_number\", \"postcode\"]] .merge( companies_sector.query(\"rank==1\")[[\"company_number\", \"SIC4_code\"]], on=\"company_number\", )", "read_shape, plot_choro, read_lad_nuts1_lookup, make_section_division_lookup, ) from sg_covid_impact.utils.altair_save_utils import ( google_chrome_driver_setup, save_altair, ) from", "google_chrome_driver_setup() nspl_target = f\"{project_dir}/data/raw/nspl\" nspl_location = os.path.join(nspl_target, \"Data\", \"NSPL_NOV_2020_UK.csv\") meta_location = os.path.join(nspl_target, \"Documents\")", "glass_ch_meta = glass_house.query(f\"score>{score}\").merge( companies, on=\"company_number\" ) return glass_ch_meta def make_companies(): \"\"\"Make the companies", "sector_shares = ( make_shares_comparison(glass_meta_sc, companies_sc, \"division\") .reset_index(drop=False) .assign( section_name=lambda x: x[\"division\"] .map(_SECTION_DIVISION_LOOKUP) .map(_SECTION_NAME_LOOKUP)", "on Scotland # Scot glass_meta_sc, companies_sc = [ 
df.query(\"nuts1=='Scotland'\").reset_index(drop=True) for df in [glass_meta,", "np.float(sel[[\"glass\", \"ch\"]].corr().iloc[0, 1]) corr_list.append([x, corr]) lads_corr_dict = {k[0]: k[1] for k in corr_list}", "pd.read_csv(os.path.join(meta_location, names)) name_dict = name_lu.set_index(f\"{geo_var_name}CD\")[f\"{geo_var_name}NM\"].to_dict() nspl[f\"{geo}_name\"] = nspl[geo].map(name_dict) nspl[\"nuts1\"] = nspl[geo].apply(assign_nuts1_to_lad) return nspl", ") name_lu = pd.read_csv(os.path.join(meta_location, names)) name_dict = name_lu.set_index(f\"{geo_var_name}CD\")[f\"{geo_var_name}NM\"].to_dict() nspl[f\"{geo}_name\"] = nspl[geo].map(name_dict) nspl[\"nuts1\"] =", "(if needed)\"\"\" nspl_url = \"https://www.arcgis.com/sharing/rest/content/items/4df8a1a188e74542aebee164525d7ca9/data\" if os.path.exists(nspl_target) is True: logging.info(\"Already collected NSPL\") else:", "correlation\"]), color=alt.Color(\"glass_ch_correlation\", legend=None), ) ).properties(width=100, height=300) lad_share_comparison = alt.hconcat(rep_chart, corr_chart, spacing=1).resolve_scale( color=\"independent\" )", "import get_glass_house from sg_covid_impact.getters.companies_house import get_address, get_sector from sg_covid_impact.make_sic_division import ( load_sic_taxonomy, extract_sic_code_description,", "import os import logging import requests from zipfile import ZipFile from io import", "= read_nspl() companies = make_companies() glass_meta = make_glass_meta(companies) # Focus on Scotland #", "\"ch\"] lad_sector_shares = ( lad_sector_shares.assign(share_norm=lambda x: x[\"glass\"] / x[\"ch\"]) .reset_index(drop=False) .rename(columns={\"level_1\": \"division\"}) .assign(division_name=lambda", "0) .mark_rect() .encode( y=alt.Y(\"lad_name\", sort=lads_sorted, title=\"Local Authority\"), x=alt.X(\"division\", axis=alt.Axis(labels=False, ticks=False)), color=alt.Color( \"share_norm\", sort=\"descending\",", "def make_lad_lookup(geo_var_name=\"LAD20\"): \"\"\"Lookup between LAD 
names and codes 2020\"\"\" name_lu = pd.read_csv( os.path.join(meta_location,", "sg_covid_impact.descriptive import ( assign_nuts1_to_lad, read_shape, plot_choro, read_lad_nuts1_lookup, make_section_division_lookup, ) from sg_covid_impact.utils.altair_save_utils import (", ".encode( y=alt.Y(\"division\", sort=sorted_divs, axis=alt.Axis(labels=False, ticks=False)), x=alt.X(\"share_norm\", title=\"Glass vs CH share\"), color=alt.Color(\"section_name\", title=\"Section\"), tooltip=[\"division_name\"],", "y=alt.Y( \"lad_name\", title=None, sort=lads_sorted, axis=alt.Axis(labels=False, ticks=False, grid=True), ), x=alt.X(\"glass_ch_correlation\", title=[\"Glass-CH sector\", \"share correlation\"]),", "name_lu = pd.read_csv( os.path.join(meta_location, \"LA_UA names and codes UK as at 04_20.csv\") )", "sector_shares[\"division_name\"] = sector_shares[\"division\"].map(_DIV_NAME_LOOKUP) # Chart comparing sector distributions sector_comparison_chart = ( alt.Chart(sector_shares) .mark_bar()", "glass_validation = alt.hconcat(sector_comparison_chart, glass_share_map) glass_validation save_altair(glass_validation, \"glass_place_validation\", driver, path=FIG_PATH) export_chart(glass_validation, \"glass_place_validation\") # LAD", "name_dict = name_lu.set_index(f\"{geo_var_name}CD\")[f\"{geo_var_name}NM\"].to_dict() nspl[f\"{geo}_name\"] = nspl[geo].map(name_dict) nspl[\"nuts1\"] = nspl[geo].apply(assign_nuts1_to_lad) return nspl def make_shares_comparison(glass,", "= ( lad_sector_shares.assign(share_norm=lambda x: x[\"glass\"] / x[\"ch\"]) .reset_index(drop=False) .rename(columns={\"level_1\": \"division\"}) .assign(division_name=lambda x: x[\"division\"].map(_DIV_NAME_LOOKUP))", "axis=1, ) ).fillna(0) lad_sector_shares.columns = [\"glass\", \"ch\"] lad_sector_shares = ( lad_sector_shares.assign(share_norm=lambda x: x[\"glass\"]", "sector_comparison_chart, \"glass_sector_validation\", driver=driver, path=FIG_PATH ) export_chart(sector_comparison_chart, 
\"glass_sector_validation\") # Chart comparing geo distributions sh", "in set(lad_sector_shares[\"laua\"]): sel = lad_sector_shares.query(f\"laua=='{x}'\") corr = np.float(sel[[\"glass\", \"ch\"]].corr().iloc[0, 1]) corr_list.append([x, corr]) lads_corr_dict", "sector distributions sector_comparison_chart = ( alt.Chart(sector_shares) .mark_bar() .encode( y=alt.Y(\"division\", sort=sorted_divs, axis=alt.Axis(labels=False, ticks=False)), x=alt.X(\"share_norm\",", "= {k[0]: k[1] for k in corr_list} lads_sorted = [x[0] for x in", "\"glass\", 1: \"companies\"}) .assign(share_norm=lambda x: (x[\"glass\"] / x[\"companies\"]) - 1) ) return out", "zip([glass_meta_sc, companies_sc], [\"glass\", \"ch\"]) ], axis=1, ) ).fillna(0) lad_sector_shares.columns = [\"glass\", \"ch\"] lad_sector_shares", "glass_share_map = ( plot_choro( merged_json, \"share_norm\", \"Glass vs CH share\", \"lad19nm\", scale_type=\"linear\" )", "nspl = pd.read_csv(nspl_location, usecols=[\"pcds\", geo]).dropna( axis=0, subset=[geo] ) name_lu = pd.read_csv(os.path.join(meta_location, names)) name_dict", "[\"glass\", \"ch\"] lad_sector_shares = ( lad_sector_shares.assign(share_norm=lambda x: x[\"glass\"] / x[\"ch\"]) .reset_index(drop=False) .rename(columns={\"level_1\": \"division\"})", "Glass and CH\"\"\" out = ( pd.concat( [df[[variable]].value_counts(normalize=True) for df in [glass, ch]],", "from sg_covid_impact.utils.altair_s3 import export_chart import sg_covid_impact project_dir = sg_covid_impact.project_dir FIG_PATH = f\"{project_dir}/figures/scotland\" driver", "= make_companies() glass_meta = make_glass_meta(companies) # Focus on Scotland # Scot glass_meta_sc, companies_sc", "[glass_meta, companies] ] sector_shares = ( make_shares_comparison(glass_meta_sc, companies_sc, \"division\") .reset_index(drop=False) .assign( section_name=lambda x:", "division coverage lad_sector_shares = ( pd.concat( [ df.groupby(\"laua\").apply( lambda x: x[\"division\"].value_counts(normalize=True) ) for", 
"lad_shares.reset_index(drop=False), left_on=\"lad19cd\", right_on=\"laua\" ) merged_json = json.loads(merged.to_json()) glass_share_map = ( plot_choro( merged_json, \"share_norm\",", "lad_sector_shares.assign(share_norm=lambda x: x[\"glass\"] / x[\"ch\"]) .reset_index(drop=False) .rename(columns={\"level_1\": \"division\"}) .assign(division_name=lambda x: x[\"division\"].map(_DIV_NAME_LOOKUP)) .assign(lad_name=lambda x:", "glass metadata table\"\"\" logging.info(\"Making glass metadata\") glass_house = get_glass_house() glass_ch_meta = glass_house.query(f\"score>{score}\").merge( companies,", "lads_sorted = [x[0] for x in sorted(corr_list, key=lambda x: x[1], reverse=True)] lads_corr_df =", "import numpy as np import os import logging import requests from zipfile import", "left_on=\"lad19cd\", right_on=\"laua\" ) merged_json = json.loads(merged.to_json()) glass_share_map = ( plot_choro( merged_json, \"share_norm\", \"Glass", "tooltip=[\"lad_name\", \"division_name\", \"share_norm\"], ) ).properties(width=400, height=300) corr_chart = ( alt.Chart(lads_corr_df) .mark_point(filled=True, stroke=\"black\", strokeWidth=0.2)", "get_glass_house from sg_covid_impact.getters.companies_house import get_address, get_sector from sg_covid_impact.make_sic_division import ( load_sic_taxonomy, extract_sic_code_description, )", "corr = np.float(sel[[\"glass\", \"ch\"]].corr().iloc[0, 1]) corr_list.append([x, corr]) lads_corr_dict = {k[0]: k[1] for k", "color=alt.Color( \"share_norm\", sort=\"descending\", title=\"Glass vs CH share\", scale=alt.Scale(scheme=\"Spectral\", type=\"log\"), legend=alt.Legend(orient=\"bottom\"), ), tooltip=[\"lad_name\", \"division_name\",", "and CH\"\"\" out = ( pd.concat( [df[[variable]].value_counts(normalize=True) for df in [glass, ch]], axis=1", "\"glass_place_validation\", driver, path=FIG_PATH) export_chart(glass_validation, \"glass_place_validation\") # LAD by division coverage lad_sector_shares = (", "import altair as alt from 
sg_covid_impact.getters.glass_house import get_glass_house from sg_covid_impact.getters.companies_house import get_address, get_sector", "axis=0, subset=[geo] ) name_lu = pd.read_csv(os.path.join(meta_location, names)) name_dict = name_lu.set_index(f\"{geo_var_name}CD\")[f\"{geo_var_name}NM\"].to_dict() nspl[f\"{geo}_name\"] = nspl[geo].map(name_dict)", "[df[[variable]].value_counts(normalize=True) for df in [glass, ch]], axis=1 ) .rename(columns={0: \"glass\", 1: \"companies\"}) .assign(share_norm=lambda", "from io import BytesIO import altair as alt from sg_covid_impact.getters.glass_house import get_glass_house from", "else: os.makedirs(nspl_target, exist_ok=True) req = requests.get(nspl_url) zipf = ZipFile(BytesIO(req.content)).extractall(nspl_target) def make_lad_lookup(geo_var_name=\"LAD20\"): \"\"\"Lookup between", "\"lad_name\", title=None, sort=lads_sorted, axis=alt.Axis(labels=False, ticks=False, grid=True), ), x=alt.X(\"glass_ch_correlation\", title=[\"Glass-CH sector\", \"share correlation\"]), color=alt.Color(\"glass_ch_correlation\",", "CH\"\"\" out = ( pd.concat( [df[[variable]].value_counts(normalize=True) for df in [glass, ch]], axis=1 )", "\"\"\"Lookup between LAD names and codes 2020\"\"\" name_lu = pd.read_csv( os.path.join(meta_location, \"LA_UA names", "for k in corr_list} lads_sorted = [x[0] for x in sorted(corr_list, key=lambda x:", "import ( assign_nuts1_to_lad, read_shape, plot_choro, read_lad_nuts1_lookup, make_section_division_lookup, ) from sg_covid_impact.utils.altair_save_utils import ( google_chrome_driver_setup,", "companies, on=\"company_number\" ) return glass_ch_meta def make_companies(): \"\"\"Make the companies house table\"\"\" logging.info(\"Making", "req = requests.get(nspl_url) zipf = ZipFile(BytesIO(req.content)).extractall(nspl_target) def make_lad_lookup(geo_var_name=\"LAD20\"): \"\"\"Lookup between LAD names and", "# Read everything nspl = read_nspl() companies = make_companies() glass_meta = make_glass_meta(companies) 
#", "x[\"ch\"]) .reset_index(drop=False) .rename(columns={\"level_1\": \"division\"}) .assign(division_name=lambda x: x[\"division\"].map(_DIV_NAME_LOOKUP)) .assign(lad_name=lambda x: x[\"laua\"].map(_LAD_NAME_DICT)) ) corr_list =", "make_companies() glass_meta = make_glass_meta(companies) # Focus on Scotland # Scot glass_meta_sc, companies_sc =", "x: x[\"glass\"] / x[\"ch\"]) .reset_index(drop=False) .rename(columns={\"level_1\": \"division\"}) .assign(division_name=lambda x: x[\"division\"].map(_DIV_NAME_LOOKUP)) .assign(lad_name=lambda x: x[\"laua\"].map(_LAD_NAME_DICT))", "import ( load_sic_taxonomy, extract_sic_code_description, ) from sg_covid_impact.descriptive import ( assign_nuts1_to_lad, read_shape, plot_choro, read_lad_nuts1_lookup,", "between LAD names and codes 2020\"\"\" name_lu = pd.read_csv( os.path.join(meta_location, \"LA_UA names and", "read_shape() lad_shares = make_shares_comparison(glass_meta_sc, companies_sc, \"laua\") lad_shares[[\"glass\", \"companies\"]].corr() merged = sh.merge( lad_shares.reset_index(drop=False), left_on=\"lad19cd\",", "make_lad_lookup(geo_var_name=\"LAD20\"): \"\"\"Lookup between LAD names and codes 2020\"\"\" name_lu = pd.read_csv( os.path.join(meta_location, \"LA_UA", "), tooltip=[\"lad_name\", \"division_name\", \"share_norm\"], ) ).properties(width=400, height=300) corr_chart = ( alt.Chart(lads_corr_df) .mark_point(filled=True, stroke=\"black\",", "= os.path.join(nspl_target, \"Data\", \"NSPL_NOV_2020_UK.csv\") meta_location = os.path.join(nspl_target, \"Documents\") # Functions def make_glass_meta(companies, score=60):", "nspl def make_shares_comparison(glass, ch, variable): \"\"\"Compare distributions between Glass and CH\"\"\" out =", "04_20.csv\") ) name_dict = name_lu.set_index(f\"{geo_var_name}CD\")[f\"{geo_var_name}NM\"].to_dict() return name_dict def read_nspl( geo=\"laua\", names=\"LA_UA names and", "extract_sic_code_description, ) from sg_covid_impact.descriptive import ( assign_nuts1_to_lad, read_shape, 
plot_choro, read_lad_nuts1_lookup, make_section_division_lookup, ) from", "return nspl def make_shares_comparison(glass, ch, variable): \"\"\"Compare distributions between Glass and CH\"\"\" out", "strokeWidth=0.2) .encode( y=alt.Y( \"lad_name\", title=None, sort=lads_sorted, axis=alt.Axis(labels=False, ticks=False, grid=True), ), x=alt.X(\"glass_ch_correlation\", title=[\"Glass-CH sector\",", "Chart comparing geo distributions sh = read_shape() lad_shares = make_shares_comparison(glass_meta_sc, companies_sc, \"laua\") lad_shares[[\"glass\",", "Scrip to validate glass data import pandas as pd import json import numpy", "ch]], axis=1 ) .rename(columns={0: \"glass\", 1: \"companies\"}) .assign(share_norm=lambda x: (x[\"glass\"] / x[\"companies\"]) -", "= pd.DataFrame(corr_list, columns=[\"lad_name\", \"glass_ch_correlation\"]) # Plot rep_chart = ( alt.Chart(lad_sector_shares) .transform_filter(alt.datum.share_norm > 0)", "= read_shape() lad_shares = make_shares_comparison(glass_meta_sc, companies_sc, \"laua\") lad_shares[[\"glass\", \"companies\"]].corr() merged = sh.merge( lad_shares.reset_index(drop=False),", "names=\"LA_UA names and codes UK as at 04_20.csv\", geo_var_name=\"LAD20\" ): \"\"\"Read and tag", "read_lad_nuts1_lookup() _LAD_NAME_DICT = make_lad_lookup() # Read everything nspl = read_nspl() companies = make_companies()", "def make_companies(): \"\"\"Make the companies house table\"\"\" logging.info(\"Making CH\") companies_address = get_address() companies_sector", "correlations sector_shares[[\"glass\", \"companies\"]].corr() # Sorted divisions sorted_divs = sector_shares.sort_values( [\"section_name\", \"share_norm\"], ascending=[True, False]", "corr_list} lads_sorted = [x[0] for x in sorted(corr_list, key=lambda x: x[1], reverse=True)] lads_corr_df", "alt.Chart(lad_sector_shares) .transform_filter(alt.datum.share_norm > 0) .mark_rect() .encode( y=alt.Y(\"lad_name\", sort=lads_sorted, title=\"Local Authority\"), x=alt.X(\"division\", 
axis=alt.Axis(labels=False, ticks=False)),", "= nspl[geo].apply(assign_nuts1_to_lad) return nspl def make_shares_comparison(glass, ch, variable): \"\"\"Compare distributions between Glass and", "read_nspl( geo=\"laua\", names=\"LA_UA names and codes UK as at 04_20.csv\", geo_var_name=\"LAD20\" ): \"\"\"Read", "nspl_url = \"https://www.arcgis.com/sharing/rest/content/items/4df8a1a188e74542aebee164525d7ca9/data\" if os.path.exists(nspl_target) is True: logging.info(\"Already collected NSPL\") else: os.makedirs(nspl_target, exist_ok=True)", "\"\"\"Fetch NSPL (if needed)\"\"\" nspl_url = \"https://www.arcgis.com/sharing/rest/content/items/4df8a1a188e74542aebee164525d7ca9/data\" if os.path.exists(nspl_target) is True: logging.info(\"Already collected", "[glass, ch]], axis=1 ) .rename(columns={0: \"glass\", 1: \"companies\"}) .assign(share_norm=lambda x: (x[\"glass\"] / x[\"companies\"])", "make_section_division_lookup, ) from sg_covid_impact.utils.altair_save_utils import ( google_chrome_driver_setup, save_altair, ) from sg_covid_impact.utils.altair_s3 import export_chart", "companies = ( companies_address[[\"company_number\", \"postcode\"]] .merge( companies_sector.query(\"rank==1\")[[\"company_number\", \"SIC4_code\"]], on=\"company_number\", ) .assign(division=lambda x: [c[:2]", "for x in set(lad_sector_shares[\"laua\"]): sel = lad_sector_shares.query(f\"laua=='{x}'\") corr = np.float(sel[[\"glass\", \"ch\"]].corr().iloc[0, 1]) corr_list.append([x,", "= get_glass_house() glass_ch_meta = glass_house.query(f\"score>{score}\").merge( companies, on=\"company_number\" ) return glass_ch_meta def make_companies(): \"\"\"Make", "geo=\"laua\", names=\"LA_UA names and codes UK as at 04_20.csv\", geo_var_name=\"LAD20\" ): \"\"\"Read and", "google_chrome_driver_setup, save_altair, ) from sg_covid_impact.utils.altair_s3 import export_chart import sg_covid_impact project_dir = sg_covid_impact.project_dir FIG_PATH", "companies_address = get_address() companies_sector = get_sector() 
companies = ( companies_address[[\"company_number\", \"postcode\"]] .merge( companies_sector.query(\"rank==1\")[[\"company_number\",", "geo]).dropna( axis=0, subset=[geo] ) name_lu = pd.read_csv(os.path.join(meta_location, names)) name_dict = name_lu.set_index(f\"{geo_var_name}CD\")[f\"{geo_var_name}NM\"].to_dict() nspl[f\"{geo}_name\"] =", "NSPL (if needed)\"\"\" nspl_url = \"https://www.arcgis.com/sharing/rest/content/items/4df8a1a188e74542aebee164525d7ca9/data\" if os.path.exists(nspl_target) is True: logging.info(\"Already collected NSPL\")", "nspl[geo].apply(assign_nuts1_to_lad) return nspl def make_shares_comparison(glass, ch, variable): \"\"\"Compare distributions between Glass and CH\"\"\"", "ticks=False)), color=alt.Color( \"share_norm\", sort=\"descending\", title=\"Glass vs CH share\", scale=alt.Scale(scheme=\"Spectral\", type=\"log\"), legend=alt.Legend(orient=\"bottom\"), ), tooltip=[\"lad_name\",", "tooltip=[\"division_name\"], ) ).properties(height=300, width=150) sector_comparison_chart save_altair( sector_comparison_chart, \"glass_sector_validation\", driver=driver, path=FIG_PATH ) export_chart(sector_comparison_chart, \"glass_sector_validation\")", "BytesIO import altair as alt from sg_covid_impact.getters.glass_house import get_glass_house from sg_covid_impact.getters.companies_house import get_address,", "x: [c[:2] for c in x[\"SIC4_code\"]]) .assign(division_name=lambda x: x[\"division\"].map(_DIV_NAME_LOOKUP)) .merge(nspl, left_on=\"postcode\", right_on=\"pcds\") )", "NSPL\"\"\" logging.info(\"Reading NSPL\") nspl = pd.read_csv(nspl_location, usecols=[\"pcds\", geo]).dropna( axis=0, subset=[geo] ) name_lu =", "= [ df.query(\"nuts1=='Scotland'\").reset_index(drop=True) for df in [glass_meta, companies] ] sector_shares = ( make_shares_comparison(glass_meta_sc,", "\"postcode\"]] .merge( companies_sector.query(\"rank==1\")[[\"company_number\", \"SIC4_code\"]], on=\"company_number\", ) .assign(division=lambda x: [c[:2] for c in x[\"SIC4_code\"]])", 
"Lookups _DIV_NAME_LOOKUP = extract_sic_code_description(load_sic_taxonomy(), \"Division\") _SECTION_DIVISION_LOOKUP, _SECTION_NAME_LOOKUP = make_section_division_lookup() _LAD_NUTS1_LOOKUP = read_lad_nuts1_lookup() _LAD_NAME_DICT", "True: logging.info(\"Already collected NSPL\") else: os.makedirs(nspl_target, exist_ok=True) req = requests.get(nspl_url) zipf = ZipFile(BytesIO(req.content)).extractall(nspl_target)", "\"\"\"Makes the glass metadata table\"\"\" logging.info(\"Making glass metadata\") glass_house = get_glass_house() glass_ch_meta =", ") from sg_covid_impact.descriptive import ( assign_nuts1_to_lad, read_shape, plot_choro, read_lad_nuts1_lookup, make_section_division_lookup, ) from sg_covid_impact.utils.altair_save_utils", "= pd.read_csv(nspl_location, usecols=[\"pcds\", geo]).dropna( axis=0, subset=[geo] ) name_lu = pd.read_csv(os.path.join(meta_location, names)) name_dict =", "= read_lad_nuts1_lookup() _LAD_NAME_DICT = make_lad_lookup() # Read everything nspl = read_nspl() companies =", "table\"\"\" logging.info(\"Making CH\") companies_address = get_address() companies_sector = get_sector() companies = ( companies_address[[\"company_number\",", "logging.info(\"Reading NSPL\") nspl = pd.read_csv(nspl_location, usecols=[\"pcds\", geo]).dropna( axis=0, subset=[geo] ) name_lu = pd.read_csv(os.path.join(meta_location,", "lad_sector_shares = ( pd.concat( [ df.groupby(\"laua\").apply( lambda x: x[\"division\"].value_counts(normalize=True) ) for df, name", "make_shares_comparison(glass, ch, variable): \"\"\"Compare distributions between Glass and CH\"\"\" out = ( pd.concat(", "= ( companies_address[[\"company_number\", \"postcode\"]] .merge( companies_sector.query(\"rank==1\")[[\"company_number\", \"SIC4_code\"]], on=\"company_number\", ) .assign(division=lambda x: [c[:2] for", "driver = google_chrome_driver_setup() nspl_target = f\"{project_dir}/data/raw/nspl\" nspl_location = os.path.join(nspl_target, \"Data\", \"NSPL_NOV_2020_UK.csv\") meta_location =", 
"nspl[f\"{geo}_name\"] = nspl[geo].map(name_dict) nspl[\"nuts1\"] = nspl[geo].apply(assign_nuts1_to_lad) return nspl def make_shares_comparison(glass, ch, variable): \"\"\"Compare", "names and codes UK as at 04_20.csv\") ) name_dict = name_lu.set_index(f\"{geo_var_name}CD\")[f\"{geo_var_name}NM\"].to_dict() return name_dict", "= sector_shares.sort_values( [\"section_name\", \"share_norm\"], ascending=[True, False] )[\"division\"].to_list() sector_shares[\"division_name\"] = sector_shares[\"division\"].map(_DIV_NAME_LOOKUP) # Chart comparing", "share\", scale=alt.Scale(scheme=\"Spectral\", type=\"log\"), legend=alt.Legend(orient=\"bottom\"), ), tooltip=[\"lad_name\", \"division_name\", \"share_norm\"], ) ).properties(width=400, height=300) corr_chart =", "= \"https://www.arcgis.com/sharing/rest/content/items/4df8a1a188e74542aebee164525d7ca9/data\" if os.path.exists(nspl_target) is True: logging.info(\"Already collected NSPL\") else: os.makedirs(nspl_target, exist_ok=True) req", "= ( make_shares_comparison(glass_meta_sc, companies_sc, \"division\") .reset_index(drop=False) .assign( section_name=lambda x: x[\"division\"] .map(_SECTION_DIVISION_LOOKUP) .map(_SECTION_NAME_LOOKUP) )", "on=\"company_number\" ) return glass_ch_meta def make_companies(): \"\"\"Make the companies house table\"\"\" logging.info(\"Making CH\")", "glass_meta_sc, companies_sc = [ df.query(\"nuts1=='Scotland'\").reset_index(drop=True) for df in [glass_meta, companies] ] sector_shares =", ".reset_index(drop=False) .assign( section_name=lambda x: x[\"division\"] .map(_SECTION_DIVISION_LOOKUP) .map(_SECTION_NAME_LOOKUP) ) .dropna(axis=0) ) # Calculate correlations", "make_glass_meta(companies) # Focus on Scotland # Scot glass_meta_sc, companies_sc = [ df.query(\"nuts1=='Scotland'\").reset_index(drop=True) for", "= np.float(sel[[\"glass\", \"ch\"]].corr().iloc[0, 1]) corr_list.append([x, corr]) lads_corr_dict = {k[0]: k[1] for k in", "nspl = read_nspl() companies = make_companies() glass_meta = 
make_glass_meta(companies) # Focus on Scotland", "\"SIC4_code\"]], on=\"company_number\", ) .assign(division=lambda x: [c[:2] for c in x[\"SIC4_code\"]]) .assign(division_name=lambda x: x[\"division\"].map(_DIV_NAME_LOOKUP))", "\"share_norm\"], ascending=[True, False] )[\"division\"].to_list() sector_shares[\"division_name\"] = sector_shares[\"division\"].map(_DIV_NAME_LOOKUP) # Chart comparing sector distributions sector_comparison_chart", "= alt.hconcat(sector_comparison_chart, glass_share_map) glass_validation save_altair(glass_validation, \"glass_place_validation\", driver, path=FIG_PATH) export_chart(glass_validation, \"glass_place_validation\") # LAD by", "= name_lu.set_index(f\"{geo_var_name}CD\")[f\"{geo_var_name}NM\"].to_dict() nspl[f\"{geo}_name\"] = nspl[geo].map(name_dict) nspl[\"nuts1\"] = nspl[geo].apply(assign_nuts1_to_lad) return nspl def make_shares_comparison(glass, ch,", "height=300) corr_chart = ( alt.Chart(lads_corr_df) .mark_point(filled=True, stroke=\"black\", strokeWidth=0.2) .encode( y=alt.Y( \"lad_name\", title=None, sort=lads_sorted,", "alt.Chart(lads_corr_df) .mark_point(filled=True, stroke=\"black\", strokeWidth=0.2) .encode( y=alt.Y( \"lad_name\", title=None, sort=lads_sorted, axis=alt.Axis(labels=False, ticks=False, grid=True), ),", "\"Data\", \"NSPL_NOV_2020_UK.csv\") meta_location = os.path.join(nspl_target, \"Documents\") # Functions def make_glass_meta(companies, score=60): \"\"\"Makes the", "Calculate correlations sector_shares[[\"glass\", \"companies\"]].corr() # Sorted divisions sorted_divs = sector_shares.sort_values( [\"section_name\", \"share_norm\"], ascending=[True,", "names and codes 2020\"\"\" name_lu = pd.read_csv( os.path.join(meta_location, \"LA_UA names and codes UK", "sector_comparison_chart = ( alt.Chart(sector_shares) .mark_bar() .encode( y=alt.Y(\"division\", sort=sorted_divs, axis=alt.Axis(labels=False, ticks=False)), x=alt.X(\"share_norm\", title=\"Glass vs", "pd import json import numpy as np import os import 
logging import requests", "in corr_list} lads_sorted = [x[0] for x in sorted(corr_list, key=lambda x: x[1], reverse=True)]", "= ( plot_choro( merged_json, \"share_norm\", \"Glass vs CH share\", \"lad19nm\", scale_type=\"linear\" ) #", "subset=[geo] ) name_lu = pd.read_csv(os.path.join(meta_location, names)) name_dict = name_lu.set_index(f\"{geo_var_name}CD\")[f\"{geo_var_name}NM\"].to_dict() nspl[f\"{geo}_name\"] = nspl[geo].map(name_dict) nspl[\"nuts1\"]", "export_chart import sg_covid_impact project_dir = sg_covid_impact.project_dir FIG_PATH = f\"{project_dir}/figures/scotland\" driver = google_chrome_driver_setup() nspl_target", "np import os import logging import requests from zipfile import ZipFile from io", "read_lad_nuts1_lookup, make_section_division_lookup, ) from sg_covid_impact.utils.altair_save_utils import ( google_chrome_driver_setup, save_altair, ) from sg_covid_impact.utils.altair_s3 import", "ZipFile(BytesIO(req.content)).extractall(nspl_target) def make_lad_lookup(geo_var_name=\"LAD20\"): \"\"\"Lookup between LAD names and codes 2020\"\"\" name_lu = pd.read_csv(", "/ x[\"companies\"]) - 1) ) return out fetch_nspl() # Lookups _DIV_NAME_LOOKUP = extract_sic_code_description(load_sic_taxonomy(),", "x: x[\"division\"].map(_DIV_NAME_LOOKUP)) .assign(lad_name=lambda x: x[\"laua\"].map(_LAD_NAME_DICT)) ) corr_list = [] for x in set(lad_sector_shares[\"laua\"]):", "extract_sic_code_description(load_sic_taxonomy(), \"Division\") _SECTION_DIVISION_LOOKUP, _SECTION_NAME_LOOKUP = make_section_division_lookup() _LAD_NUTS1_LOOKUP = read_lad_nuts1_lookup() _LAD_NAME_DICT = make_lad_lookup() #", ") return companies def fetch_nspl(): \"\"\"Fetch NSPL (if needed)\"\"\" nspl_url = \"https://www.arcgis.com/sharing/rest/content/items/4df8a1a188e74542aebee164525d7ca9/data\" if", "\"division\") .reset_index(drop=False) .assign( section_name=lambda x: x[\"division\"] .map(_SECTION_DIVISION_LOOKUP) .map(_SECTION_NAME_LOOKUP) ) .dropna(axis=0) ) # Calculate", "df in 
[glass_meta, companies] ] sector_shares = ( make_shares_comparison(glass_meta_sc, companies_sc, \"division\") .reset_index(drop=False) .assign(", "( companies_address[[\"company_number\", \"postcode\"]] .merge( companies_sector.query(\"rank==1\")[[\"company_number\", \"SIC4_code\"]], on=\"company_number\", ) .assign(division=lambda x: [c[:2] for c", "merged_json, \"share_norm\", \"Glass vs CH share\", \"lad19nm\", scale_type=\"linear\" ) # .configure_view(strokeWidth=0) .properties(height=300, width=200)", "lad_sector_shares.columns = [\"glass\", \"ch\"] lad_sector_shares = ( lad_sector_shares.assign(share_norm=lambda x: x[\"glass\"] / x[\"ch\"]) .reset_index(drop=False)", ")[\"division\"].to_list() sector_shares[\"division_name\"] = sector_shares[\"division\"].map(_DIV_NAME_LOOKUP) # Chart comparing sector distributions sector_comparison_chart = ( alt.Chart(sector_shares)", "f\"{project_dir}/figures/scotland\" driver = google_chrome_driver_setup() nspl_target = f\"{project_dir}/data/raw/nspl\" nspl_location = os.path.join(nspl_target, \"Data\", \"NSPL_NOV_2020_UK.csv\") meta_location", "= requests.get(nspl_url) zipf = ZipFile(BytesIO(req.content)).extractall(nspl_target) def make_lad_lookup(geo_var_name=\"LAD20\"): \"\"\"Lookup between LAD names and codes", ".rename(columns={0: \"glass\", 1: \"companies\"}) .assign(share_norm=lambda x: (x[\"glass\"] / x[\"companies\"]) - 1) ) return", ") export_chart(sector_comparison_chart, \"glass_sector_validation\") # Chart comparing geo distributions sh = read_shape() lad_shares =", "1]) corr_list.append([x, corr]) lads_corr_dict = {k[0]: k[1] for k in corr_list} lads_sorted =", "load_sic_taxonomy, extract_sic_code_description, ) from sg_covid_impact.descriptive import ( assign_nuts1_to_lad, read_shape, plot_choro, read_lad_nuts1_lookup, make_section_division_lookup, )", "get_sector() companies = ( companies_address[[\"company_number\", \"postcode\"]] .merge( companies_sector.query(\"rank==1\")[[\"company_number\", 
\"SIC4_code\"]], on=\"company_number\", ) .assign(division=lambda x:", "ch, variable): \"\"\"Compare distributions between Glass and CH\"\"\" out = ( pd.concat( [df[[variable]].value_counts(normalize=True)", "from sg_covid_impact.descriptive import ( assign_nuts1_to_lad, read_shape, plot_choro, read_lad_nuts1_lookup, make_section_division_lookup, ) from sg_covid_impact.utils.altair_save_utils import", "in x[\"SIC4_code\"]]) .assign(division_name=lambda x: x[\"division\"].map(_DIV_NAME_LOOKUP)) .merge(nspl, left_on=\"postcode\", right_on=\"pcds\") ) return companies def fetch_nspl():", "in [glass, ch]], axis=1 ) .rename(columns={0: \"glass\", 1: \"companies\"}) .assign(share_norm=lambda x: (x[\"glass\"] /", "pd.concat( [ df.groupby(\"laua\").apply( lambda x: x[\"division\"].value_counts(normalize=True) ) for df, name in zip([glass_meta_sc, companies_sc],", "nspl[geo].map(name_dict) nspl[\"nuts1\"] = nspl[geo].apply(assign_nuts1_to_lad) return nspl def make_shares_comparison(glass, ch, variable): \"\"\"Compare distributions between", "def fetch_nspl(): \"\"\"Fetch NSPL (if needed)\"\"\" nspl_url = \"https://www.arcgis.com/sharing/rest/content/items/4df8a1a188e74542aebee164525d7ca9/data\" if os.path.exists(nspl_target) is True:", ") # Calculate correlations sector_shares[[\"glass\", \"companies\"]].corr() # Sorted divisions sorted_divs = sector_shares.sort_values( [\"section_name\",", "from sg_covid_impact.utils.altair_save_utils import ( google_chrome_driver_setup, save_altair, ) from sg_covid_impact.utils.altair_s3 import export_chart import sg_covid_impact", "= make_shares_comparison(glass_meta_sc, companies_sc, \"laua\") lad_shares[[\"glass\", \"companies\"]].corr() merged = sh.merge( lad_shares.reset_index(drop=False), left_on=\"lad19cd\", right_on=\"laua\" )", "Focus on Scotland # Scot glass_meta_sc, companies_sc = [ df.query(\"nuts1=='Scotland'\").reset_index(drop=True) for df in", "vs CH share\", \"lad19nm\", scale_type=\"linear\" ) # 
.configure_view(strokeWidth=0) .properties(height=300, width=200) ) glass_validation =", "divisions sorted_divs = sector_shares.sort_values( [\"section_name\", \"share_norm\"], ascending=[True, False] )[\"division\"].to_list() sector_shares[\"division_name\"] = sector_shares[\"division\"].map(_DIV_NAME_LOOKUP) #", "codes UK as at 04_20.csv\") ) name_dict = name_lu.set_index(f\"{geo_var_name}CD\")[f\"{geo_var_name}NM\"].to_dict() return name_dict def read_nspl(", "logging.info(\"Already collected NSPL\") else: os.makedirs(nspl_target, exist_ok=True) req = requests.get(nspl_url) zipf = ZipFile(BytesIO(req.content)).extractall(nspl_target) def", "\"share_norm\", \"Glass vs CH share\", \"lad19nm\", scale_type=\"linear\" ) # .configure_view(strokeWidth=0) .properties(height=300, width=200) )", "\"ch\"]].corr().iloc[0, 1]) corr_list.append([x, corr]) lads_corr_dict = {k[0]: k[1] for k in corr_list} lads_sorted", "merged = sh.merge( lad_shares.reset_index(drop=False), left_on=\"lad19cd\", right_on=\"laua\" ) merged_json = json.loads(merged.to_json()) glass_share_map = (", "= pd.read_csv( os.path.join(meta_location, \"LA_UA names and codes UK as at 04_20.csv\") ) name_dict", "( assign_nuts1_to_lad, read_shape, plot_choro, read_lad_nuts1_lookup, make_section_division_lookup, ) from sg_covid_impact.utils.altair_save_utils import ( google_chrome_driver_setup, save_altair,", "= make_lad_lookup() # Read everything nspl = read_nspl() companies = make_companies() glass_meta =", "( load_sic_taxonomy, extract_sic_code_description, ) from sg_covid_impact.descriptive import ( assign_nuts1_to_lad, read_shape, plot_choro, read_lad_nuts1_lookup, make_section_division_lookup,", ") return glass_ch_meta def make_companies(): \"\"\"Make the companies house table\"\"\" logging.info(\"Making CH\") companies_address", "companies_sector.query(\"rank==1\")[[\"company_number\", \"SIC4_code\"]], on=\"company_number\", ) .assign(division=lambda x: [c[:2] for c in x[\"SIC4_code\"]]) 
.assign(division_name=lambda x:", "sector\", \"share correlation\"]), color=alt.Color(\"glass_ch_correlation\", legend=None), ) ).properties(width=100, height=300) lad_share_comparison = alt.hconcat(rep_chart, corr_chart, spacing=1).resolve_scale(", "save_altair( sector_comparison_chart, \"glass_sector_validation\", driver=driver, path=FIG_PATH ) export_chart(sector_comparison_chart, \"glass_sector_validation\") # Chart comparing geo distributions", "= lad_sector_shares.query(f\"laua=='{x}'\") corr = np.float(sel[[\"glass\", \"ch\"]].corr().iloc[0, 1]) corr_list.append([x, corr]) lads_corr_dict = {k[0]: k[1]", "get_sector from sg_covid_impact.make_sic_division import ( load_sic_taxonomy, extract_sic_code_description, ) from sg_covid_impact.descriptive import ( assign_nuts1_to_lad,", "pd.DataFrame(corr_list, columns=[\"lad_name\", \"glass_ch_correlation\"]) # Plot rep_chart = ( alt.Chart(lad_sector_shares) .transform_filter(alt.datum.share_norm > 0) .mark_rect()", "fetch_nspl(): \"\"\"Fetch NSPL (if needed)\"\"\" nspl_url = \"https://www.arcgis.com/sharing/rest/content/items/4df8a1a188e74542aebee164525d7ca9/data\" if os.path.exists(nspl_target) is True: logging.info(\"Already", ") for df, name in zip([glass_meta_sc, companies_sc], [\"glass\", \"ch\"]) ], axis=1, ) ).fillna(0)", "zipfile import ZipFile from io import BytesIO import altair as alt from sg_covid_impact.getters.glass_house", "04_20.csv\", geo_var_name=\"LAD20\" ): \"\"\"Read and tag NSPL\"\"\" logging.info(\"Reading NSPL\") nspl = pd.read_csv(nspl_location, usecols=[\"pcds\",", "return out fetch_nspl() # Lookups _DIV_NAME_LOOKUP = extract_sic_code_description(load_sic_taxonomy(), \"Division\") _SECTION_DIVISION_LOOKUP, _SECTION_NAME_LOOKUP = make_section_division_lookup()", "os.path.join(nspl_target, \"Data\", \"NSPL_NOV_2020_UK.csv\") meta_location = os.path.join(nspl_target, \"Documents\") # Functions def make_glass_meta(companies, score=60): \"\"\"Makes", "= get_sector() companies = ( 
companies_address[[\"company_number\", \"postcode\"]] .merge( companies_sector.query(\"rank==1\")[[\"company_number\", \"SIC4_code\"]], on=\"company_number\", ) .assign(division=lambda", "FIG_PATH = f\"{project_dir}/figures/scotland\" driver = google_chrome_driver_setup() nspl_target = f\"{project_dir}/data/raw/nspl\" nspl_location = os.path.join(nspl_target, \"Data\",", "= get_address() companies_sector = get_sector() companies = ( companies_address[[\"company_number\", \"postcode\"]] .merge( companies_sector.query(\"rank==1\")[[\"company_number\", \"SIC4_code\"]],", "\"companies\"}) .assign(share_norm=lambda x: (x[\"glass\"] / x[\"companies\"]) - 1) ) return out fetch_nspl() #", "[ df.groupby(\"laua\").apply( lambda x: x[\"division\"].value_counts(normalize=True) ) for df, name in zip([glass_meta_sc, companies_sc], [\"glass\",", "False] )[\"division\"].to_list() sector_shares[\"division_name\"] = sector_shares[\"division\"].map(_DIV_NAME_LOOKUP) # Chart comparing sector distributions sector_comparison_chart = (", "color=alt.Color(\"section_name\", title=\"Section\"), tooltip=[\"division_name\"], ) ).properties(height=300, width=150) sector_comparison_chart save_altair( sector_comparison_chart, \"glass_sector_validation\", driver=driver, path=FIG_PATH )", "= glass_house.query(f\"score>{score}\").merge( companies, on=\"company_number\" ) return glass_ch_meta def make_companies(): \"\"\"Make the companies house", "[\"section_name\", \"share_norm\"], ascending=[True, False] )[\"division\"].to_list() sector_shares[\"division_name\"] = sector_shares[\"division\"].map(_DIV_NAME_LOOKUP) # Chart comparing sector distributions", "vs CH share\", scale=alt.Scale(scheme=\"Spectral\", type=\"log\"), legend=alt.Legend(orient=\"bottom\"), ), tooltip=[\"lad_name\", \"division_name\", \"share_norm\"], ) ).properties(width=400, height=300)", "for df in [glass, ch]], axis=1 ) .rename(columns={0: \"glass\", 1: \"companies\"}) .assign(share_norm=lambda x:", "( 
alt.Chart(lad_sector_shares) .transform_filter(alt.datum.share_norm > 0) .mark_rect() .encode( y=alt.Y(\"lad_name\", sort=lads_sorted, title=\"Local Authority\"), x=alt.X(\"division\", axis=alt.Axis(labels=False,", "UK as at 04_20.csv\", geo_var_name=\"LAD20\" ): \"\"\"Read and tag NSPL\"\"\" logging.info(\"Reading NSPL\") nspl", "metadata table\"\"\" logging.info(\"Making glass metadata\") glass_house = get_glass_house() glass_ch_meta = glass_house.query(f\"score>{score}\").merge( companies, on=\"company_number\"", "CH\") companies_address = get_address() companies_sector = get_sector() companies = ( companies_address[[\"company_number\", \"postcode\"]] .merge(", "collected NSPL\") else: os.makedirs(nspl_target, exist_ok=True) req = requests.get(nspl_url) zipf = ZipFile(BytesIO(req.content)).extractall(nspl_target) def make_lad_lookup(geo_var_name=\"LAD20\"):", "name_lu = pd.read_csv(os.path.join(meta_location, names)) name_dict = name_lu.set_index(f\"{geo_var_name}CD\")[f\"{geo_var_name}NM\"].to_dict() nspl[f\"{geo}_name\"] = nspl[geo].map(name_dict) nspl[\"nuts1\"] = nspl[geo].apply(assign_nuts1_to_lad)", "name_dict def read_nspl( geo=\"laua\", names=\"LA_UA names and codes UK as at 04_20.csv\", geo_var_name=\"LAD20\"", "= ZipFile(BytesIO(req.content)).extractall(nspl_target) def make_lad_lookup(geo_var_name=\"LAD20\"): \"\"\"Lookup between LAD names and codes 2020\"\"\" name_lu =", "x in set(lad_sector_shares[\"laua\"]): sel = lad_sector_shares.query(f\"laua=='{x}'\") corr = np.float(sel[[\"glass\", \"ch\"]].corr().iloc[0, 1]) corr_list.append([x, corr])", "metadata\") glass_house = get_glass_house() glass_ch_meta = glass_house.query(f\"score>{score}\").merge( companies, on=\"company_number\" ) return glass_ch_meta def", "x=alt.X(\"division\", axis=alt.Axis(labels=False, ticks=False)), color=alt.Color( \"share_norm\", sort=\"descending\", title=\"Glass vs CH share\", scale=alt.Scale(scheme=\"Spectral\", type=\"log\"), legend=alt.Legend(orient=\"bottom\"),", 
"set(lad_sector_shares[\"laua\"]): sel = lad_sector_shares.query(f\"laua=='{x}'\") corr = np.float(sel[[\"glass\", \"ch\"]].corr().iloc[0, 1]) corr_list.append([x, corr]) lads_corr_dict =", "by division coverage lad_sector_shares = ( pd.concat( [ df.groupby(\"laua\").apply( lambda x: x[\"division\"].value_counts(normalize=True) )", "# Calculate correlations sector_shares[[\"glass\", \"companies\"]].corr() # Sorted divisions sorted_divs = sector_shares.sort_values( [\"section_name\", \"share_norm\"],", "make_shares_comparison(glass_meta_sc, companies_sc, \"division\") .reset_index(drop=False) .assign( section_name=lambda x: x[\"division\"] .map(_SECTION_DIVISION_LOOKUP) .map(_SECTION_NAME_LOOKUP) ) .dropna(axis=0) )", "corr_list.append([x, corr]) lads_corr_dict = {k[0]: k[1] for k in corr_list} lads_sorted = [x[0]", "left_on=\"postcode\", right_on=\"pcds\") ) return companies def fetch_nspl(): \"\"\"Fetch NSPL (if needed)\"\"\" nspl_url =", "1) ) return out fetch_nspl() # Lookups _DIV_NAME_LOOKUP = extract_sic_code_description(load_sic_taxonomy(), \"Division\") _SECTION_DIVISION_LOOKUP, _SECTION_NAME_LOOKUP", "return companies def fetch_nspl(): \"\"\"Fetch NSPL (if needed)\"\"\" nspl_url = \"https://www.arcgis.com/sharing/rest/content/items/4df8a1a188e74542aebee164525d7ca9/data\" if os.path.exists(nspl_target)", "(x[\"glass\"] / x[\"companies\"]) - 1) ) return out fetch_nspl() # Lookups _DIV_NAME_LOOKUP =", "sg_covid_impact.getters.companies_house import get_address, get_sector from sg_covid_impact.make_sic_division import ( load_sic_taxonomy, extract_sic_code_description, ) from sg_covid_impact.descriptive", ".merge( companies_sector.query(\"rank==1\")[[\"company_number\", \"SIC4_code\"]], on=\"company_number\", ) .assign(division=lambda x: [c[:2] for c in x[\"SIC4_code\"]]) .assign(division_name=lambda", "companies = make_companies() glass_meta = make_glass_meta(companies) # Focus on Scotland # Scot glass_meta_sc,", "title=None, sort=lads_sorted, 
axis=alt.Axis(labels=False, ticks=False, grid=True), ), x=alt.X(\"glass_ch_correlation\", title=[\"Glass-CH sector\", \"share correlation\"]), color=alt.Color(\"glass_ch_correlation\", legend=None),", "= [] for x in set(lad_sector_shares[\"laua\"]): sel = lad_sector_shares.query(f\"laua=='{x}'\") corr = np.float(sel[[\"glass\", \"ch\"]].corr().iloc[0,", "alt.hconcat(sector_comparison_chart, glass_share_map) glass_validation save_altair(glass_validation, \"glass_place_validation\", driver, path=FIG_PATH) export_chart(glass_validation, \"glass_place_validation\") # LAD by division", "companies house table\"\"\" logging.info(\"Making CH\") companies_address = get_address() companies_sector = get_sector() companies =", "usecols=[\"pcds\", geo]).dropna( axis=0, subset=[geo] ) name_lu = pd.read_csv(os.path.join(meta_location, names)) name_dict = name_lu.set_index(f\"{geo_var_name}CD\")[f\"{geo_var_name}NM\"].to_dict() nspl[f\"{geo}_name\"]", "sector_shares[\"division\"].map(_DIV_NAME_LOOKUP) # Chart comparing sector distributions sector_comparison_chart = ( alt.Chart(sector_shares) .mark_bar() .encode( y=alt.Y(\"division\",", "section_name=lambda x: x[\"division\"] .map(_SECTION_DIVISION_LOOKUP) .map(_SECTION_NAME_LOOKUP) ) .dropna(axis=0) ) # Calculate correlations sector_shares[[\"glass\", \"companies\"]].corr()", ") # .configure_view(strokeWidth=0) .properties(height=300, width=200) ) glass_validation = alt.hconcat(sector_comparison_chart, glass_share_map) glass_validation save_altair(glass_validation, \"glass_place_validation\",", "_LAD_NAME_DICT = make_lad_lookup() # Read everything nspl = read_nspl() companies = make_companies() glass_meta", "from sg_covid_impact.getters.companies_house import get_address, get_sector from sg_covid_impact.make_sic_division import ( load_sic_taxonomy, extract_sic_code_description, ) from", "axis=alt.Axis(labels=False, ticks=False, grid=True), ), x=alt.X(\"glass_ch_correlation\", title=[\"Glass-CH sector\", \"share correlation\"]), 
color=alt.Color(\"glass_ch_correlation\", legend=None), ) ).properties(width=100,", "lad_share_comparison = alt.hconcat(rep_chart, corr_chart, spacing=1).resolve_scale( color=\"independent\" ) save_altair( lad_share_comparison, \"glass_sector_place_validation\", driver=driver, path=FIG_PATH )", "# Chart comparing geo distributions sh = read_shape() lad_shares = make_shares_comparison(glass_meta_sc, companies_sc, \"laua\")", ".assign(division_name=lambda x: x[\"division\"].map(_DIV_NAME_LOOKUP)) .assign(lad_name=lambda x: x[\"laua\"].map(_LAD_NAME_DICT)) ) corr_list = [] for x in", ".encode( y=alt.Y( \"lad_name\", title=None, sort=lads_sorted, axis=alt.Axis(labels=False, ticks=False, grid=True), ), x=alt.X(\"glass_ch_correlation\", title=[\"Glass-CH sector\", \"share", "meta_location = os.path.join(nspl_target, \"Documents\") # Functions def make_glass_meta(companies, score=60): \"\"\"Makes the glass metadata", "make_companies(): \"\"\"Make the companies house table\"\"\" logging.info(\"Making CH\") companies_address = get_address() companies_sector =", "companies_sc, \"laua\") lad_shares[[\"glass\", \"companies\"]].corr() merged = sh.merge( lad_shares.reset_index(drop=False), left_on=\"lad19cd\", right_on=\"laua\" ) merged_json =", "right_on=\"pcds\") ) return companies def fetch_nspl(): \"\"\"Fetch NSPL (if needed)\"\"\" nspl_url = \"https://www.arcgis.com/sharing/rest/content/items/4df8a1a188e74542aebee164525d7ca9/data\"", "= sh.merge( lad_shares.reset_index(drop=False), left_on=\"lad19cd\", right_on=\"laua\" ) merged_json = json.loads(merged.to_json()) glass_share_map = ( plot_choro(", "{k[0]: k[1] for k in corr_list} lads_sorted = [x[0] for x in sorted(corr_list,", "x[\"SIC4_code\"]]) .assign(division_name=lambda x: x[\"division\"].map(_DIV_NAME_LOOKUP)) .merge(nspl, left_on=\"postcode\", right_on=\"pcds\") ) return companies def fetch_nspl(): \"\"\"Fetch", ") .dropna(axis=0) ) # Calculate correlations sector_shares[[\"glass\", \"companies\"]].corr() # Sorted 
divisions sorted_divs =", "x: x[\"division\"] .map(_SECTION_DIVISION_LOOKUP) .map(_SECTION_NAME_LOOKUP) ) .dropna(axis=0) ) # Calculate correlations sector_shares[[\"glass\", \"companies\"]].corr() #", "k in corr_list} lads_sorted = [x[0] for x in sorted(corr_list, key=lambda x: x[1],", "sh = read_shape() lad_shares = make_shares_comparison(glass_meta_sc, companies_sc, \"laua\") lad_shares[[\"glass\", \"companies\"]].corr() merged = sh.merge(", "width=200) ) glass_validation = alt.hconcat(sector_comparison_chart, glass_share_map) glass_validation save_altair(glass_validation, \"glass_place_validation\", driver, path=FIG_PATH) export_chart(glass_validation, \"glass_place_validation\")", "[] for x in set(lad_sector_shares[\"laua\"]): sel = lad_sector_shares.query(f\"laua=='{x}'\") corr = np.float(sel[[\"glass\", \"ch\"]].corr().iloc[0, 1])", "\"NSPL_NOV_2020_UK.csv\") meta_location = os.path.join(nspl_target, \"Documents\") # Functions def make_glass_meta(companies, score=60): \"\"\"Makes the glass", "alt.Chart(sector_shares) .mark_bar() .encode( y=alt.Y(\"division\", sort=sorted_divs, axis=alt.Axis(labels=False, ticks=False)), x=alt.X(\"share_norm\", title=\"Glass vs CH share\"), color=alt.Color(\"section_name\",", "from sg_covid_impact.getters.glass_house import get_glass_house from sg_covid_impact.getters.companies_house import get_address, get_sector from sg_covid_impact.make_sic_division import (", ".map(_SECTION_DIVISION_LOOKUP) .map(_SECTION_NAME_LOOKUP) ) .dropna(axis=0) ) # Calculate correlations sector_shares[[\"glass\", \"companies\"]].corr() # Sorted divisions", "everything nspl = read_nspl() companies = make_companies() glass_meta = make_glass_meta(companies) # Focus on", "( make_shares_comparison(glass_meta_sc, companies_sc, \"division\") .reset_index(drop=False) .assign( section_name=lambda x: x[\"division\"] .map(_SECTION_DIVISION_LOOKUP) .map(_SECTION_NAME_LOOKUP) ) .dropna(axis=0)", ") .assign(division=lambda x: [c[:2] for c in x[\"SIC4_code\"]]) 
.assign(division_name=lambda x: x[\"division\"].map(_DIV_NAME_LOOKUP)) .merge(nspl, left_on=\"postcode\",", ".map(_SECTION_NAME_LOOKUP) ) .dropna(axis=0) ) # Calculate correlations sector_shares[[\"glass\", \"companies\"]].corr() # Sorted divisions sorted_divs", "sorted_divs = sector_shares.sort_values( [\"section_name\", \"share_norm\"], ascending=[True, False] )[\"division\"].to_list() sector_shares[\"division_name\"] = sector_shares[\"division\"].map(_DIV_NAME_LOOKUP) # Chart", "\"share correlation\"]), color=alt.Color(\"glass_ch_correlation\", legend=None), ) ).properties(width=100, height=300) lad_share_comparison = alt.hconcat(rep_chart, corr_chart, spacing=1).resolve_scale( color=\"independent\"", "vs CH share\"), color=alt.Color(\"section_name\", title=\"Section\"), tooltip=[\"division_name\"], ) ).properties(height=300, width=150) sector_comparison_chart save_altair( sector_comparison_chart, \"glass_sector_validation\",", ") glass_validation = alt.hconcat(sector_comparison_chart, glass_share_map) glass_validation save_altair(glass_validation, \"glass_place_validation\", driver, path=FIG_PATH) export_chart(glass_validation, \"glass_place_validation\") #", "pd.concat( [df[[variable]].value_counts(normalize=True) for df in [glass, ch]], axis=1 ) .rename(columns={0: \"glass\", 1: \"companies\"})", ") corr_list = [] for x in set(lad_sector_shares[\"laua\"]): sel = lad_sector_shares.query(f\"laua=='{x}'\") corr =", "share\"), color=alt.Color(\"section_name\", title=\"Section\"), tooltip=[\"division_name\"], ) ).properties(height=300, width=150) sector_comparison_chart save_altair( sector_comparison_chart, \"glass_sector_validation\", driver=driver, path=FIG_PATH", "import export_chart import sg_covid_impact project_dir = sg_covid_impact.project_dir FIG_PATH = f\"{project_dir}/figures/scotland\" driver = google_chrome_driver_setup()", "in [glass_meta, companies] ] sector_shares = ( make_shares_comparison(glass_meta_sc, companies_sc, \"division\") 
.reset_index(drop=False) .assign( section_name=lambda", "reverse=True)] lads_corr_df = pd.DataFrame(corr_list, columns=[\"lad_name\", \"glass_ch_correlation\"]) # Plot rep_chart = ( alt.Chart(lad_sector_shares) .transform_filter(alt.datum.share_norm", "sort=sorted_divs, axis=alt.Axis(labels=False, ticks=False)), x=alt.X(\"share_norm\", title=\"Glass vs CH share\"), color=alt.Color(\"section_name\", title=\"Section\"), tooltip=[\"division_name\"], ) ).properties(height=300,", "x: x[\"laua\"].map(_LAD_NAME_DICT)) ) corr_list = [] for x in set(lad_sector_shares[\"laua\"]): sel = lad_sector_shares.query(f\"laua=='{x}'\")", "\"glass_sector_validation\", driver=driver, path=FIG_PATH ) export_chart(sector_comparison_chart, \"glass_sector_validation\") # Chart comparing geo distributions sh =", "title=\"Local Authority\"), x=alt.X(\"division\", axis=alt.Axis(labels=False, ticks=False)), color=alt.Color( \"share_norm\", sort=\"descending\", title=\"Glass vs CH share\", scale=alt.Scale(scheme=\"Spectral\",", "project_dir = sg_covid_impact.project_dir FIG_PATH = f\"{project_dir}/figures/scotland\" driver = google_chrome_driver_setup() nspl_target = f\"{project_dir}/data/raw/nspl\" nspl_location", "and codes UK as at 04_20.csv\") ) name_dict = name_lu.set_index(f\"{geo_var_name}CD\")[f\"{geo_var_name}NM\"].to_dict() return name_dict def", "title=\"Glass vs CH share\"), color=alt.Color(\"section_name\", title=\"Section\"), tooltip=[\"division_name\"], ) ).properties(height=300, width=150) sector_comparison_chart save_altair( sector_comparison_chart,", "in zip([glass_meta_sc, companies_sc], [\"glass\", \"ch\"]) ], axis=1, ) ).fillna(0) lad_sector_shares.columns = [\"glass\", \"ch\"]", "legend=None), ) ).properties(width=100, height=300) lad_share_comparison = alt.hconcat(rep_chart, corr_chart, spacing=1).resolve_scale( color=\"independent\" ) save_altair( lad_share_comparison,", "sector_comparison_chart save_altair( sector_comparison_chart, \"glass_sector_validation\", 
driver=driver, path=FIG_PATH ) export_chart(sector_comparison_chart, \"glass_sector_validation\") # Chart comparing geo" ]
[ "name=\"pardal\", version=\"0.1.0\", author=\"<NAME>\", author_email=\"<EMAIL>\", maintainer=\"<NAME>\", maintainer_email=\"<EMAIL>\", license=\"MIT\", url=\"https://github.com/anapaulagomes/pardal\", description=\"An accessible and customizable Twitter", "version=\"0.1.0\", author=\"<NAME>\", author_email=\"<EMAIL>\", maintainer=\"<NAME>\", maintainer_email=\"<EMAIL>\", license=\"MIT\", url=\"https://github.com/anapaulagomes/pardal\", description=\"An accessible and customizable Twitter client\",", "def read(fname): file_path = os.path.join(os.path.dirname(__file__), fname) return codecs.open(file_path, encoding=\"utf-8\").read() setup( name=\"pardal\", version=\"0.1.0\", author=\"<NAME>\",", "classifiers=[ \"Development Status :: 4 - Beta\", \"Intended Audience :: End Users/Desktop\", \"Topic", "System :: OS Independent\", \"License :: OSI Approved :: MIT License\", ], entry_points={},", "find_packages, setup def read(fname): file_path = os.path.join(os.path.dirname(__file__), fname) return codecs.open(file_path, encoding=\"utf-8\").read() setup( name=\"pardal\",", "#!/usr/bin/env python # -*- coding: utf-8 -*- import codecs import os from setuptools", "Twitter client\", packages=find_packages(exclude=[\"tests\", \"docs\"]), python_requires=\">=3.7\", install_requires=[\"\"], # FIXME classifiers=[ \"Development Status :: 4", "4 - Beta\", \"Intended Audience :: End Users/Desktop\", \"Topic :: Adaptive Technologies\", \"Programming", ":: End Users/Desktop\", \"Topic :: Adaptive Technologies\", \"Programming Language :: Python\", \"Programming Language", ":: 3.7\", \"Operating System :: OS Independent\", \"License :: OSI Approved :: MIT", "packages=find_packages(exclude=[\"tests\", \"docs\"]), python_requires=\">=3.7\", install_requires=[\"\"], # FIXME classifiers=[ \"Development Status :: 4 - Beta\",", "-*- import codecs import os from setuptools import find_packages, setup def read(fname): file_path", "from setuptools import find_packages, setup def read(fname): 
file_path = os.path.join(os.path.dirname(__file__), fname) return codecs.open(file_path,", "python # -*- coding: utf-8 -*- import codecs import os from setuptools import", "Language :: Python :: 3.7\", \"Operating System :: OS Independent\", \"License :: OSI", ":: OS Independent\", \"License :: OSI Approved :: MIT License\", ], entry_points={}, )", "license=\"MIT\", url=\"https://github.com/anapaulagomes/pardal\", description=\"An accessible and customizable Twitter client\", packages=find_packages(exclude=[\"tests\", \"docs\"]), python_requires=\">=3.7\", install_requires=[\"\"], #", "setup( name=\"pardal\", version=\"0.1.0\", author=\"<NAME>\", author_email=\"<EMAIL>\", maintainer=\"<NAME>\", maintainer_email=\"<EMAIL>\", license=\"MIT\", url=\"https://github.com/anapaulagomes/pardal\", description=\"An accessible and customizable", "# FIXME classifiers=[ \"Development Status :: 4 - Beta\", \"Intended Audience :: End", "and customizable Twitter client\", packages=find_packages(exclude=[\"tests\", \"docs\"]), python_requires=\">=3.7\", install_requires=[\"\"], # FIXME classifiers=[ \"Development Status", "# -*- coding: utf-8 -*- import codecs import os from setuptools import find_packages,", "codecs import os from setuptools import find_packages, setup def read(fname): file_path = os.path.join(os.path.dirname(__file__),", "import codecs import os from setuptools import find_packages, setup def read(fname): file_path =", ":: Adaptive Technologies\", \"Programming Language :: Python\", \"Programming Language :: Python :: 3.7\",", "3.7\", \"Operating System :: OS Independent\", \"License :: OSI Approved :: MIT License\",", "= os.path.join(os.path.dirname(__file__), fname) return codecs.open(file_path, encoding=\"utf-8\").read() setup( name=\"pardal\", version=\"0.1.0\", author=\"<NAME>\", author_email=\"<EMAIL>\", maintainer=\"<NAME>\", maintainer_email=\"<EMAIL>\",", "\"docs\"]), python_requires=\">=3.7\", install_requires=[\"\"], # FIXME classifiers=[ \"Development 
Status :: 4 - Beta\", \"Intended", "Audience :: End Users/Desktop\", \"Topic :: Adaptive Technologies\", \"Programming Language :: Python\", \"Programming", "return codecs.open(file_path, encoding=\"utf-8\").read() setup( name=\"pardal\", version=\"0.1.0\", author=\"<NAME>\", author_email=\"<EMAIL>\", maintainer=\"<NAME>\", maintainer_email=\"<EMAIL>\", license=\"MIT\", url=\"https://github.com/anapaulagomes/pardal\", description=\"An", "Language :: Python\", \"Programming Language :: Python :: 3.7\", \"Operating System :: OS", "python_requires=\">=3.7\", install_requires=[\"\"], # FIXME classifiers=[ \"Development Status :: 4 - Beta\", \"Intended Audience", "install_requires=[\"\"], # FIXME classifiers=[ \"Development Status :: 4 - Beta\", \"Intended Audience ::", "Python\", \"Programming Language :: Python :: 3.7\", \"Operating System :: OS Independent\", \"License", "file_path = os.path.join(os.path.dirname(__file__), fname) return codecs.open(file_path, encoding=\"utf-8\").read() setup( name=\"pardal\", version=\"0.1.0\", author=\"<NAME>\", author_email=\"<EMAIL>\", maintainer=\"<NAME>\",", "import os from setuptools import find_packages, setup def read(fname): file_path = os.path.join(os.path.dirname(__file__), fname)", "read(fname): file_path = os.path.join(os.path.dirname(__file__), fname) return codecs.open(file_path, encoding=\"utf-8\").read() setup( name=\"pardal\", version=\"0.1.0\", author=\"<NAME>\", author_email=\"<EMAIL>\",", "setup def read(fname): file_path = os.path.join(os.path.dirname(__file__), fname) return codecs.open(file_path, encoding=\"utf-8\").read() setup( name=\"pardal\", version=\"0.1.0\",", "os from setuptools import find_packages, setup def read(fname): file_path = os.path.join(os.path.dirname(__file__), fname) return", "author=\"<NAME>\", author_email=\"<EMAIL>\", maintainer=\"<NAME>\", maintainer_email=\"<EMAIL>\", license=\"MIT\", url=\"https://github.com/anapaulagomes/pardal\", description=\"An accessible and customizable 
Twitter client\", packages=find_packages(exclude=[\"tests\",", "maintainer_email=\"<EMAIL>\", license=\"MIT\", url=\"https://github.com/anapaulagomes/pardal\", description=\"An accessible and customizable Twitter client\", packages=find_packages(exclude=[\"tests\", \"docs\"]), python_requires=\">=3.7\", install_requires=[\"\"],", "url=\"https://github.com/anapaulagomes/pardal\", description=\"An accessible and customizable Twitter client\", packages=find_packages(exclude=[\"tests\", \"docs\"]), python_requires=\">=3.7\", install_requires=[\"\"], # FIXME", "Technologies\", \"Programming Language :: Python\", \"Programming Language :: Python :: 3.7\", \"Operating System", "encoding=\"utf-8\").read() setup( name=\"pardal\", version=\"0.1.0\", author=\"<NAME>\", author_email=\"<EMAIL>\", maintainer=\"<NAME>\", maintainer_email=\"<EMAIL>\", license=\"MIT\", url=\"https://github.com/anapaulagomes/pardal\", description=\"An accessible and", "utf-8 -*- import codecs import os from setuptools import find_packages, setup def read(fname):", "\"Programming Language :: Python :: 3.7\", \"Operating System :: OS Independent\", \"License ::", "client\", packages=find_packages(exclude=[\"tests\", \"docs\"]), python_requires=\">=3.7\", install_requires=[\"\"], # FIXME classifiers=[ \"Development Status :: 4 -", "Adaptive Technologies\", \"Programming Language :: Python\", \"Programming Language :: Python :: 3.7\", \"Operating", "author_email=\"<EMAIL>\", maintainer=\"<NAME>\", maintainer_email=\"<EMAIL>\", license=\"MIT\", url=\"https://github.com/anapaulagomes/pardal\", description=\"An accessible and customizable Twitter client\", packages=find_packages(exclude=[\"tests\", \"docs\"]),", "- Beta\", \"Intended Audience :: End Users/Desktop\", \"Topic :: Adaptive Technologies\", \"Programming Language", "maintainer=\"<NAME>\", maintainer_email=\"<EMAIL>\", license=\"MIT\", url=\"https://github.com/anapaulagomes/pardal\", description=\"An accessible and customizable Twitter client\", 
packages=find_packages(exclude=[\"tests\", \"docs\"]), python_requires=\">=3.7\",", ":: 4 - Beta\", \"Intended Audience :: End Users/Desktop\", \"Topic :: Adaptive Technologies\",", "os.path.join(os.path.dirname(__file__), fname) return codecs.open(file_path, encoding=\"utf-8\").read() setup( name=\"pardal\", version=\"0.1.0\", author=\"<NAME>\", author_email=\"<EMAIL>\", maintainer=\"<NAME>\", maintainer_email=\"<EMAIL>\", license=\"MIT\",", "\"Operating System :: OS Independent\", \"License :: OSI Approved :: MIT License\", ],", "\"Topic :: Adaptive Technologies\", \"Programming Language :: Python\", \"Programming Language :: Python ::", "fname) return codecs.open(file_path, encoding=\"utf-8\").read() setup( name=\"pardal\", version=\"0.1.0\", author=\"<NAME>\", author_email=\"<EMAIL>\", maintainer=\"<NAME>\", maintainer_email=\"<EMAIL>\", license=\"MIT\", url=\"https://github.com/anapaulagomes/pardal\",", "setuptools import find_packages, setup def read(fname): file_path = os.path.join(os.path.dirname(__file__), fname) return codecs.open(file_path, encoding=\"utf-8\").read()", "FIXME classifiers=[ \"Development Status :: 4 - Beta\", \"Intended Audience :: End Users/Desktop\",", "\"Development Status :: 4 - Beta\", \"Intended Audience :: End Users/Desktop\", \"Topic ::", "\"Intended Audience :: End Users/Desktop\", \"Topic :: Adaptive Technologies\", \"Programming Language :: Python\",", "End Users/Desktop\", \"Topic :: Adaptive Technologies\", \"Programming Language :: Python\", \"Programming Language ::", "\"Programming Language :: Python\", \"Programming Language :: Python :: 3.7\", \"Operating System ::", "customizable Twitter client\", packages=find_packages(exclude=[\"tests\", \"docs\"]), python_requires=\">=3.7\", install_requires=[\"\"], # FIXME classifiers=[ \"Development Status ::", ":: Python :: 3.7\", \"Operating System :: OS Independent\", \"License :: OSI Approved", "Users/Desktop\", \"Topic :: Adaptive Technologies\", \"Programming Language :: 
Python\", \"Programming Language :: Python", "description=\"An accessible and customizable Twitter client\", packages=find_packages(exclude=[\"tests\", \"docs\"]), python_requires=\">=3.7\", install_requires=[\"\"], # FIXME classifiers=[", ":: Python\", \"Programming Language :: Python :: 3.7\", \"Operating System :: OS Independent\",", "Beta\", \"Intended Audience :: End Users/Desktop\", \"Topic :: Adaptive Technologies\", \"Programming Language ::", "-*- coding: utf-8 -*- import codecs import os from setuptools import find_packages, setup", "coding: utf-8 -*- import codecs import os from setuptools import find_packages, setup def", "codecs.open(file_path, encoding=\"utf-8\").read() setup( name=\"pardal\", version=\"0.1.0\", author=\"<NAME>\", author_email=\"<EMAIL>\", maintainer=\"<NAME>\", maintainer_email=\"<EMAIL>\", license=\"MIT\", url=\"https://github.com/anapaulagomes/pardal\", description=\"An accessible", "Python :: 3.7\", \"Operating System :: OS Independent\", \"License :: OSI Approved ::", "import find_packages, setup def read(fname): file_path = os.path.join(os.path.dirname(__file__), fname) return codecs.open(file_path, encoding=\"utf-8\").read() setup(", "Status :: 4 - Beta\", \"Intended Audience :: End Users/Desktop\", \"Topic :: Adaptive", "accessible and customizable Twitter client\", packages=find_packages(exclude=[\"tests\", \"docs\"]), python_requires=\">=3.7\", install_requires=[\"\"], # FIXME classifiers=[ \"Development" ]
[ "return df_import.notnull().values.any() def test_least_row_counts(df_import): \"\"\"Test if the dataframe has at least one row", "a bool value: True if the dataframe has at least one row of", "following functions: * test_column_names - returns bool if the column name match *", "if __name__ == '__main__': \"\"\"Main function Returns ------- bool a bool value if", "columns: try: tp_name = ( isinstance( df_import[name].iloc[1].item(), df_import[name].map(type))).any().tolist() except AttributeError: tp_name = (", "'gender', 'birthyear'] if df_import_columns == sorted(df_import_checklist): return True def test_nan_values(df_import): \"\"\"Test if the", "specified as the second argument 3. Values in each column have the same", "module and contains the following functions: * test_column_names - returns bool if the", "True if the dataframe has expected columns \"\"\" df_import_columns = sorted(df_import.columns.tolist()) df_import_checklist =", "values of the correct type Parameters ---------- df_import : Pandas Dataframe The dataset", "tool checks if the dataframe: 1. Has at least 10 rows of data", "dataframe has expected columns \"\"\" df_import_columns = sorted(df_import.columns.tolist()) df_import_checklist = ['trip_id', 'starttime', 'stoptime',", "has non value \"\"\" return df_import.notnull().values.any() def test_least_row_counts(df_import): \"\"\"Test if the dataframe has", "a bool value if the dataframe pass all the tests \"\"\" DATAFRAME =", "Contains only the columns that specified as the second argument 3. 
Values in", "the dataframe has at least one row of data Parameters ---------- df_import :", "Pandas Dataframe Returns ------- bool a bool value: True if the datatype of", "dataset imported as Pandas Dataframe Returns ------- bool a bool value: True if", "This file can also be imported as a module and contains the following", "the tests \"\"\" DATAFRAME = pd.read_csv( 'https://data.seattle.gov/api/views/tw7j-df_importaw/rows.csv?accessType=DOWNLOAD') # only fetch first 10 rows", "bool value: True if the datatype of each column match \"\"\" columns =", "True def test_nan_values(df_import): \"\"\"Test if the dataframe has non value Parameters ---------- df_import", "data 2. Contains only the columns that specified as the second argument 3.", "the dataframe has expected columns \"\"\" df_import_columns = sorted(df_import.columns.tolist()) df_import_checklist = ['trip_id', 'starttime',", "functions: * test_column_names - returns bool if the column name match * test_nan_values", "10 rows of data 2. Contains only the columns that specified as the", "* main - the main function of the script \"\"\" import pandas as", "bool a bool value: True if the dataframe has non value \"\"\" return", "quality This script downloads a dataset from Seattle Open Data Portal and imports", "the datatype of each column match \"\"\" columns = list(df_import) for name in", "are running this script in. 
This file can also be imported as a", "dataframe has at least one row of data Parameters ---------- df_import : Pandas", "if the column name match * test_nan_values - returns bool if the dataframe", "that `pandas` be installed within the Python environment you are running this script", "dataframe has expected columns Parameters ---------- df_import : Pandas Dataframe The dataset imported", "least one row of data Parameters ---------- df_import : Pandas Dataframe The dataset", "Returns ------- bool a bool value: True if the datatype of each column", "Pandas Dataframe The dataset imported as Pandas Dataframe Returns ------- bool a bool", "test_column_names(df_import): \"\"\"Test if the dataframe has expected columns Parameters ---------- df_import : Pandas", "Has at least 10 rows of data 2. Contains only the columns that", "only fetch first 10 rows for testing DATAFRAME = DATAFRAME.head(10) print(test_column_names(DATAFRAME) & test_datatype(DATAFRAME)", "pd DATAFRAMES = pd.read_csv( 'https://data.seattle.gov/api/views/tw7j-df_importaw/rows.csv?accessType=DOWNLOAD') def test_datatype(df_import): \"\"\"Test if all columns have values", "installed within the Python environment you are running this script in. This file", "df_import : Pandas Dataframe The dataset imported as Pandas Dataframe Returns ------- bool", "\"\"\"Test if the dataframe has expected columns Parameters ---------- df_import : Pandas Dataframe", "in. 
This file can also be imported as a module and contains the", "match \"\"\" columns = list(df_import) for name in columns: try: tp_name = (", "nan value * test_least_row_counts - returns bool if the dataframe has at least", "* test_nan_values - returns bool if the dataframe has nan value * test_least_row_counts", "a bool value: True if the dataframe has non value \"\"\" return df_import.notnull().values.any()", "and contains the following functions: * test_column_names - returns bool if the column", "in each column have the same python type This script requires that `pandas`", "'birthyear'] if df_import_columns == sorted(df_import_checklist): return True def test_nan_values(df_import): \"\"\"Test if the dataframe", "df_import.notnull().values.any() def test_least_row_counts(df_import): \"\"\"Test if the dataframe has at least one row of", "the dataframe has at least one row of data \"\"\" return df_import.shape[0] >=", "has at least one row of data * main - the main function", "Python environment you are running this script in. This file can also be", "Dataframe The dataset imported as Pandas Dataframe Returns ------- bool a bool value:", "name match * test_nan_values - returns bool if the dataframe has nan value", "has expected columns Parameters ---------- df_import : Pandas Dataframe The dataset imported as", "dataframe and check the quality This script downloads a dataset from Seattle Open", "isinstance( df_import[name].iloc[1].item(), df_import[name].map(type))).any().tolist() except AttributeError: tp_name = ( isinstance( df_import[name].iloc[1], df_import[name].map(type))).any().tolist() return tp_name", "within the Python environment you are running this script in. 
This file can", "also be imported as a module and contains the following functions: * test_column_names", "return True def test_nan_values(df_import): \"\"\"Test if the dataframe has non value Parameters ----------", "value Parameters ---------- df_import : Pandas Dataframe The dataset imported as Pandas Dataframe", "bool if the dataframe has nan value * test_least_row_counts - returns bool if", "value: True if the dataframe has at least one row of data \"\"\"", "datatype of each column match \"\"\" columns = list(df_import) for name in columns:", "def test_datatype(df_import): \"\"\"Test if all columns have values of the correct type Parameters", "df_import[name].map(type))).any().tolist() return tp_name def test_column_names(df_import): \"\"\"Test if the dataframe has expected columns Parameters", "1. Has at least 10 rows of data 2. Contains only the columns", "test_column_names - returns bool if the column name match * test_nan_values - returns", "of the script \"\"\" import pandas as pd DATAFRAMES = pd.read_csv( 'https://data.seattle.gov/api/views/tw7j-df_importaw/rows.csv?accessType=DOWNLOAD') def", "df_import.shape[0] >= 1 if __name__ == '__main__': \"\"\"Main function Returns ------- bool a", "dataframe: 1. Has at least 10 rows of data 2. Contains only the", "as a module and contains the following functions: * test_column_names - returns bool", "bool value: True if the dataframe has non value \"\"\" return df_import.notnull().values.any() def", "------- bool a bool value: True if the dataframe has non value \"\"\"", "a dataset from Seattle Open Data Portal and imports as a Pandas Dataframe.", "( isinstance( df_import[name].iloc[1].item(), df_import[name].map(type))).any().tolist() except AttributeError: tp_name = ( isinstance( df_import[name].iloc[1], df_import[name].map(type))).any().tolist() return", "of data 2. 
Contains only the columns that specified as the second argument", "has expected columns \"\"\" df_import_columns = sorted(df_import.columns.tolist()) df_import_checklist = ['trip_id', 'starttime', 'stoptime', 'bikeid',", "except AttributeError: tp_name = ( isinstance( df_import[name].iloc[1], df_import[name].map(type))).any().tolist() return tp_name def test_column_names(df_import): \"\"\"Test", "3. Values in each column have the same python type This script requires", "- returns bool if the dataframe has nan value * test_least_row_counts - returns", "at least one row of data Parameters ---------- df_import : Pandas Dataframe The", "return df_import.shape[0] >= 1 if __name__ == '__main__': \"\"\"Main function Returns ------- bool", "as Pandas Dataframe Returns ------- bool a bool value: True if the datatype", "if all columns have values of the correct type Parameters ---------- df_import :", "expected columns \"\"\" df_import_columns = sorted(df_import.columns.tolist()) df_import_checklist = ['trip_id', 'starttime', 'stoptime', 'bikeid', 'tripduration',", "the dataframe: 1. Has at least 10 rows of data 2. Contains only", "return tp_name def test_column_names(df_import): \"\"\"Test if the dataframe has expected columns Parameters ----------", "columns that specified as the second argument 3. Values in each column have", "data * main - the main function of the script \"\"\" import pandas", "be installed within the Python environment you are running this script in. This", "rows of data 2. 
Contains only the columns that specified as the second", "if the dataframe has non value \"\"\" return df_import.notnull().values.any() def test_least_row_counts(df_import): \"\"\"Test if", "DATAFRAME = pd.read_csv( 'https://data.seattle.gov/api/views/tw7j-df_importaw/rows.csv?accessType=DOWNLOAD') # only fetch first 10 rows for testing DATAFRAME", "returns bool if the dataframe has nan value * test_least_row_counts - returns bool", "one row of data Parameters ---------- df_import : Pandas Dataframe The dataset imported", "def test_least_row_counts(df_import): \"\"\"Test if the dataframe has at least one row of data", "column have the same python type This script requires that `pandas` be installed", "dataframe has non value \"\"\" return df_import.notnull().values.any() def test_least_row_counts(df_import): \"\"\"Test if the dataframe", "Parameters ---------- df_import : Pandas Dataframe The dataset imported as Pandas Dataframe Returns", "- returns bool if the dataframe has at least one row of data", "sorted(df_import.columns.tolist()) df_import_checklist = ['trip_id', 'starttime', 'stoptime', 'bikeid', 'tripduration', 'from_station_name', 'to_station_name', 'from_station_id', 'to_station_id', 'usertype',", "for name in columns: try: tp_name = ( isinstance( df_import[name].iloc[1].item(), df_import[name].map(type))).any().tolist() except AttributeError:", "\"\"\"Main function Returns ------- bool a bool value if the dataframe pass all", "'from_station_id', 'to_station_id', 'usertype', 'gender', 'birthyear'] if df_import_columns == sorted(df_import_checklist): return True def test_nan_values(df_import):", "has nan value * test_least_row_counts - returns bool if the dataframe has at", "name in columns: try: tp_name = ( isinstance( df_import[name].iloc[1].item(), df_import[name].map(type))).any().tolist() except AttributeError: tp_name", "row of data * main - the main function of the script \"\"\"", "returns bool if the column name match * test_nan_values - returns 
bool if", "pandas as pd DATAFRAMES = pd.read_csv( 'https://data.seattle.gov/api/views/tw7j-df_importaw/rows.csv?accessType=DOWNLOAD') def test_datatype(df_import): \"\"\"Test if all columns", "columns Parameters ---------- df_import : Pandas Dataframe The dataset imported as Pandas Dataframe", "dataframe pass all the tests \"\"\" DATAFRAME = pd.read_csv( 'https://data.seattle.gov/api/views/tw7j-df_importaw/rows.csv?accessType=DOWNLOAD') # only fetch", "imports as a Pandas Dataframe. This tool checks if the dataframe: 1. Has", "value: True if the datatype of each column match \"\"\" columns = list(df_import)", "DATAFRAMES = pd.read_csv( 'https://data.seattle.gov/api/views/tw7j-df_importaw/rows.csv?accessType=DOWNLOAD') def test_datatype(df_import): \"\"\"Test if all columns have values of", "pd.read_csv( 'https://data.seattle.gov/api/views/tw7j-df_importaw/rows.csv?accessType=DOWNLOAD') # only fetch first 10 rows for testing DATAFRAME = DATAFRAME.head(10)", "'tripduration', 'from_station_name', 'to_station_name', 'from_station_id', 'to_station_id', 'usertype', 'gender', 'birthyear'] if df_import_columns == sorted(df_import_checklist): return", "Dataframe. This tool checks if the dataframe: 1. Has at least 10 rows", "that specified as the second argument 3. 
Values in each column have the", "dataframe has at least one row of data \"\"\" return df_import.shape[0] >= 1", "if the dataframe has expected columns Parameters ---------- df_import : Pandas Dataframe The", "\"\"\"Test if the dataframe has non value Parameters ---------- df_import : Pandas Dataframe", "isinstance( df_import[name].iloc[1], df_import[name].map(type))).any().tolist() return tp_name def test_column_names(df_import): \"\"\"Test if the dataframe has expected", "each column match \"\"\" columns = list(df_import) for name in columns: try: tp_name", "'starttime', 'stoptime', 'bikeid', 'tripduration', 'from_station_name', 'to_station_name', 'from_station_id', 'to_station_id', 'usertype', 'gender', 'birthyear'] if df_import_columns", "# only fetch first 10 rows for testing DATAFRAME = DATAFRAME.head(10) print(test_column_names(DATAFRAME) &", "the Python environment you are running this script in. This file can also", "= ['trip_id', 'starttime', 'stoptime', 'bikeid', 'tripduration', 'from_station_name', 'to_station_name', 'from_station_id', 'to_station_id', 'usertype', 'gender', 'birthyear']", "as Pandas Dataframe Returns ------- bool a bool value: True if the dataframe", "script downloads a dataset from Seattle Open Data Portal and imports as a", "def test_nan_values(df_import): \"\"\"Test if the dataframe has non value Parameters ---------- df_import :", "match * test_nan_values - returns bool if the dataframe has nan value *", "if the datatype of each column match \"\"\" columns = list(df_import) for name", "one row of data * main - the main function of the script", "`pandas` be installed within the Python environment you are running this script in.", "\"\"\" df_import_columns = sorted(df_import.columns.tolist()) df_import_checklist = ['trip_id', 'starttime', 'stoptime', 'bikeid', 'tripduration', 'from_station_name', 'to_station_name',", "as pd DATAFRAMES = pd.read_csv( 'https://data.seattle.gov/api/views/tw7j-df_importaw/rows.csv?accessType=DOWNLOAD') 
def test_datatype(df_import): \"\"\"Test if all columns have", "'bikeid', 'tripduration', 'from_station_name', 'to_station_name', 'from_station_id', 'to_station_id', 'usertype', 'gender', 'birthyear'] if df_import_columns == sorted(df_import_checklist):", "data Parameters ---------- df_import : Pandas Dataframe The dataset imported as Pandas Dataframe", "'__main__': \"\"\"Main function Returns ------- bool a bool value if the dataframe pass", "------- bool a bool value if the dataframe pass all the tests \"\"\"", "column match \"\"\" columns = list(df_import) for name in columns: try: tp_name =", "dataset from Seattle Open Data Portal and imports as a Pandas Dataframe. This", "\"\"\"Create dataframe and check the quality This script downloads a dataset from Seattle", "test_least_row_counts - returns bool if the dataframe has at least one row of", "'stoptime', 'bikeid', 'tripduration', 'from_station_name', 'to_station_name', 'from_station_id', 'to_station_id', 'usertype', 'gender', 'birthyear'] if df_import_columns ==", "bool a bool value if the dataframe pass all the tests \"\"\" DATAFRAME", "rows for testing DATAFRAME = DATAFRAME.head(10) print(test_column_names(DATAFRAME) & test_datatype(DATAFRAME) & test_least_row_counts(DATAFRAME) & test_nan_values(DATAFRAME))", "import pandas as pd DATAFRAMES = pd.read_csv( 'https://data.seattle.gov/api/views/tw7j-df_importaw/rows.csv?accessType=DOWNLOAD') def test_datatype(df_import): \"\"\"Test if all", "Returns ------- bool a bool value: True if the dataframe has at least", "function Returns ------- bool a bool value if the dataframe pass all the", "The dataset imported as Pandas Dataframe Returns ------- bool a bool value: True", "---------- df_import : Pandas Dataframe The dataset imported as Pandas Dataframe Returns -------", "has non value Parameters ---------- df_import : Pandas Dataframe The dataset imported as", "= pd.read_csv( 'https://data.seattle.gov/api/views/tw7j-df_importaw/rows.csv?accessType=DOWNLOAD') # only 
fetch first 10 rows for testing DATAFRAME =", "if the dataframe pass all the tests \"\"\" DATAFRAME = pd.read_csv( 'https://data.seattle.gov/api/views/tw7j-df_importaw/rows.csv?accessType=DOWNLOAD') #", "'to_station_id', 'usertype', 'gender', 'birthyear'] if df_import_columns == sorted(df_import_checklist): return True def test_nan_values(df_import): \"\"\"Test", "columns have values of the correct type Parameters ---------- df_import : Pandas Dataframe", "type Parameters ---------- df_import : Pandas Dataframe The dataset imported as Pandas Dataframe", "non value \"\"\" return df_import.notnull().values.any() def test_least_row_counts(df_import): \"\"\"Test if the dataframe has at", "the dataframe has non value Parameters ---------- df_import : Pandas Dataframe The dataset", "script in. This file can also be imported as a module and contains", "Dataframe Returns ------- bool a bool value: True if the dataframe has expected", "columns \"\"\" df_import_columns = sorted(df_import.columns.tolist()) df_import_checklist = ['trip_id', 'starttime', 'stoptime', 'bikeid', 'tripduration', 'from_station_name',", "bool value: True if the dataframe has at least one row of data", "value if the dataframe pass all the tests \"\"\" DATAFRAME = pd.read_csv( 'https://data.seattle.gov/api/views/tw7j-df_importaw/rows.csv?accessType=DOWNLOAD')", "Open Data Portal and imports as a Pandas Dataframe. This tool checks if", "2. Contains only the columns that specified as the second argument 3. 
Values", "\"\"\" import pandas as pd DATAFRAMES = pd.read_csv( 'https://data.seattle.gov/api/views/tw7j-df_importaw/rows.csv?accessType=DOWNLOAD') def test_datatype(df_import): \"\"\"Test if", "['trip_id', 'starttime', 'stoptime', 'bikeid', 'tripduration', 'from_station_name', 'to_station_name', 'from_station_id', 'to_station_id', 'usertype', 'gender', 'birthyear'] if", "same python type This script requires that `pandas` be installed within the Python", "have the same python type This script requires that `pandas` be installed within", "10 rows for testing DATAFRAME = DATAFRAME.head(10) print(test_column_names(DATAFRAME) & test_datatype(DATAFRAME) & test_least_row_counts(DATAFRAME) &", "= ( isinstance( df_import[name].iloc[1], df_import[name].map(type))).any().tolist() return tp_name def test_column_names(df_import): \"\"\"Test if the dataframe", "Pandas Dataframe Returns ------- bool a bool value: True if the dataframe has", "from Seattle Open Data Portal and imports as a Pandas Dataframe. This tool", "dataframe has non value Parameters ---------- df_import : Pandas Dataframe The dataset imported", "------- bool a bool value: True if the datatype of each column match", "you are running this script in. 
This file can also be imported as", "'from_station_name', 'to_station_name', 'from_station_id', 'to_station_id', 'usertype', 'gender', 'birthyear'] if df_import_columns == sorted(df_import_checklist): return True", "at least one row of data * main - the main function of", "check the quality This script downloads a dataset from Seattle Open Data Portal", "downloads a dataset from Seattle Open Data Portal and imports as a Pandas", "This script requires that `pandas` be installed within the Python environment you are", "the script \"\"\" import pandas as pd DATAFRAMES = pd.read_csv( 'https://data.seattle.gov/api/views/tw7j-df_importaw/rows.csv?accessType=DOWNLOAD') def test_datatype(df_import):", "columns = list(df_import) for name in columns: try: tp_name = ( isinstance( df_import[name].iloc[1].item(),", "least one row of data * main - the main function of the", "and check the quality This script downloads a dataset from Seattle Open Data", "expected columns Parameters ---------- df_import : Pandas Dataframe The dataset imported as Pandas", "df_import_columns == sorted(df_import_checklist): return True def test_nan_values(df_import): \"\"\"Test if the dataframe has non", "of data Parameters ---------- df_import : Pandas Dataframe The dataset imported as Pandas", "1 if __name__ == '__main__': \"\"\"Main function Returns ------- bool a bool value", "correct type Parameters ---------- df_import : Pandas Dataframe The dataset imported as Pandas", "the column name match * test_nan_values - returns bool if the dataframe has", "row of data Parameters ---------- df_import : Pandas Dataframe The dataset imported as", "* test_column_names - returns bool if the column name match * test_nan_values -", "if the dataframe has nan value * test_least_row_counts - returns bool if the", "test_datatype(df_import): \"\"\"Test if all columns have values of the correct type Parameters ----------", "tp_name = ( isinstance( df_import[name].iloc[1].item(), 
df_import[name].map(type))).any().tolist() except AttributeError: tp_name = ( isinstance( df_import[name].iloc[1],", "a module and contains the following functions: * test_column_names - returns bool if", "if the dataframe has expected columns \"\"\" df_import_columns = sorted(df_import.columns.tolist()) df_import_checklist = ['trip_id',", "value * test_least_row_counts - returns bool if the dataframe has at least one", "least one row of data \"\"\" return df_import.shape[0] >= 1 if __name__ ==", "first 10 rows for testing DATAFRAME = DATAFRAME.head(10) print(test_column_names(DATAFRAME) & test_datatype(DATAFRAME) & test_least_row_counts(DATAFRAME)", "- returns bool if the column name match * test_nan_values - returns bool", "has at least one row of data \"\"\" return df_import.shape[0] >= 1 if", "the dataframe pass all the tests \"\"\" DATAFRAME = pd.read_csv( 'https://data.seattle.gov/api/views/tw7j-df_importaw/rows.csv?accessType=DOWNLOAD') # only", "= pd.read_csv( 'https://data.seattle.gov/api/views/tw7j-df_importaw/rows.csv?accessType=DOWNLOAD') def test_datatype(df_import): \"\"\"Test if all columns have values of the", "python type This script requires that `pandas` be installed within the Python environment", "bool value: True if the dataframe has expected columns \"\"\" df_import_columns = sorted(df_import.columns.tolist())", "of each column match \"\"\" columns = list(df_import) for name in columns: try:", "imported as Pandas Dataframe Returns ------- bool a bool value: True if the", "argument 3. Values in each column have the same python type This script", "the main function of the script \"\"\" import pandas as pd DATAFRAMES =", "bool value if the dataframe pass all the tests \"\"\" DATAFRAME = pd.read_csv(", "main - the main function of the script \"\"\" import pandas as pd", "the second argument 3. 
Values in each column have the same python type", "This script downloads a dataset from Seattle Open Data Portal and imports as", "value \"\"\" return df_import.notnull().values.any() def test_least_row_counts(df_import): \"\"\"Test if the dataframe has at least", "\"\"\"Test if the dataframe has at least one row of data Parameters ----------", "of the correct type Parameters ---------- df_import : Pandas Dataframe The dataset imported", "if the dataframe has non value Parameters ---------- df_import : Pandas Dataframe The", "at least one row of data \"\"\" return df_import.shape[0] >= 1 if __name__", "as a Pandas Dataframe. This tool checks if the dataframe: 1. Has at", "the dataframe has non value \"\"\" return df_import.notnull().values.any() def test_least_row_counts(df_import): \"\"\"Test if the", "all the tests \"\"\" DATAFRAME = pd.read_csv( 'https://data.seattle.gov/api/views/tw7j-df_importaw/rows.csv?accessType=DOWNLOAD') # only fetch first 10", "\"\"\" columns = list(df_import) for name in columns: try: tp_name = ( isinstance(", "of data \"\"\" return df_import.shape[0] >= 1 if __name__ == '__main__': \"\"\"Main function", "df_import[name].iloc[1], df_import[name].map(type))).any().tolist() return tp_name def test_column_names(df_import): \"\"\"Test if the dataframe has expected columns", "= list(df_import) for name in columns: try: tp_name = ( isinstance( df_import[name].iloc[1].item(), df_import[name].map(type))).any().tolist()", "True if the dataframe has non value \"\"\" return df_import.notnull().values.any() def test_least_row_counts(df_import): \"\"\"Test", "Dataframe Returns ------- bool a bool value: True if the datatype of each", "fetch first 10 rows for testing DATAFRAME = DATAFRAME.head(10) print(test_column_names(DATAFRAME) & test_datatype(DATAFRAME) &", "and imports as a Pandas Dataframe. 
This tool checks if the dataframe: 1.", "non value Parameters ---------- df_import : Pandas Dataframe The dataset imported as Pandas", "bool if the dataframe has at least one row of data * main", "Returns ------- bool a bool value: True if the dataframe has expected columns", "one row of data \"\"\" return df_import.shape[0] >= 1 if __name__ == '__main__':", "\"\"\" DATAFRAME = pd.read_csv( 'https://data.seattle.gov/api/views/tw7j-df_importaw/rows.csv?accessType=DOWNLOAD') # only fetch first 10 rows for testing", "column name match * test_nan_values - returns bool if the dataframe has nan", "data \"\"\" return df_import.shape[0] >= 1 if __name__ == '__main__': \"\"\"Main function Returns", "requires that `pandas` be installed within the Python environment you are running this", "the dataframe has nan value * test_least_row_counts - returns bool if the dataframe", "'to_station_name', 'from_station_id', 'to_station_id', 'usertype', 'gender', 'birthyear'] if df_import_columns == sorted(df_import_checklist): return True def", "\"\"\" return df_import.notnull().values.any() def test_least_row_counts(df_import): \"\"\"Test if the dataframe has at least one", "function of the script \"\"\" import pandas as pd DATAFRAMES = pd.read_csv( 'https://data.seattle.gov/api/views/tw7j-df_importaw/rows.csv?accessType=DOWNLOAD')", "script \"\"\" import pandas as pd DATAFRAMES = pd.read_csv( 'https://data.seattle.gov/api/views/tw7j-df_importaw/rows.csv?accessType=DOWNLOAD') def test_datatype(df_import): \"\"\"Test", "at least 10 rows of data 2. 
Contains only the columns that specified", "__name__ == '__main__': \"\"\"Main function Returns ------- bool a bool value if the", "bool a bool value: True if the datatype of each column match \"\"\"", "have values of the correct type Parameters ---------- df_import : Pandas Dataframe The", "be imported as a module and contains the following functions: * test_column_names -", "imported as a module and contains the following functions: * test_column_names - returns", "bool a bool value: True if the dataframe has at least one row", "Seattle Open Data Portal and imports as a Pandas Dataframe. This tool checks", "type This script requires that `pandas` be installed within the Python environment you", "test_nan_values(df_import): \"\"\"Test if the dataframe has non value Parameters ---------- df_import : Pandas", "'https://data.seattle.gov/api/views/tw7j-df_importaw/rows.csv?accessType=DOWNLOAD') def test_datatype(df_import): \"\"\"Test if all columns have values of the correct type", "in columns: try: tp_name = ( isinstance( df_import[name].iloc[1].item(), df_import[name].map(type))).any().tolist() except AttributeError: tp_name =", "Pandas Dataframe. This tool checks if the dataframe: 1. Has at least 10", "= ( isinstance( df_import[name].iloc[1].item(), df_import[name].map(type))).any().tolist() except AttributeError: tp_name = ( isinstance( df_import[name].iloc[1], df_import[name].map(type))).any().tolist()", "Values in each column have the same python type This script requires that", "a bool value: True if the dataframe has expected columns \"\"\" df_import_columns =", "This tool checks if the dataframe: 1. Has at least 10 rows of", "a Pandas Dataframe. This tool checks if the dataframe: 1. Has at least", "Data Portal and imports as a Pandas Dataframe. 
This tool checks if the", "script requires that `pandas` be installed within the Python environment you are running", "pass all the tests \"\"\" DATAFRAME = pd.read_csv( 'https://data.seattle.gov/api/views/tw7j-df_importaw/rows.csv?accessType=DOWNLOAD') # only fetch first", "------- bool a bool value: True if the dataframe has expected columns \"\"\"", "tp_name = ( isinstance( df_import[name].iloc[1], df_import[name].map(type))).any().tolist() return tp_name def test_column_names(df_import): \"\"\"Test if the", "value: True if the dataframe has non value \"\"\" return df_import.notnull().values.any() def test_least_row_counts(df_import):", "* test_least_row_counts - returns bool if the dataframe has at least one row", "the columns that specified as the second argument 3. Values in each column", "the dataframe has at least one row of data * main - the", "second argument 3. Values in each column have the same python type This", "as the second argument 3. Values in each column have the same python", "True if the datatype of each column match \"\"\" columns = list(df_import) for", "the same python type This script requires that `pandas` be installed within the", "Portal and imports as a Pandas Dataframe. 
This tool checks if the dataframe:", "if the dataframe has at least one row of data Parameters ---------- df_import", "= sorted(df_import.columns.tolist()) df_import_checklist = ['trip_id', 'starttime', 'stoptime', 'bikeid', 'tripduration', 'from_station_name', 'to_station_name', 'from_station_id', 'to_station_id',", "Returns ------- bool a bool value if the dataframe pass all the tests", "try: tp_name = ( isinstance( df_import[name].iloc[1].item(), df_import[name].map(type))).any().tolist() except AttributeError: tp_name = ( isinstance(", "df_import[name].iloc[1].item(), df_import[name].map(type))).any().tolist() except AttributeError: tp_name = ( isinstance( df_import[name].iloc[1], df_import[name].map(type))).any().tolist() return tp_name def", "df_import[name].map(type))).any().tolist() except AttributeError: tp_name = ( isinstance( df_import[name].iloc[1], df_import[name].map(type))).any().tolist() return tp_name def test_column_names(df_import):", "can also be imported as a module and contains the following functions: *", "each column have the same python type This script requires that `pandas` be", "'https://data.seattle.gov/api/views/tw7j-df_importaw/rows.csv?accessType=DOWNLOAD') # only fetch first 10 rows for testing DATAFRAME = DATAFRAME.head(10) print(test_column_names(DATAFRAME)", "bool if the column name match * test_nan_values - returns bool if the", "df_import_checklist = ['trip_id', 'starttime', 'stoptime', 'bikeid', 'tripduration', 'from_station_name', 'to_station_name', 'from_station_id', 'to_station_id', 'usertype', 'gender',", "df_import_columns = sorted(df_import.columns.tolist()) df_import_checklist = ['trip_id', 'starttime', 'stoptime', 'bikeid', 'tripduration', 'from_station_name', 'to_station_name', 'from_station_id',", "tp_name def test_column_names(df_import): \"\"\"Test if the dataframe has expected columns Parameters ---------- df_import", "least 10 rows of data 2. 
Contains only the columns that specified as", "returns bool if the dataframe has at least one row of data *", "Dataframe Returns ------- bool a bool value: True if the dataframe has non", "file can also be imported as a module and contains the following functions:", "row of data \"\"\" return df_import.shape[0] >= 1 if __name__ == '__main__': \"\"\"Main", "environment you are running this script in. This file can also be imported", ">= 1 if __name__ == '__main__': \"\"\"Main function Returns ------- bool a bool", "test_least_row_counts(df_import): \"\"\"Test if the dataframe has at least one row of data Parameters", "dataframe has at least one row of data * main - the main", "checks if the dataframe: 1. Has at least 10 rows of data 2.", "if the dataframe: 1. Has at least 10 rows of data 2. Contains", "running this script in. This file can also be imported as a module", "pd.read_csv( 'https://data.seattle.gov/api/views/tw7j-df_importaw/rows.csv?accessType=DOWNLOAD') def test_datatype(df_import): \"\"\"Test if all columns have values of the correct", "AttributeError: tp_name = ( isinstance( df_import[name].iloc[1], df_import[name].map(type))).any().tolist() return tp_name def test_column_names(df_import): \"\"\"Test if", "tests \"\"\" DATAFRAME = pd.read_csv( 'https://data.seattle.gov/api/views/tw7j-df_importaw/rows.csv?accessType=DOWNLOAD') # only fetch first 10 rows for", "sorted(df_import_checklist): return True def test_nan_values(df_import): \"\"\"Test if the dataframe has non value Parameters", "the following functions: * test_column_names - returns bool if the column name match", "test_nan_values - returns bool if the dataframe has nan value * test_least_row_counts -", "'usertype', 'gender', 'birthyear'] if df_import_columns == sorted(df_import_checklist): return True def test_nan_values(df_import): \"\"\"Test if", "def test_column_names(df_import): \"\"\"Test if the dataframe has expected columns Parameters ---------- df_import :", "a bool value: True if the 
datatype of each column match \"\"\" columns", "Dataframe Returns ------- bool a bool value: True if the dataframe has at", "\"\"\"Test if all columns have values of the correct type Parameters ---------- df_import", "True if the dataframe has at least one row of data \"\"\" return", "the dataframe has expected columns Parameters ---------- df_import : Pandas Dataframe The dataset", "Returns ------- bool a bool value: True if the dataframe has non value", "list(df_import) for name in columns: try: tp_name = ( isinstance( df_import[name].iloc[1].item(), df_import[name].map(type))).any().tolist() except", "the correct type Parameters ---------- df_import : Pandas Dataframe The dataset imported as", ": Pandas Dataframe The dataset imported as Pandas Dataframe Returns ------- bool a", "has at least one row of data Parameters ---------- df_import : Pandas Dataframe", "if df_import_columns == sorted(df_import_checklist): return True def test_nan_values(df_import): \"\"\"Test if the dataframe has", "of data * main - the main function of the script \"\"\" import", "== '__main__': \"\"\"Main function Returns ------- bool a bool value if the dataframe", "main function of the script \"\"\" import pandas as pd DATAFRAMES = pd.read_csv(", "\"\"\" return df_import.shape[0] >= 1 if __name__ == '__main__': \"\"\"Main function Returns -------", "the quality This script downloads a dataset from Seattle Open Data Portal and", "this script in. This file can also be imported as a module and", "------- bool a bool value: True if the dataframe has at least one", "only the columns that specified as the second argument 3. 
Values in each", "== sorted(df_import_checklist): return True def test_nan_values(df_import): \"\"\"Test if the dataframe has non value", "if the dataframe has at least one row of data \"\"\" return df_import.shape[0]", "contains the following functions: * test_column_names - returns bool if the column name", "( isinstance( df_import[name].iloc[1], df_import[name].map(type))).any().tolist() return tp_name def test_column_names(df_import): \"\"\"Test if the dataframe has", "all columns have values of the correct type Parameters ---------- df_import : Pandas", "value: True if the dataframe has expected columns \"\"\" df_import_columns = sorted(df_import.columns.tolist()) df_import_checklist", "- the main function of the script \"\"\" import pandas as pd DATAFRAMES", "bool a bool value: True if the dataframe has expected columns \"\"\" df_import_columns", "dataframe has nan value * test_least_row_counts - returns bool if the dataframe has", "if the dataframe has at least one row of data * main -" ]
[ "with self.assertRaises(zserio.PythonRuntimeException): structureConstraints.write(writer) def testWriteWrongWhiteConstraint(self): structureConstraints = self.api.StructureConstraints(self.api.BasicColor.BLACK, self.api.BasicColor.RED, True, self.api.ExtendedColor.PURPLE) writer =", "structureConstraints = self.api.StructureConstraints() structureConstraints.read(reader) self.assertEqual(self.api.BasicColor.BLACK, structureConstraints.black_color) self.assertEqual(self.api.BasicColor.WHITE, structureConstraints.white_color) self.assertEqual(self.api.ExtendedColor.PURPLE, structureConstraints.purple_color) def testReadWrongBlackConstraint(self): writer", "self.api.BasicColor.RED, self.api.ExtendedColor.PURPLE) reader = zserio.BitStreamReader(writer.byte_array, writer.bitposition) structureConstraints = self.api.StructureConstraints() with self.assertRaises(zserio.PythonRuntimeException): structureConstraints.read(reader) def", "= zserio.BitStreamWriter() with self.assertRaises(zserio.PythonRuntimeException): structureConstraints.write(writer) @staticmethod def _write(writer, blackColor, whiteColor, purpleColor): writer.write_bits(blackColor.value, 8)", "self.api.StructureConstraints() with self.assertRaises(zserio.PythonRuntimeException): structureConstraints.read(reader) def testReadWrongPurpleConstraint(self): writer = zserio.BitStreamWriter() self.__class__._write(writer, self.api.BasicColor.BLACK, self.api.BasicColor.WHITE, self.api.ExtendedColor.LIME)", "def testReadWrongWhiteConstraint(self): writer = zserio.BitStreamWriter() self.__class__._write(writer, self.api.BasicColor.BLACK, self.api.BasicColor.RED, self.api.ExtendedColor.PURPLE) reader = zserio.BitStreamReader(writer.byte_array, writer.bitposition)", "testWriteCorrectConstraints(self): structureConstraints = self.api.StructureConstraints(self.api.BasicColor.BLACK, self.api.BasicColor.WHITE, True, self.api.ExtendedColor.PURPLE) bitBuffer = zserio.serialize(structureConstraints) 
readStructureConstraints = zserio.deserialize(self.api.StructureConstraints,", "zserio.BitStreamReader(writer.byte_array, writer.bitposition) structureConstraints = self.api.StructureConstraints() with self.assertRaises(zserio.PythonRuntimeException): structureConstraints.read(reader) def testReadWrongPurpleConstraint(self): writer = zserio.BitStreamWriter()", "readStructureConstraints.black_color) self.assertEqual(self.api.BasicColor.WHITE, readStructureConstraints.white_color) self.assertEqual(self.api.ExtendedColor.PURPLE, readStructureConstraints.purple_color) self.assertEqual(structureConstraints, readStructureConstraints) def testWriteWrongBlackConstraint(self): structureConstraints = self.api.StructureConstraints(self.api.BasicColor.RED, self.api.BasicColor.WHITE,", "zserio.BitStreamWriter() with self.assertRaises(zserio.PythonRuntimeException): structureConstraints.write(writer) def testWriteWrongPurpleConstraint(self): structureConstraints = self.api.StructureConstraints(self.api.BasicColor.BLACK, self.api.BasicColor.WHITE, True, self.api.ExtendedColor.LIME) writer", "= zserio.BitStreamWriter() self.__class__._write(writer, self.api.BasicColor.RED, self.api.BasicColor.WHITE, self.api.ExtendedColor.PURPLE) reader = zserio.BitStreamReader(writer.byte_array, writer.bitposition) structureConstraints = self.api.StructureConstraints()", "zserio.BitStreamReader(writer.byte_array, writer.bitposition) structureConstraints = self.api.StructureConstraints() with self.assertRaises(zserio.PythonRuntimeException): structureConstraints.read(reader) def testReadWrongWhiteConstraint(self): writer = zserio.BitStreamWriter()", "= self.api.StructureConstraints() structureConstraints.read(reader) self.assertEqual(self.api.BasicColor.BLACK, structureConstraints.black_color) self.assertEqual(self.api.BasicColor.WHITE, structureConstraints.white_color) self.assertEqual(self.api.ExtendedColor.PURPLE, structureConstraints.purple_color) def testReadWrongBlackConstraint(self): writer 
=", "self.api.ExtendedColor.PURPLE) writer = zserio.BitStreamWriter() with self.assertRaises(zserio.PythonRuntimeException): structureConstraints.write(writer) def testWriteWrongWhiteConstraint(self): structureConstraints = self.api.StructureConstraints(self.api.BasicColor.BLACK, self.api.BasicColor.RED,", "def testReadWrongBlackConstraint(self): writer = zserio.BitStreamWriter() self.__class__._write(writer, self.api.BasicColor.RED, self.api.BasicColor.WHITE, self.api.ExtendedColor.PURPLE) reader = zserio.BitStreamReader(writer.byte_array, writer.bitposition)", "class StructureConstraintsTest(unittest.TestCase): @classmethod def setUpClass(cls): cls.api = getZserioApi(__file__, \"constraints.zs\").structure_constraints def testReadCorrectColors(self): writer =", "getZserioApi class StructureConstraintsTest(unittest.TestCase): @classmethod def setUpClass(cls): cls.api = getZserioApi(__file__, \"constraints.zs\").structure_constraints def testReadCorrectColors(self): writer", "setUpClass(cls): cls.api = getZserioApi(__file__, \"constraints.zs\").structure_constraints def testReadCorrectColors(self): writer = zserio.BitStreamWriter() self.__class__._write(writer, self.api.BasicColor.BLACK, self.api.BasicColor.WHITE,", "writer.bitposition) structureConstraints = self.api.StructureConstraints() with self.assertRaises(zserio.PythonRuntimeException): structureConstraints.read(reader) def testReadWrongPurpleConstraint(self): writer = zserio.BitStreamWriter() self.__class__._write(writer,", "getZserioApi(__file__, \"constraints.zs\").structure_constraints def testReadCorrectColors(self): writer = zserio.BitStreamWriter() self.__class__._write(writer, self.api.BasicColor.BLACK, self.api.BasicColor.WHITE, self.api.ExtendedColor.PURPLE) reader =", "testReadCorrectColors(self): writer = zserio.BitStreamWriter() self.__class__._write(writer, self.api.BasicColor.BLACK, self.api.BasicColor.WHITE, self.api.ExtendedColor.PURPLE) reader = zserio.BitStreamReader(writer.byte_array, 
writer.bitposition) structureConstraints", "structureConstraints = self.api.StructureConstraints(self.api.BasicColor.BLACK, self.api.BasicColor.WHITE, True, self.api.ExtendedColor.LIME) writer = zserio.BitStreamWriter() with self.assertRaises(zserio.PythonRuntimeException): structureConstraints.write(writer) @staticmethod", "self.assertEqual(structureConstraints, readStructureConstraints) def testWriteWrongBlackConstraint(self): structureConstraints = self.api.StructureConstraints(self.api.BasicColor.RED, self.api.BasicColor.WHITE, True, self.api.ExtendedColor.PURPLE) writer = zserio.BitStreamWriter()", "structureConstraints.write(writer) @staticmethod def _write(writer, blackColor, whiteColor, purpleColor): writer.write_bits(blackColor.value, 8) writer.write_bool(True) writer.write_bits(whiteColor.value, 8) writer.write_bool(True)", "self.api.ExtendedColor.PURPLE) reader = zserio.BitStreamReader(writer.byte_array, writer.bitposition) structureConstraints = self.api.StructureConstraints() structureConstraints.read(reader) self.assertEqual(self.api.BasicColor.BLACK, structureConstraints.black_color) self.assertEqual(self.api.BasicColor.WHITE, structureConstraints.white_color)", "self.__class__._write(writer, self.api.BasicColor.BLACK, self.api.BasicColor.WHITE, self.api.ExtendedColor.LIME) reader = zserio.BitStreamReader(writer.byte_array, writer.bitposition) structureConstraints = self.api.StructureConstraints() with self.assertRaises(zserio.PythonRuntimeException):", "readStructureConstraints = zserio.deserialize(self.api.StructureConstraints, bitBuffer) self.assertEqual(self.api.BasicColor.BLACK, readStructureConstraints.black_color) self.assertEqual(self.api.BasicColor.WHITE, readStructureConstraints.white_color) self.assertEqual(self.api.ExtendedColor.PURPLE, readStructureConstraints.purple_color) self.assertEqual(structureConstraints, readStructureConstraints) def", "with self.assertRaises(zserio.PythonRuntimeException): structureConstraints.read(reader) def 
testReadWrongWhiteConstraint(self): writer = zserio.BitStreamWriter() self.__class__._write(writer, self.api.BasicColor.BLACK, self.api.BasicColor.RED, self.api.ExtendedColor.PURPLE) reader", "structureConstraints = self.api.StructureConstraints(self.api.BasicColor.RED, self.api.BasicColor.WHITE, True, self.api.ExtendedColor.PURPLE) writer = zserio.BitStreamWriter() with self.assertRaises(zserio.PythonRuntimeException): structureConstraints.write(writer) def", "writer = zserio.BitStreamWriter() with self.assertRaises(zserio.PythonRuntimeException): structureConstraints.write(writer) def testWriteWrongWhiteConstraint(self): structureConstraints = self.api.StructureConstraints(self.api.BasicColor.BLACK, self.api.BasicColor.RED, True,", "zserio.BitStreamReader(writer.byte_array, writer.bitposition) structureConstraints = self.api.StructureConstraints() structureConstraints.read(reader) self.assertEqual(self.api.BasicColor.BLACK, structureConstraints.black_color) self.assertEqual(self.api.BasicColor.WHITE, structureConstraints.white_color) self.assertEqual(self.api.ExtendedColor.PURPLE, structureConstraints.purple_color) def", "zserio.deserialize(self.api.StructureConstraints, bitBuffer) self.assertEqual(self.api.BasicColor.BLACK, readStructureConstraints.black_color) self.assertEqual(self.api.BasicColor.WHITE, readStructureConstraints.white_color) self.assertEqual(self.api.ExtendedColor.PURPLE, readStructureConstraints.purple_color) self.assertEqual(structureConstraints, readStructureConstraints) def testWriteWrongBlackConstraint(self): structureConstraints", "bitBuffer = zserio.serialize(structureConstraints) readStructureConstraints = zserio.deserialize(self.api.StructureConstraints, bitBuffer) self.assertEqual(self.api.BasicColor.BLACK, readStructureConstraints.black_color) self.assertEqual(self.api.BasicColor.WHITE, readStructureConstraints.white_color) self.assertEqual(self.api.ExtendedColor.PURPLE, readStructureConstraints.purple_color)", 
"self.api.ExtendedColor.LIME) reader = zserio.BitStreamReader(writer.byte_array, writer.bitposition) structureConstraints = self.api.StructureConstraints() with self.assertRaises(zserio.PythonRuntimeException): structureConstraints.read(reader) def testWriteCorrectConstraints(self):", "self.api.StructureConstraints() with self.assertRaises(zserio.PythonRuntimeException): structureConstraints.read(reader) def testReadWrongWhiteConstraint(self): writer = zserio.BitStreamWriter() self.__class__._write(writer, self.api.BasicColor.BLACK, self.api.BasicColor.RED, self.api.ExtendedColor.PURPLE)", "import zserio from testutils import getZserioApi class StructureConstraintsTest(unittest.TestCase): @classmethod def setUpClass(cls): cls.api =", "zserio from testutils import getZserioApi class StructureConstraintsTest(unittest.TestCase): @classmethod def setUpClass(cls): cls.api = getZserioApi(__file__,", "zserio.BitStreamWriter() self.__class__._write(writer, self.api.BasicColor.BLACK, self.api.BasicColor.WHITE, self.api.ExtendedColor.PURPLE) reader = zserio.BitStreamReader(writer.byte_array, writer.bitposition) structureConstraints = self.api.StructureConstraints() structureConstraints.read(reader)", "= zserio.BitStreamWriter() with self.assertRaises(zserio.PythonRuntimeException): structureConstraints.write(writer) def testWriteWrongPurpleConstraint(self): structureConstraints = self.api.StructureConstraints(self.api.BasicColor.BLACK, self.api.BasicColor.WHITE, True, self.api.ExtendedColor.LIME)", "self.api.BasicColor.RED, True, self.api.ExtendedColor.PURPLE) writer = zserio.BitStreamWriter() with self.assertRaises(zserio.PythonRuntimeException): structureConstraints.write(writer) def testWriteWrongPurpleConstraint(self): structureConstraints =", "self.assertEqual(self.api.BasicColor.WHITE, structureConstraints.white_color) self.assertEqual(self.api.ExtendedColor.PURPLE, structureConstraints.purple_color) def testReadWrongBlackConstraint(self): writer = 
zserio.BitStreamWriter() self.__class__._write(writer, self.api.BasicColor.RED, self.api.BasicColor.WHITE, self.api.ExtendedColor.PURPLE)", "writer = zserio.BitStreamWriter() with self.assertRaises(zserio.PythonRuntimeException): structureConstraints.write(writer) @staticmethod def _write(writer, blackColor, whiteColor, purpleColor): writer.write_bits(blackColor.value,", "self.assertEqual(self.api.ExtendedColor.PURPLE, structureConstraints.purple_color) def testReadWrongBlackConstraint(self): writer = zserio.BitStreamWriter() self.__class__._write(writer, self.api.BasicColor.RED, self.api.BasicColor.WHITE, self.api.ExtendedColor.PURPLE) reader =", "zserio.BitStreamWriter() with self.assertRaises(zserio.PythonRuntimeException): structureConstraints.write(writer) def testWriteWrongWhiteConstraint(self): structureConstraints = self.api.StructureConstraints(self.api.BasicColor.BLACK, self.api.BasicColor.RED, True, self.api.ExtendedColor.PURPLE) writer", "writer = zserio.BitStreamWriter() with self.assertRaises(zserio.PythonRuntimeException): structureConstraints.write(writer) def testWriteWrongPurpleConstraint(self): structureConstraints = self.api.StructureConstraints(self.api.BasicColor.BLACK, self.api.BasicColor.WHITE, True,", "testutils import getZserioApi class StructureConstraintsTest(unittest.TestCase): @classmethod def setUpClass(cls): cls.api = getZserioApi(__file__, \"constraints.zs\").structure_constraints def", "testReadWrongWhiteConstraint(self): writer = zserio.BitStreamWriter() self.__class__._write(writer, self.api.BasicColor.BLACK, self.api.BasicColor.RED, self.api.ExtendedColor.PURPLE) reader = zserio.BitStreamReader(writer.byte_array, writer.bitposition) structureConstraints", "cls.api = getZserioApi(__file__, \"constraints.zs\").structure_constraints def testReadCorrectColors(self): writer = zserio.BitStreamWriter() self.__class__._write(writer, self.api.BasicColor.BLACK, self.api.BasicColor.WHITE, self.api.ExtendedColor.PURPLE)", 
"\"constraints.zs\").structure_constraints def testReadCorrectColors(self): writer = zserio.BitStreamWriter() self.__class__._write(writer, self.api.BasicColor.BLACK, self.api.BasicColor.WHITE, self.api.ExtendedColor.PURPLE) reader = zserio.BitStreamReader(writer.byte_array,", "@classmethod def setUpClass(cls): cls.api = getZserioApi(__file__, \"constraints.zs\").structure_constraints def testReadCorrectColors(self): writer = zserio.BitStreamWriter() self.__class__._write(writer,", "self.assertEqual(self.api.BasicColor.BLACK, readStructureConstraints.black_color) self.assertEqual(self.api.BasicColor.WHITE, readStructureConstraints.white_color) self.assertEqual(self.api.ExtendedColor.PURPLE, readStructureConstraints.purple_color) self.assertEqual(structureConstraints, readStructureConstraints) def testWriteWrongBlackConstraint(self): structureConstraints = self.api.StructureConstraints(self.api.BasicColor.RED,", "self.api.ExtendedColor.PURPLE) writer = zserio.BitStreamWriter() with self.assertRaises(zserio.PythonRuntimeException): structureConstraints.write(writer) def testWriteWrongPurpleConstraint(self): structureConstraints = self.api.StructureConstraints(self.api.BasicColor.BLACK, self.api.BasicColor.WHITE,", "self.assertRaises(zserio.PythonRuntimeException): structureConstraints.write(writer) def testWriteWrongPurpleConstraint(self): structureConstraints = self.api.StructureConstraints(self.api.BasicColor.BLACK, self.api.BasicColor.WHITE, True, self.api.ExtendedColor.LIME) writer = zserio.BitStreamWriter()", "= self.api.StructureConstraints(self.api.BasicColor.RED, self.api.BasicColor.WHITE, True, self.api.ExtendedColor.PURPLE) writer = zserio.BitStreamWriter() with self.assertRaises(zserio.PythonRuntimeException): structureConstraints.write(writer) def testWriteWrongWhiteConstraint(self):", "testReadWrongPurpleConstraint(self): writer = zserio.BitStreamWriter() self.__class__._write(writer, self.api.BasicColor.BLACK, self.api.BasicColor.WHITE, 
self.api.ExtendedColor.LIME) reader = zserio.BitStreamReader(writer.byte_array, writer.bitposition) structureConstraints", "True, self.api.ExtendedColor.LIME) writer = zserio.BitStreamWriter() with self.assertRaises(zserio.PythonRuntimeException): structureConstraints.write(writer) @staticmethod def _write(writer, blackColor, whiteColor,", "with self.assertRaises(zserio.PythonRuntimeException): structureConstraints.read(reader) def testReadWrongPurpleConstraint(self): writer = zserio.BitStreamWriter() self.__class__._write(writer, self.api.BasicColor.BLACK, self.api.BasicColor.WHITE, self.api.ExtendedColor.LIME) reader", "self.assertEqual(self.api.ExtendedColor.PURPLE, readStructureConstraints.purple_color) self.assertEqual(structureConstraints, readStructureConstraints) def testWriteWrongBlackConstraint(self): structureConstraints = self.api.StructureConstraints(self.api.BasicColor.RED, self.api.BasicColor.WHITE, True, self.api.ExtendedColor.PURPLE) writer", "writer.bitposition) structureConstraints = self.api.StructureConstraints() with self.assertRaises(zserio.PythonRuntimeException): structureConstraints.read(reader) def testReadWrongWhiteConstraint(self): writer = zserio.BitStreamWriter() self.__class__._write(writer,", "True, self.api.ExtendedColor.PURPLE) bitBuffer = zserio.serialize(structureConstraints) readStructureConstraints = zserio.deserialize(self.api.StructureConstraints, bitBuffer) self.assertEqual(self.api.BasicColor.BLACK, readStructureConstraints.black_color) self.assertEqual(self.api.BasicColor.WHITE, readStructureConstraints.white_color)", "structureConstraints = self.api.StructureConstraints(self.api.BasicColor.BLACK, self.api.BasicColor.WHITE, True, self.api.ExtendedColor.PURPLE) bitBuffer = zserio.serialize(structureConstraints) readStructureConstraints = zserio.deserialize(self.api.StructureConstraints, bitBuffer)", "structureConstraints = self.api.StructureConstraints(self.api.BasicColor.BLACK, self.api.BasicColor.RED, True, 
self.api.ExtendedColor.PURPLE) writer = zserio.BitStreamWriter() with self.assertRaises(zserio.PythonRuntimeException): structureConstraints.write(writer) def", "self.api.BasicColor.BLACK, self.api.BasicColor.RED, self.api.ExtendedColor.PURPLE) reader = zserio.BitStreamReader(writer.byte_array, writer.bitposition) structureConstraints = self.api.StructureConstraints() with self.assertRaises(zserio.PythonRuntimeException): structureConstraints.read(reader)", "self.api.BasicColor.RED, self.api.BasicColor.WHITE, self.api.ExtendedColor.PURPLE) reader = zserio.BitStreamReader(writer.byte_array, writer.bitposition) structureConstraints = self.api.StructureConstraints() with self.assertRaises(zserio.PythonRuntimeException): structureConstraints.read(reader)", "structureConstraints = self.api.StructureConstraints() with self.assertRaises(zserio.PythonRuntimeException): structureConstraints.read(reader) def testReadWrongPurpleConstraint(self): writer = zserio.BitStreamWriter() self.__class__._write(writer, self.api.BasicColor.BLACK,", "writer = zserio.BitStreamWriter() self.__class__._write(writer, self.api.BasicColor.BLACK, self.api.BasicColor.RED, self.api.ExtendedColor.PURPLE) reader = zserio.BitStreamReader(writer.byte_array, writer.bitposition) structureConstraints =", "reader = zserio.BitStreamReader(writer.byte_array, writer.bitposition) structureConstraints = self.api.StructureConstraints() with self.assertRaises(zserio.PythonRuntimeException): structureConstraints.read(reader) def testReadWrongPurpleConstraint(self): writer", "with self.assertRaises(zserio.PythonRuntimeException): structureConstraints.write(writer) @staticmethod def _write(writer, blackColor, whiteColor, purpleColor): writer.write_bits(blackColor.value, 8) writer.write_bool(True) writer.write_bits(whiteColor.value,", "self.assertEqual(self.api.BasicColor.WHITE, readStructureConstraints.white_color) self.assertEqual(self.api.ExtendedColor.PURPLE, readStructureConstraints.purple_color) 
self.assertEqual(structureConstraints, readStructureConstraints) def testWriteWrongBlackConstraint(self): structureConstraints = self.api.StructureConstraints(self.api.BasicColor.RED, self.api.BasicColor.WHITE, True,", "structureConstraints.purple_color) def testReadWrongBlackConstraint(self): writer = zserio.BitStreamWriter() self.__class__._write(writer, self.api.BasicColor.RED, self.api.BasicColor.WHITE, self.api.ExtendedColor.PURPLE) reader = zserio.BitStreamReader(writer.byte_array,", "StructureConstraintsTest(unittest.TestCase): @classmethod def setUpClass(cls): cls.api = getZserioApi(__file__, \"constraints.zs\").structure_constraints def testReadCorrectColors(self): writer = zserio.BitStreamWriter()", "from testutils import getZserioApi class StructureConstraintsTest(unittest.TestCase): @classmethod def setUpClass(cls): cls.api = getZserioApi(__file__, \"constraints.zs\").structure_constraints", "self.assertRaises(zserio.PythonRuntimeException): structureConstraints.write(writer) def testWriteWrongWhiteConstraint(self): structureConstraints = self.api.StructureConstraints(self.api.BasicColor.BLACK, self.api.BasicColor.RED, True, self.api.ExtendedColor.PURPLE) writer = zserio.BitStreamWriter()", "self.api.BasicColor.WHITE, self.api.ExtendedColor.PURPLE) reader = zserio.BitStreamReader(writer.byte_array, writer.bitposition) structureConstraints = self.api.StructureConstraints() with self.assertRaises(zserio.PythonRuntimeException): structureConstraints.read(reader) def", "self.api.StructureConstraints(self.api.BasicColor.RED, self.api.BasicColor.WHITE, True, self.api.ExtendedColor.PURPLE) writer = zserio.BitStreamWriter() with self.assertRaises(zserio.PythonRuntimeException): structureConstraints.write(writer) def testWriteWrongWhiteConstraint(self): structureConstraints", "with self.assertRaises(zserio.PythonRuntimeException): structureConstraints.write(writer) def testWriteWrongPurpleConstraint(self): structureConstraints = 
self.api.StructureConstraints(self.api.BasicColor.BLACK, self.api.BasicColor.WHITE, True, self.api.ExtendedColor.LIME) writer =", "self.api.StructureConstraints() with self.assertRaises(zserio.PythonRuntimeException): structureConstraints.read(reader) def testWriteCorrectConstraints(self): structureConstraints = self.api.StructureConstraints(self.api.BasicColor.BLACK, self.api.BasicColor.WHITE, True, self.api.ExtendedColor.PURPLE) bitBuffer", "= zserio.BitStreamReader(writer.byte_array, writer.bitposition) structureConstraints = self.api.StructureConstraints() with self.assertRaises(zserio.PythonRuntimeException): structureConstraints.read(reader) def testWriteCorrectConstraints(self): structureConstraints =", "self.__class__._write(writer, self.api.BasicColor.BLACK, self.api.BasicColor.WHITE, self.api.ExtendedColor.PURPLE) reader = zserio.BitStreamReader(writer.byte_array, writer.bitposition) structureConstraints = self.api.StructureConstraints() structureConstraints.read(reader) self.assertEqual(self.api.BasicColor.BLACK,", "writer.bitposition) structureConstraints = self.api.StructureConstraints() with self.assertRaises(zserio.PythonRuntimeException): structureConstraints.read(reader) def testWriteCorrectConstraints(self): structureConstraints = self.api.StructureConstraints(self.api.BasicColor.BLACK, self.api.BasicColor.WHITE,", "def testWriteWrongPurpleConstraint(self): structureConstraints = self.api.StructureConstraints(self.api.BasicColor.BLACK, self.api.BasicColor.WHITE, True, self.api.ExtendedColor.LIME) writer = zserio.BitStreamWriter() with self.assertRaises(zserio.PythonRuntimeException):", "testWriteWrongWhiteConstraint(self): structureConstraints = self.api.StructureConstraints(self.api.BasicColor.BLACK, self.api.BasicColor.RED, True, self.api.ExtendedColor.PURPLE) writer = zserio.BitStreamWriter() with self.assertRaises(zserio.PythonRuntimeException): structureConstraints.write(writer)", "zserio.BitStreamWriter() self.__class__._write(writer, 
self.api.BasicColor.BLACK, self.api.BasicColor.WHITE, self.api.ExtendedColor.LIME) reader = zserio.BitStreamReader(writer.byte_array, writer.bitposition) structureConstraints = self.api.StructureConstraints() with", "readStructureConstraints) def testWriteWrongBlackConstraint(self): structureConstraints = self.api.StructureConstraints(self.api.BasicColor.RED, self.api.BasicColor.WHITE, True, self.api.ExtendedColor.PURPLE) writer = zserio.BitStreamWriter() with", "= self.api.StructureConstraints() with self.assertRaises(zserio.PythonRuntimeException): structureConstraints.read(reader) def testReadWrongWhiteConstraint(self): writer = zserio.BitStreamWriter() self.__class__._write(writer, self.api.BasicColor.BLACK, self.api.BasicColor.RED,", "= zserio.deserialize(self.api.StructureConstraints, bitBuffer) self.assertEqual(self.api.BasicColor.BLACK, readStructureConstraints.black_color) self.assertEqual(self.api.BasicColor.WHITE, readStructureConstraints.white_color) self.assertEqual(self.api.ExtendedColor.PURPLE, readStructureConstraints.purple_color) self.assertEqual(structureConstraints, readStructureConstraints) def testWriteWrongBlackConstraint(self):", "= self.api.StructureConstraints(self.api.BasicColor.BLACK, self.api.BasicColor.RED, True, self.api.ExtendedColor.PURPLE) writer = zserio.BitStreamWriter() with self.assertRaises(zserio.PythonRuntimeException): structureConstraints.write(writer) def testWriteWrongPurpleConstraint(self):", "self.api.BasicColor.BLACK, self.api.BasicColor.WHITE, self.api.ExtendedColor.LIME) reader = zserio.BitStreamReader(writer.byte_array, writer.bitposition) structureConstraints = self.api.StructureConstraints() with self.assertRaises(zserio.PythonRuntimeException): structureConstraints.read(reader)", "= zserio.BitStreamReader(writer.byte_array, writer.bitposition) structureConstraints = self.api.StructureConstraints() with self.assertRaises(zserio.PythonRuntimeException): structureConstraints.read(reader) def 
testReadWrongWhiteConstraint(self): writer =", "testWriteWrongPurpleConstraint(self): structureConstraints = self.api.StructureConstraints(self.api.BasicColor.BLACK, self.api.BasicColor.WHITE, True, self.api.ExtendedColor.LIME) writer = zserio.BitStreamWriter() with self.assertRaises(zserio.PythonRuntimeException): structureConstraints.write(writer)", "structureConstraints.read(reader) def testReadWrongWhiteConstraint(self): writer = zserio.BitStreamWriter() self.__class__._write(writer, self.api.BasicColor.BLACK, self.api.BasicColor.RED, self.api.ExtendedColor.PURPLE) reader = zserio.BitStreamReader(writer.byte_array,", "= zserio.BitStreamWriter() self.__class__._write(writer, self.api.BasicColor.BLACK, self.api.BasicColor.WHITE, self.api.ExtendedColor.PURPLE) reader = zserio.BitStreamReader(writer.byte_array, writer.bitposition) structureConstraints = self.api.StructureConstraints()", "reader = zserio.BitStreamReader(writer.byte_array, writer.bitposition) structureConstraints = self.api.StructureConstraints() with self.assertRaises(zserio.PythonRuntimeException): structureConstraints.read(reader) def testReadWrongWhiteConstraint(self): writer", "def setUpClass(cls): cls.api = getZserioApi(__file__, \"constraints.zs\").structure_constraints def testReadCorrectColors(self): writer = zserio.BitStreamWriter() self.__class__._write(writer, self.api.BasicColor.BLACK,", "structureConstraints.white_color) self.assertEqual(self.api.ExtendedColor.PURPLE, structureConstraints.purple_color) def testReadWrongBlackConstraint(self): writer = zserio.BitStreamWriter() self.__class__._write(writer, self.api.BasicColor.RED, self.api.BasicColor.WHITE, self.api.ExtendedColor.PURPLE) reader", "structureConstraints.read(reader) def testReadWrongPurpleConstraint(self): writer = zserio.BitStreamWriter() self.__class__._write(writer, self.api.BasicColor.BLACK, self.api.BasicColor.WHITE, self.api.ExtendedColor.LIME) reader = zserio.BitStreamReader(writer.byte_array,", 
"testWriteWrongBlackConstraint(self): structureConstraints = self.api.StructureConstraints(self.api.BasicColor.RED, self.api.BasicColor.WHITE, True, self.api.ExtendedColor.PURPLE) writer = zserio.BitStreamWriter() with self.assertRaises(zserio.PythonRuntimeException): structureConstraints.write(writer)", "structureConstraints.write(writer) def testWriteWrongWhiteConstraint(self): structureConstraints = self.api.StructureConstraints(self.api.BasicColor.BLACK, self.api.BasicColor.RED, True, self.api.ExtendedColor.PURPLE) writer = zserio.BitStreamWriter() with", "@staticmethod def _write(writer, blackColor, whiteColor, purpleColor): writer.write_bits(blackColor.value, 8) writer.write_bool(True) writer.write_bits(whiteColor.value, 8) writer.write_bool(True) writer.write_bits(purpleColor.value,", "= zserio.serialize(structureConstraints) readStructureConstraints = zserio.deserialize(self.api.StructureConstraints, bitBuffer) self.assertEqual(self.api.BasicColor.BLACK, readStructureConstraints.black_color) self.assertEqual(self.api.BasicColor.WHITE, readStructureConstraints.white_color) self.assertEqual(self.api.ExtendedColor.PURPLE, readStructureConstraints.purple_color) self.assertEqual(structureConstraints,", "structureConstraints.black_color) self.assertEqual(self.api.BasicColor.WHITE, structureConstraints.white_color) self.assertEqual(self.api.ExtendedColor.PURPLE, structureConstraints.purple_color) def testReadWrongBlackConstraint(self): writer = zserio.BitStreamWriter() self.__class__._write(writer, self.api.BasicColor.RED, self.api.BasicColor.WHITE,", "= self.api.StructureConstraints(self.api.BasicColor.BLACK, self.api.BasicColor.WHITE, True, self.api.ExtendedColor.LIME) writer = zserio.BitStreamWriter() with self.assertRaises(zserio.PythonRuntimeException): structureConstraints.write(writer) @staticmethod def", "True, self.api.ExtendedColor.PURPLE) writer = zserio.BitStreamWriter() with self.assertRaises(zserio.PythonRuntimeException): 
structureConstraints.write(writer) def testWriteWrongWhiteConstraint(self): structureConstraints = self.api.StructureConstraints(self.api.BasicColor.BLACK,", "def testWriteWrongWhiteConstraint(self): structureConstraints = self.api.StructureConstraints(self.api.BasicColor.BLACK, self.api.BasicColor.RED, True, self.api.ExtendedColor.PURPLE) writer = zserio.BitStreamWriter() with self.assertRaises(zserio.PythonRuntimeException):", "self.__class__._write(writer, self.api.BasicColor.RED, self.api.BasicColor.WHITE, self.api.ExtendedColor.PURPLE) reader = zserio.BitStreamReader(writer.byte_array, writer.bitposition) structureConstraints = self.api.StructureConstraints() with self.assertRaises(zserio.PythonRuntimeException):", "reader = zserio.BitStreamReader(writer.byte_array, writer.bitposition) structureConstraints = self.api.StructureConstraints() structureConstraints.read(reader) self.assertEqual(self.api.BasicColor.BLACK, structureConstraints.black_color) self.assertEqual(self.api.BasicColor.WHITE, structureConstraints.white_color) self.assertEqual(self.api.ExtendedColor.PURPLE,", "bitBuffer) self.assertEqual(self.api.BasicColor.BLACK, readStructureConstraints.black_color) self.assertEqual(self.api.BasicColor.WHITE, readStructureConstraints.white_color) self.assertEqual(self.api.ExtendedColor.PURPLE, readStructureConstraints.purple_color) self.assertEqual(structureConstraints, readStructureConstraints) def testWriteWrongBlackConstraint(self): structureConstraints =", "self.api.BasicColor.WHITE, self.api.ExtendedColor.PURPLE) reader = zserio.BitStreamReader(writer.byte_array, writer.bitposition) structureConstraints = self.api.StructureConstraints() structureConstraints.read(reader) self.assertEqual(self.api.BasicColor.BLACK, structureConstraints.black_color) self.assertEqual(self.api.BasicColor.WHITE,", "writer = zserio.BitStreamWriter() self.__class__._write(writer, self.api.BasicColor.BLACK, self.api.BasicColor.WHITE, self.api.ExtendedColor.LIME) reader = 
zserio.BitStreamReader(writer.byte_array, writer.bitposition) structureConstraints =", "self.api.BasicColor.BLACK, self.api.BasicColor.WHITE, self.api.ExtendedColor.PURPLE) reader = zserio.BitStreamReader(writer.byte_array, writer.bitposition) structureConstraints = self.api.StructureConstraints() structureConstraints.read(reader) self.assertEqual(self.api.BasicColor.BLACK, structureConstraints.black_color)", "def testReadCorrectColors(self): writer = zserio.BitStreamWriter() self.__class__._write(writer, self.api.BasicColor.BLACK, self.api.BasicColor.WHITE, self.api.ExtendedColor.PURPLE) reader = zserio.BitStreamReader(writer.byte_array, writer.bitposition)", "writer = zserio.BitStreamWriter() self.__class__._write(writer, self.api.BasicColor.RED, self.api.BasicColor.WHITE, self.api.ExtendedColor.PURPLE) reader = zserio.BitStreamReader(writer.byte_array, writer.bitposition) structureConstraints =", "self.api.ExtendedColor.PURPLE) reader = zserio.BitStreamReader(writer.byte_array, writer.bitposition) structureConstraints = self.api.StructureConstraints() with self.assertRaises(zserio.PythonRuntimeException): structureConstraints.read(reader) def testReadWrongWhiteConstraint(self):", "structureConstraints.read(reader) def testWriteCorrectConstraints(self): structureConstraints = self.api.StructureConstraints(self.api.BasicColor.BLACK, self.api.BasicColor.WHITE, True, self.api.ExtendedColor.PURPLE) bitBuffer = zserio.serialize(structureConstraints) readStructureConstraints", "zserio.BitStreamWriter() self.__class__._write(writer, self.api.BasicColor.BLACK, self.api.BasicColor.RED, self.api.ExtendedColor.PURPLE) reader = zserio.BitStreamReader(writer.byte_array, writer.bitposition) structureConstraints = self.api.StructureConstraints() with", "readStructureConstraints.purple_color) self.assertEqual(structureConstraints, readStructureConstraints) def testWriteWrongBlackConstraint(self): structureConstraints = self.api.StructureConstraints(self.api.BasicColor.RED, 
self.api.BasicColor.WHITE, True, self.api.ExtendedColor.PURPLE) writer =", "= zserio.BitStreamReader(writer.byte_array, writer.bitposition) structureConstraints = self.api.StructureConstraints() with self.assertRaises(zserio.PythonRuntimeException): structureConstraints.read(reader) def testReadWrongPurpleConstraint(self): writer =", "self.api.StructureConstraints() structureConstraints.read(reader) self.assertEqual(self.api.BasicColor.BLACK, structureConstraints.black_color) self.assertEqual(self.api.BasicColor.WHITE, structureConstraints.white_color) self.assertEqual(self.api.ExtendedColor.PURPLE, structureConstraints.purple_color) def testReadWrongBlackConstraint(self): writer = zserio.BitStreamWriter()", "zserio.BitStreamWriter() self.__class__._write(writer, self.api.BasicColor.RED, self.api.BasicColor.WHITE, self.api.ExtendedColor.PURPLE) reader = zserio.BitStreamReader(writer.byte_array, writer.bitposition) structureConstraints = self.api.StructureConstraints() with", "structureConstraints.write(writer) def testWriteWrongPurpleConstraint(self): structureConstraints = self.api.StructureConstraints(self.api.BasicColor.BLACK, self.api.BasicColor.WHITE, True, self.api.ExtendedColor.LIME) writer = zserio.BitStreamWriter() with", "self.api.BasicColor.WHITE, self.api.ExtendedColor.LIME) reader = zserio.BitStreamReader(writer.byte_array, writer.bitposition) structureConstraints = self.api.StructureConstraints() with self.assertRaises(zserio.PythonRuntimeException): structureConstraints.read(reader) def", "def _write(writer, blackColor, whiteColor, purpleColor): writer.write_bits(blackColor.value, 8) writer.write_bool(True) writer.write_bits(whiteColor.value, 8) writer.write_bool(True) writer.write_bits(purpleColor.value, 16)", "structureConstraints = self.api.StructureConstraints() with self.assertRaises(zserio.PythonRuntimeException): structureConstraints.read(reader) def testWriteCorrectConstraints(self): structureConstraints = 
self.api.StructureConstraints(self.api.BasicColor.BLACK, self.api.BasicColor.WHITE, True,", "self.assertRaises(zserio.PythonRuntimeException): structureConstraints.write(writer) @staticmethod def _write(writer, blackColor, whiteColor, purpleColor): writer.write_bits(blackColor.value, 8) writer.write_bool(True) writer.write_bits(whiteColor.value, 8)", "= zserio.BitStreamWriter() self.__class__._write(writer, self.api.BasicColor.BLACK, self.api.BasicColor.WHITE, self.api.ExtendedColor.LIME) reader = zserio.BitStreamReader(writer.byte_array, writer.bitposition) structureConstraints = self.api.StructureConstraints()", "import getZserioApi class StructureConstraintsTest(unittest.TestCase): @classmethod def setUpClass(cls): cls.api = getZserioApi(__file__, \"constraints.zs\").structure_constraints def testReadCorrectColors(self):", "zserio.BitStreamReader(writer.byte_array, writer.bitposition) structureConstraints = self.api.StructureConstraints() with self.assertRaises(zserio.PythonRuntimeException): structureConstraints.read(reader) def testWriteCorrectConstraints(self): structureConstraints = self.api.StructureConstraints(self.api.BasicColor.BLACK,", "= zserio.BitStreamWriter() with self.assertRaises(zserio.PythonRuntimeException): structureConstraints.write(writer) def testWriteWrongWhiteConstraint(self): structureConstraints = self.api.StructureConstraints(self.api.BasicColor.BLACK, self.api.BasicColor.RED, True, self.api.ExtendedColor.PURPLE)", "self.assertRaises(zserio.PythonRuntimeException): structureConstraints.read(reader) def testReadWrongPurpleConstraint(self): writer = zserio.BitStreamWriter() self.__class__._write(writer, self.api.BasicColor.BLACK, self.api.BasicColor.WHITE, self.api.ExtendedColor.LIME) reader =", "= self.api.StructureConstraints(self.api.BasicColor.BLACK, self.api.BasicColor.WHITE, True, self.api.ExtendedColor.PURPLE) bitBuffer = zserio.serialize(structureConstraints) readStructureConstraints = 
zserio.deserialize(self.api.StructureConstraints, bitBuffer) self.assertEqual(self.api.BasicColor.BLACK,", "structureConstraints.read(reader) self.assertEqual(self.api.BasicColor.BLACK, structureConstraints.black_color) self.assertEqual(self.api.BasicColor.WHITE, structureConstraints.white_color) self.assertEqual(self.api.ExtendedColor.PURPLE, structureConstraints.purple_color) def testReadWrongBlackConstraint(self): writer = zserio.BitStreamWriter() self.__class__._write(writer,", "def testWriteWrongBlackConstraint(self): structureConstraints = self.api.StructureConstraints(self.api.BasicColor.RED, self.api.BasicColor.WHITE, True, self.api.ExtendedColor.PURPLE) writer = zserio.BitStreamWriter() with self.assertRaises(zserio.PythonRuntimeException):", "self.api.ExtendedColor.LIME) writer = zserio.BitStreamWriter() with self.assertRaises(zserio.PythonRuntimeException): structureConstraints.write(writer) @staticmethod def _write(writer, blackColor, whiteColor, purpleColor):", "unittest import zserio from testutils import getZserioApi class StructureConstraintsTest(unittest.TestCase): @classmethod def setUpClass(cls): cls.api", "self.api.StructureConstraints(self.api.BasicColor.BLACK, self.api.BasicColor.WHITE, True, self.api.ExtendedColor.LIME) writer = zserio.BitStreamWriter() with self.assertRaises(zserio.PythonRuntimeException): structureConstraints.write(writer) @staticmethod def _write(writer,", "def testWriteCorrectConstraints(self): structureConstraints = self.api.StructureConstraints(self.api.BasicColor.BLACK, self.api.BasicColor.WHITE, True, self.api.ExtendedColor.PURPLE) bitBuffer = zserio.serialize(structureConstraints) readStructureConstraints =", "import unittest import zserio from testutils import getZserioApi class StructureConstraintsTest(unittest.TestCase): @classmethod def setUpClass(cls):", "self.api.BasicColor.WHITE, True, self.api.ExtendedColor.LIME) writer = zserio.BitStreamWriter() with self.assertRaises(zserio.PythonRuntimeException): 
structureConstraints.write(writer) @staticmethod def _write(writer, blackColor,", "with self.assertRaises(zserio.PythonRuntimeException): structureConstraints.read(reader) def testWriteCorrectConstraints(self): structureConstraints = self.api.StructureConstraints(self.api.BasicColor.BLACK, self.api.BasicColor.WHITE, True, self.api.ExtendedColor.PURPLE) bitBuffer =", "= getZserioApi(__file__, \"constraints.zs\").structure_constraints def testReadCorrectColors(self): writer = zserio.BitStreamWriter() self.__class__._write(writer, self.api.BasicColor.BLACK, self.api.BasicColor.WHITE, self.api.ExtendedColor.PURPLE) reader", "reader = zserio.BitStreamReader(writer.byte_array, writer.bitposition) structureConstraints = self.api.StructureConstraints() with self.assertRaises(zserio.PythonRuntimeException): structureConstraints.read(reader) def testWriteCorrectConstraints(self): structureConstraints", "writer.bitposition) structureConstraints = self.api.StructureConstraints() structureConstraints.read(reader) self.assertEqual(self.api.BasicColor.BLACK, structureConstraints.black_color) self.assertEqual(self.api.BasicColor.WHITE, structureConstraints.white_color) self.assertEqual(self.api.ExtendedColor.PURPLE, structureConstraints.purple_color) def testReadWrongBlackConstraint(self):", "self.api.StructureConstraints(self.api.BasicColor.BLACK, self.api.BasicColor.RED, True, self.api.ExtendedColor.PURPLE) writer = zserio.BitStreamWriter() with self.assertRaises(zserio.PythonRuntimeException): structureConstraints.write(writer) def testWriteWrongPurpleConstraint(self): structureConstraints", "readStructureConstraints.white_color) self.assertEqual(self.api.ExtendedColor.PURPLE, readStructureConstraints.purple_color) self.assertEqual(structureConstraints, readStructureConstraints) def testWriteWrongBlackConstraint(self): structureConstraints = self.api.StructureConstraints(self.api.BasicColor.RED, self.api.BasicColor.WHITE, True, self.api.ExtendedColor.PURPLE)", 
"self.api.BasicColor.WHITE, True, self.api.ExtendedColor.PURPLE) writer = zserio.BitStreamWriter() with self.assertRaises(zserio.PythonRuntimeException): structureConstraints.write(writer) def testWriteWrongWhiteConstraint(self): structureConstraints =", "True, self.api.ExtendedColor.PURPLE) writer = zserio.BitStreamWriter() with self.assertRaises(zserio.PythonRuntimeException): structureConstraints.write(writer) def testWriteWrongPurpleConstraint(self): structureConstraints = self.api.StructureConstraints(self.api.BasicColor.BLACK,", "= zserio.BitStreamWriter() self.__class__._write(writer, self.api.BasicColor.BLACK, self.api.BasicColor.RED, self.api.ExtendedColor.PURPLE) reader = zserio.BitStreamReader(writer.byte_array, writer.bitposition) structureConstraints = self.api.StructureConstraints()", "self.api.ExtendedColor.PURPLE) reader = zserio.BitStreamReader(writer.byte_array, writer.bitposition) structureConstraints = self.api.StructureConstraints() with self.assertRaises(zserio.PythonRuntimeException): structureConstraints.read(reader) def testReadWrongPurpleConstraint(self):", "= self.api.StructureConstraints() with self.assertRaises(zserio.PythonRuntimeException): structureConstraints.read(reader) def testReadWrongPurpleConstraint(self): writer = zserio.BitStreamWriter() self.__class__._write(writer, self.api.BasicColor.BLACK, self.api.BasicColor.WHITE,", "zserio.BitStreamWriter() with self.assertRaises(zserio.PythonRuntimeException): structureConstraints.write(writer) @staticmethod def _write(writer, blackColor, whiteColor, purpleColor): writer.write_bits(blackColor.value, 8) writer.write_bool(True)", "= zserio.BitStreamReader(writer.byte_array, writer.bitposition) structureConstraints = self.api.StructureConstraints() structureConstraints.read(reader) self.assertEqual(self.api.BasicColor.BLACK, structureConstraints.black_color) self.assertEqual(self.api.BasicColor.WHITE, structureConstraints.white_color) self.assertEqual(self.api.ExtendedColor.PURPLE, 
structureConstraints.purple_color)", "writer = zserio.BitStreamWriter() self.__class__._write(writer, self.api.BasicColor.BLACK, self.api.BasicColor.WHITE, self.api.ExtendedColor.PURPLE) reader = zserio.BitStreamReader(writer.byte_array, writer.bitposition) structureConstraints =", "self.api.BasicColor.WHITE, True, self.api.ExtendedColor.PURPLE) bitBuffer = zserio.serialize(structureConstraints) readStructureConstraints = zserio.deserialize(self.api.StructureConstraints, bitBuffer) self.assertEqual(self.api.BasicColor.BLACK, readStructureConstraints.black_color) self.assertEqual(self.api.BasicColor.WHITE,", "zserio.serialize(structureConstraints) readStructureConstraints = zserio.deserialize(self.api.StructureConstraints, bitBuffer) self.assertEqual(self.api.BasicColor.BLACK, readStructureConstraints.black_color) self.assertEqual(self.api.BasicColor.WHITE, readStructureConstraints.white_color) self.assertEqual(self.api.ExtendedColor.PURPLE, readStructureConstraints.purple_color) self.assertEqual(structureConstraints, readStructureConstraints)", "self.__class__._write(writer, self.api.BasicColor.BLACK, self.api.BasicColor.RED, self.api.ExtendedColor.PURPLE) reader = zserio.BitStreamReader(writer.byte_array, writer.bitposition) structureConstraints = self.api.StructureConstraints() with self.assertRaises(zserio.PythonRuntimeException):", "self.assertRaises(zserio.PythonRuntimeException): structureConstraints.read(reader) def testReadWrongWhiteConstraint(self): writer = zserio.BitStreamWriter() self.__class__._write(writer, self.api.BasicColor.BLACK, self.api.BasicColor.RED, self.api.ExtendedColor.PURPLE) reader =", "def testReadWrongPurpleConstraint(self): writer = zserio.BitStreamWriter() self.__class__._write(writer, self.api.BasicColor.BLACK, self.api.BasicColor.WHITE, self.api.ExtendedColor.LIME) reader = zserio.BitStreamReader(writer.byte_array, writer.bitposition)", "= self.api.StructureConstraints() with self.assertRaises(zserio.PythonRuntimeException): 
structureConstraints.read(reader) def testWriteCorrectConstraints(self): structureConstraints = self.api.StructureConstraints(self.api.BasicColor.BLACK, self.api.BasicColor.WHITE, True, self.api.ExtendedColor.PURPLE)", "structureConstraints = self.api.StructureConstraints() with self.assertRaises(zserio.PythonRuntimeException): structureConstraints.read(reader) def testReadWrongWhiteConstraint(self): writer = zserio.BitStreamWriter() self.__class__._write(writer, self.api.BasicColor.BLACK,", "self.api.ExtendedColor.PURPLE) bitBuffer = zserio.serialize(structureConstraints) readStructureConstraints = zserio.deserialize(self.api.StructureConstraints, bitBuffer) self.assertEqual(self.api.BasicColor.BLACK, readStructureConstraints.black_color) self.assertEqual(self.api.BasicColor.WHITE, readStructureConstraints.white_color) self.assertEqual(self.api.ExtendedColor.PURPLE,", "testReadWrongBlackConstraint(self): writer = zserio.BitStreamWriter() self.__class__._write(writer, self.api.BasicColor.RED, self.api.BasicColor.WHITE, self.api.ExtendedColor.PURPLE) reader = zserio.BitStreamReader(writer.byte_array, writer.bitposition) structureConstraints", "self.assertRaises(zserio.PythonRuntimeException): structureConstraints.read(reader) def testWriteCorrectConstraints(self): structureConstraints = self.api.StructureConstraints(self.api.BasicColor.BLACK, self.api.BasicColor.WHITE, True, self.api.ExtendedColor.PURPLE) bitBuffer = zserio.serialize(structureConstraints)", "self.api.StructureConstraints(self.api.BasicColor.BLACK, self.api.BasicColor.WHITE, True, self.api.ExtendedColor.PURPLE) bitBuffer = zserio.serialize(structureConstraints) readStructureConstraints = zserio.deserialize(self.api.StructureConstraints, bitBuffer) self.assertEqual(self.api.BasicColor.BLACK, readStructureConstraints.black_color)", "self.assertEqual(self.api.BasicColor.BLACK, structureConstraints.black_color) self.assertEqual(self.api.BasicColor.WHITE, structureConstraints.white_color) 
self.assertEqual(self.api.ExtendedColor.PURPLE, structureConstraints.purple_color) def testReadWrongBlackConstraint(self): writer = zserio.BitStreamWriter() self.__class__._write(writer, self.api.BasicColor.RED," ]
[ "in lit_genes.items() ])) bm = ( pd.read_feather('../output/seurat3-cluster-wf/combined_n3_biomarkers.feather', columns=['FBgn', 'gene_symbol', 'cluster', 'p_val_adj', 'pct.1']) .query('p_val_adj", "pandas as pd from more_itertools import flatten import os os.chdir('notebook') fbgn2symbol = (", ".drop_duplicates(subset='FBgn', keep=False) .reset_index(drop=True) .assign(cluster=lambda df: df.cluster.cat.rename_categories(CLUSTER_ANNOT)) .assign(cluster=lambda df: df.cluster.cat.reorder_categories(CLUSTER_ORDER)) .set_index('FBgn') .groupby('cluster') ) def", "v for k, v in lit_genes.items() ])) bm = ( pd.read_feather('../output/seurat3-cluster-wf/combined_n3_biomarkers.feather', columns=['FBgn', 'gene_symbol',", "'gene_symbol', 'cluster', 'p_val_adj', 'pct.1']) .query('p_val_adj <= 0.01') .drop_duplicates(subset='FBgn', keep=False) .reset_index(drop=True) .assign(cluster=lambda df: df.cluster.cat.rename_categories(CLUSTER_ANNOT))", "= yaml.safe_load(open('../config/literature_genes.yaml')) lit_genes_all = list(flatten([ v for k, v in lit_genes.items() ])) bm", "= ( pd.read_feather('../output/seurat3-cluster-wf/combined_n3_biomarkers.feather', columns=['FBgn', 'gene_symbol', 'cluster', 'p_val_adj', 'pct.1']) .query('p_val_adj <= 0.01') .drop_duplicates(subset='FBgn', keep=False)", "lit_genes = yaml.safe_load(open('../config/literature_genes.yaml')) lit_genes_all = list(flatten([ v for k, v in lit_genes.items() ]))", "df: df.cluster.cat.reorder_categories(CLUSTER_ORDER)) .set_index('FBgn') .groupby('cluster') ) def get_lit(cluster): print(cluster) df = bm.get_group(cluster) return df.reindex(lit_genes_all).dropna()", "bm.get_group(cluster) return df.reindex(lit_genes_all).dropna() get_lit('SP') get_lit('EPS') get_lit('PS1') get_lit('PS2') get_lit('PS3') get_lit('ECY') get_lit(\"CY1\") get_lit(\"CY2\") get_lit(\"TE\") get_lit(\"PC\")", "fbgn2symbol = ( pd.read_feather('../references/gene_annotation_dmel_r6-26.feather', columns=['FBgn', 'gene_symbol']) 
.set_index('FBgn') .to_dict()['gene_symbol'] ) config = yaml.safe_load(open('../config/common.yaml')) CLUSTER_ANNOT", "0.01') .drop_duplicates(subset='FBgn', keep=False) .reset_index(drop=True) .assign(cluster=lambda df: df.cluster.cat.rename_categories(CLUSTER_ANNOT)) .assign(cluster=lambda df: df.cluster.cat.reorder_categories(CLUSTER_ORDER)) .set_index('FBgn') .groupby('cluster') )", ") config = yaml.safe_load(open('../config/common.yaml')) CLUSTER_ANNOT = config['cluster_annot'] CLUSTER_ORDER = config['cluster_order'] lit_genes = yaml.safe_load(open('../config/literature_genes.yaml'))", "v in lit_genes.items() ])) bm = ( pd.read_feather('../output/seurat3-cluster-wf/combined_n3_biomarkers.feather', columns=['FBgn', 'gene_symbol', 'cluster', 'p_val_adj', 'pct.1'])", "'cluster', 'p_val_adj', 'pct.1']) .query('p_val_adj <= 0.01') .drop_duplicates(subset='FBgn', keep=False) .reset_index(drop=True) .assign(cluster=lambda df: df.cluster.cat.rename_categories(CLUSTER_ANNOT)) .assign(cluster=lambda", ") def get_lit(cluster): print(cluster) df = bm.get_group(cluster) return df.reindex(lit_genes_all).dropna() get_lit('SP') get_lit('EPS') get_lit('PS1') get_lit('PS2')", "import pandas as pd from more_itertools import flatten import os os.chdir('notebook') fbgn2symbol =", "keep=False) .reset_index(drop=True) .assign(cluster=lambda df: df.cluster.cat.rename_categories(CLUSTER_ANNOT)) .assign(cluster=lambda df: df.cluster.cat.reorder_categories(CLUSTER_ORDER)) .set_index('FBgn') .groupby('cluster') ) def get_lit(cluster):", "config = yaml.safe_load(open('../config/common.yaml')) CLUSTER_ANNOT = config['cluster_annot'] CLUSTER_ORDER = config['cluster_order'] lit_genes = yaml.safe_load(open('../config/literature_genes.yaml')) lit_genes_all", "lit_genes_all = list(flatten([ v for k, v in lit_genes.items() ])) bm = (", "k, v in lit_genes.items() ])) bm = ( pd.read_feather('../output/seurat3-cluster-wf/combined_n3_biomarkers.feather', columns=['FBgn', 'gene_symbol', 'cluster', 
'p_val_adj',", "bm = ( pd.read_feather('../output/seurat3-cluster-wf/combined_n3_biomarkers.feather', columns=['FBgn', 'gene_symbol', 'cluster', 'p_val_adj', 'pct.1']) .query('p_val_adj <= 0.01') .drop_duplicates(subset='FBgn',", "from more_itertools import flatten import os os.chdir('notebook') fbgn2symbol = ( pd.read_feather('../references/gene_annotation_dmel_r6-26.feather', columns=['FBgn', 'gene_symbol'])", "for k, v in lit_genes.items() ])) bm = ( pd.read_feather('../output/seurat3-cluster-wf/combined_n3_biomarkers.feather', columns=['FBgn', 'gene_symbol', 'cluster',", "import yaml import pandas as pd from more_itertools import flatten import os os.chdir('notebook')", "= list(flatten([ v for k, v in lit_genes.items() ])) bm = ( pd.read_feather('../output/seurat3-cluster-wf/combined_n3_biomarkers.feather',", "import os os.chdir('notebook') fbgn2symbol = ( pd.read_feather('../references/gene_annotation_dmel_r6-26.feather', columns=['FBgn', 'gene_symbol']) .set_index('FBgn') .to_dict()['gene_symbol'] ) config", "= yaml.safe_load(open('../config/common.yaml')) CLUSTER_ANNOT = config['cluster_annot'] CLUSTER_ORDER = config['cluster_order'] lit_genes = yaml.safe_load(open('../config/literature_genes.yaml')) lit_genes_all =", "pd.read_feather('../references/gene_annotation_dmel_r6-26.feather', columns=['FBgn', 'gene_symbol']) .set_index('FBgn') .to_dict()['gene_symbol'] ) config = yaml.safe_load(open('../config/common.yaml')) CLUSTER_ANNOT = config['cluster_annot'] CLUSTER_ORDER", "<= 0.01') .drop_duplicates(subset='FBgn', keep=False) .reset_index(drop=True) .assign(cluster=lambda df: df.cluster.cat.rename_categories(CLUSTER_ANNOT)) .assign(cluster=lambda df: df.cluster.cat.reorder_categories(CLUSTER_ORDER)) .set_index('FBgn') .groupby('cluster')", ".assign(cluster=lambda df: df.cluster.cat.reorder_categories(CLUSTER_ORDER)) .set_index('FBgn') .groupby('cluster') ) def get_lit(cluster): print(cluster) df = bm.get_group(cluster) return", "= bm.get_group(cluster) 
return df.reindex(lit_genes_all).dropna() get_lit('SP') get_lit('EPS') get_lit('PS1') get_lit('PS2') get_lit('PS3') get_lit('ECY') get_lit(\"CY1\") get_lit(\"CY2\") get_lit(\"TE\")", "df.cluster.cat.rename_categories(CLUSTER_ANNOT)) .assign(cluster=lambda df: df.cluster.cat.reorder_categories(CLUSTER_ORDER)) .set_index('FBgn') .groupby('cluster') ) def get_lit(cluster): print(cluster) df = bm.get_group(cluster)", "CLUSTER_ORDER = config['cluster_order'] lit_genes = yaml.safe_load(open('../config/literature_genes.yaml')) lit_genes_all = list(flatten([ v for k, v", "os os.chdir('notebook') fbgn2symbol = ( pd.read_feather('../references/gene_annotation_dmel_r6-26.feather', columns=['FBgn', 'gene_symbol']) .set_index('FBgn') .to_dict()['gene_symbol'] ) config =", "'p_val_adj', 'pct.1']) .query('p_val_adj <= 0.01') .drop_duplicates(subset='FBgn', keep=False) .reset_index(drop=True) .assign(cluster=lambda df: df.cluster.cat.rename_categories(CLUSTER_ANNOT)) .assign(cluster=lambda df:", ".to_dict()['gene_symbol'] ) config = yaml.safe_load(open('../config/common.yaml')) CLUSTER_ANNOT = config['cluster_annot'] CLUSTER_ORDER = config['cluster_order'] lit_genes =", "df = bm.get_group(cluster) return df.reindex(lit_genes_all).dropna() get_lit('SP') get_lit('EPS') get_lit('PS1') get_lit('PS2') get_lit('PS3') get_lit('ECY') get_lit(\"CY1\") get_lit(\"CY2\")", "def get_lit(cluster): print(cluster) df = bm.get_group(cluster) return df.reindex(lit_genes_all).dropna() get_lit('SP') get_lit('EPS') get_lit('PS1') get_lit('PS2') get_lit('PS3')", "flatten import os os.chdir('notebook') fbgn2symbol = ( pd.read_feather('../references/gene_annotation_dmel_r6-26.feather', columns=['FBgn', 'gene_symbol']) .set_index('FBgn') .to_dict()['gene_symbol'] )", "lit_genes.items() ])) bm = ( pd.read_feather('../output/seurat3-cluster-wf/combined_n3_biomarkers.feather', columns=['FBgn', 'gene_symbol', 'cluster', 'p_val_adj', 'pct.1']) .query('p_val_adj <=", "df: 
df.cluster.cat.rename_categories(CLUSTER_ANNOT)) .assign(cluster=lambda df: df.cluster.cat.reorder_categories(CLUSTER_ORDER)) .set_index('FBgn') .groupby('cluster') ) def get_lit(cluster): print(cluster) df =", "CLUSTER_ANNOT = config['cluster_annot'] CLUSTER_ORDER = config['cluster_order'] lit_genes = yaml.safe_load(open('../config/literature_genes.yaml')) lit_genes_all = list(flatten([ v", ".groupby('cluster') ) def get_lit(cluster): print(cluster) df = bm.get_group(cluster) return df.reindex(lit_genes_all).dropna() get_lit('SP') get_lit('EPS') get_lit('PS1')", "])) bm = ( pd.read_feather('../output/seurat3-cluster-wf/combined_n3_biomarkers.feather', columns=['FBgn', 'gene_symbol', 'cluster', 'p_val_adj', 'pct.1']) .query('p_val_adj <= 0.01')", "( pd.read_feather('../output/seurat3-cluster-wf/combined_n3_biomarkers.feather', columns=['FBgn', 'gene_symbol', 'cluster', 'p_val_adj', 'pct.1']) .query('p_val_adj <= 0.01') .drop_duplicates(subset='FBgn', keep=False) .reset_index(drop=True)", ".query('p_val_adj <= 0.01') .drop_duplicates(subset='FBgn', keep=False) .reset_index(drop=True) .assign(cluster=lambda df: df.cluster.cat.rename_categories(CLUSTER_ANNOT)) .assign(cluster=lambda df: df.cluster.cat.reorder_categories(CLUSTER_ORDER)) .set_index('FBgn')", "= ( pd.read_feather('../references/gene_annotation_dmel_r6-26.feather', columns=['FBgn', 'gene_symbol']) .set_index('FBgn') .to_dict()['gene_symbol'] ) config = yaml.safe_load(open('../config/common.yaml')) CLUSTER_ANNOT =", "config['cluster_annot'] CLUSTER_ORDER = config['cluster_order'] lit_genes = yaml.safe_load(open('../config/literature_genes.yaml')) lit_genes_all = list(flatten([ v for k,", "list(flatten([ v for k, v in lit_genes.items() ])) bm = ( pd.read_feather('../output/seurat3-cluster-wf/combined_n3_biomarkers.feather', columns=['FBgn',", ".reset_index(drop=True) .assign(cluster=lambda df: df.cluster.cat.rename_categories(CLUSTER_ANNOT)) .assign(cluster=lambda df: 
df.cluster.cat.reorder_categories(CLUSTER_ORDER)) .set_index('FBgn') .groupby('cluster') ) def get_lit(cluster): print(cluster)", "os.chdir('notebook') fbgn2symbol = ( pd.read_feather('../references/gene_annotation_dmel_r6-26.feather', columns=['FBgn', 'gene_symbol']) .set_index('FBgn') .to_dict()['gene_symbol'] ) config = yaml.safe_load(open('../config/common.yaml'))", "get_lit(cluster): print(cluster) df = bm.get_group(cluster) return df.reindex(lit_genes_all).dropna() get_lit('SP') get_lit('EPS') get_lit('PS1') get_lit('PS2') get_lit('PS3') get_lit('ECY')", "yaml.safe_load(open('../config/common.yaml')) CLUSTER_ANNOT = config['cluster_annot'] CLUSTER_ORDER = config['cluster_order'] lit_genes = yaml.safe_load(open('../config/literature_genes.yaml')) lit_genes_all = list(flatten([", ".set_index('FBgn') .to_dict()['gene_symbol'] ) config = yaml.safe_load(open('../config/common.yaml')) CLUSTER_ANNOT = config['cluster_annot'] CLUSTER_ORDER = config['cluster_order'] lit_genes", ".assign(cluster=lambda df: df.cluster.cat.rename_categories(CLUSTER_ANNOT)) .assign(cluster=lambda df: df.cluster.cat.reorder_categories(CLUSTER_ORDER)) .set_index('FBgn') .groupby('cluster') ) def get_lit(cluster): print(cluster) df", "df.cluster.cat.reorder_categories(CLUSTER_ORDER)) .set_index('FBgn') .groupby('cluster') ) def get_lit(cluster): print(cluster) df = bm.get_group(cluster) return df.reindex(lit_genes_all).dropna() get_lit('SP')", "pd.read_feather('../output/seurat3-cluster-wf/combined_n3_biomarkers.feather', columns=['FBgn', 'gene_symbol', 'cluster', 'p_val_adj', 'pct.1']) .query('p_val_adj <= 0.01') .drop_duplicates(subset='FBgn', keep=False) .reset_index(drop=True) .assign(cluster=lambda", "'gene_symbol']) .set_index('FBgn') .to_dict()['gene_symbol'] ) config = yaml.safe_load(open('../config/common.yaml')) CLUSTER_ANNOT = config['cluster_annot'] CLUSTER_ORDER = config['cluster_order']", "more_itertools import flatten import os os.chdir('notebook') fbgn2symbol = ( 
pd.read_feather('../references/gene_annotation_dmel_r6-26.feather', columns=['FBgn', 'gene_symbol']) .set_index('FBgn')", "= config['cluster_order'] lit_genes = yaml.safe_load(open('../config/literature_genes.yaml')) lit_genes_all = list(flatten([ v for k, v in", "= config['cluster_annot'] CLUSTER_ORDER = config['cluster_order'] lit_genes = yaml.safe_load(open('../config/literature_genes.yaml')) lit_genes_all = list(flatten([ v for", ".set_index('FBgn') .groupby('cluster') ) def get_lit(cluster): print(cluster) df = bm.get_group(cluster) return df.reindex(lit_genes_all).dropna() get_lit('SP') get_lit('EPS')", "print(cluster) df = bm.get_group(cluster) return df.reindex(lit_genes_all).dropna() get_lit('SP') get_lit('EPS') get_lit('PS1') get_lit('PS2') get_lit('PS3') get_lit('ECY') get_lit(\"CY1\")", "columns=['FBgn', 'gene_symbol', 'cluster', 'p_val_adj', 'pct.1']) .query('p_val_adj <= 0.01') .drop_duplicates(subset='FBgn', keep=False) .reset_index(drop=True) .assign(cluster=lambda df:", "pd from more_itertools import flatten import os os.chdir('notebook') fbgn2symbol = ( pd.read_feather('../references/gene_annotation_dmel_r6-26.feather', columns=['FBgn',", "as pd from more_itertools import flatten import os os.chdir('notebook') fbgn2symbol = ( pd.read_feather('../references/gene_annotation_dmel_r6-26.feather',", "config['cluster_order'] lit_genes = yaml.safe_load(open('../config/literature_genes.yaml')) lit_genes_all = list(flatten([ v for k, v in lit_genes.items()", "( pd.read_feather('../references/gene_annotation_dmel_r6-26.feather', columns=['FBgn', 'gene_symbol']) .set_index('FBgn') .to_dict()['gene_symbol'] ) config = yaml.safe_load(open('../config/common.yaml')) CLUSTER_ANNOT = config['cluster_annot']", "columns=['FBgn', 'gene_symbol']) .set_index('FBgn') .to_dict()['gene_symbol'] ) config = yaml.safe_load(open('../config/common.yaml')) CLUSTER_ANNOT = config['cluster_annot'] CLUSTER_ORDER =", "import flatten import os os.chdir('notebook') fbgn2symbol = ( 
pd.read_feather('../references/gene_annotation_dmel_r6-26.feather', columns=['FBgn', 'gene_symbol']) .set_index('FBgn') .to_dict()['gene_symbol']", "'pct.1']) .query('p_val_adj <= 0.01') .drop_duplicates(subset='FBgn', keep=False) .reset_index(drop=True) .assign(cluster=lambda df: df.cluster.cat.rename_categories(CLUSTER_ANNOT)) .assign(cluster=lambda df: df.cluster.cat.reorder_categories(CLUSTER_ORDER))", "yaml.safe_load(open('../config/literature_genes.yaml')) lit_genes_all = list(flatten([ v for k, v in lit_genes.items() ])) bm =", "yaml import pandas as pd from more_itertools import flatten import os os.chdir('notebook') fbgn2symbol" ]
[ "put the feeld headers = { \"Content-type\": \"application/json\", \"Cache-Control\": \"no-cache\", \"user-agent\": \"Put your", "191.25: print(\"Sør\") return \"South\" elif wind_from_direction < 236.25: print(\"Sørvest\") return \"Southwest\" elif wind_from_direction", "requests # api module import json #Save as Json #Remmen location og altitude:", "print(\"Sørøst\") return \"Southeast\" elif wind_from_direction < 191.25: print(\"Sør\") return \"South\" elif wind_from_direction <", "80 https://api.met.no/weatherapi/locationforecast/2.0/#!/data/get_compact_format request api: https://api.met.no/weatherapi/locationforecast/2.0/compact?altitude=80&lat=63.4305&lon=10.3950 curl: curl -X GET --header 'Accept: application/json' 'https://api.met.no/weatherapi/locationforecast/2.0/compact?altitude=80&lat=59.1304&lon=11.3545'", "write_json_file(): \"\"\" Save data as json file \"\"\" with open('yr_data_complete_format.json', 'w') as f:", "from yr. Location Remmen, Halden: lat: 59.1304, lon: 11.3546, altitude: ca. 80 https://api.met.no/weatherapi/locationforecast/2.0/#!/data/get_compact_format", "wind_speed = (data[\"properties\"][\"timeseries\"][0][\"data\"][\"instant\"][\"details\"][\"wind_speed\"]) return wind_speed # Precentage value of the total cloud cover", "print(\"Øst\") return \"East\" elif wind_from_direction < 146.25: print(\"Sørøst\") return \"Southeast\" elif wind_from_direction <", "(data[\"properties\"][\"meta\"][\"updated_at\"]) return updated #print(data[\"properties\"][\"timeseries\"][0][\"data\"][\"instant\"][\"details\"]) def air_temperature(): \"\"\" Return the instant air temperature in", "m/s \"\"\" wind_speed = (data[\"properties\"][\"timeseries\"][0][\"data\"][\"instant\"][\"details\"][\"wind_speed\"]) return wind_speed # Precentage value of the total", "\"80\" # url to yr api url = \"https://api.met.no/weatherapi/locationforecast/2.0/complete.json?altitude=\" + alt + \"&lat=\"", "tell yr where the request is coming from. # NB! 
Find your user-agent", "data = response.json() def write_json_file(): \"\"\" Save data as json file \"\"\" with", "updated = (data[\"properties\"][\"meta\"][\"updated_at\"]) return updated #print(data[\"properties\"][\"timeseries\"][0][\"data\"][\"instant\"][\"details\"]) def air_temperature(): \"\"\" Return the instant air", "in celsius \"\"\" air_temp = (data[\"properties\"][\"timeseries\"][0][\"data\"][\"instant\"][\"details\"][\"air_temperature\"]) return air_temp def wind_speed(): \"\"\" Wind speed", "def write_json_file(): \"\"\" Save data as json file \"\"\" with open('yr_data_complete_format.json', 'w') as", "json file \"\"\" with open('yr_data_complete_format.json', 'w') as f: json.dump(data, f) write_json_file() # TODO!", "\"South\" elif wind_from_direction < 236.25: print(\"Sørvest\") return \"Southwest\" elif wind_from_direction < 281.25: print(\"Vest\")", "headers=headers) if response: print('Success!') else: print('An error has occurred.') data = response.json() def", "as f: json.dump(data, f) write_json_file() # TODO! 
If-Modified-Since def updated_time(): \"\"\" Time updated", "# get the json api response = requests.request(\"GET\", url, headers=headers) if response: print('Success!')", "\"Southeast\" elif wind_from_direction < 191.25: print(\"Sør\") return \"South\" elif wind_from_direction < 236.25: print(\"Sørvest\")", "cloud_area = (data[\"properties\"][\"timeseries\"][0][\"data\"][\"instant\"][\"details\"][\"cloud_area_fraction\"]) rel_humidity = (data[\"properties\"][\"timeseries\"][0][\"data\"][\"instant\"][\"details\"][\"relative_humidity\"]) def summary_1_hour(): \"\"\" String value giving a", "api url = \"https://api.met.no/weatherapi/locationforecast/2.0/complete.json?altitude=\" + alt + \"&lat=\" + lat + \"&lon=\" +", "location og altitude: lat = \"59.1304\" lon = \"11.3546\" alt = \"80\" #", "or wind_from_direction < 11.25: print(\"Nord\") return \"North\" elif wind_from_direction < 56.25: print(\"Nordøst\") return", "Header to tell yr where the request is coming from. # NB! Find", "\"\"\" <NAME> json from yr. 
Location Remmen, Halden: lat: 59.1304, lon: 11.3546, altitude:", "headers = { \"Content-type\": \"application/json\", \"Cache-Control\": \"no-cache\", \"user-agent\": \"Put your user-agent here\" }", "direction \"\"\" wind_from_direction = (data[\"properties\"][\"timeseries\"][0][\"data\"][\"instant\"][\"details\"][\"wind_from_direction\"]) if wind_from_direction > 326.25 or wind_from_direction < 11.25:", "the feeld headers = { \"Content-type\": \"application/json\", \"Cache-Control\": \"no-cache\", \"user-agent\": \"Put your user-agent", "\"\"\" Time updated at yr \"\"\" updated = (data[\"properties\"][\"meta\"][\"updated_at\"]) return updated #print(data[\"properties\"][\"timeseries\"][0][\"data\"][\"instant\"][\"details\"]) def", "https://api.met.no/weatherapi/locationforecast/2.0/compact?altitude=80&lat=63.4305&lon=10.3950 curl: curl -X GET --header 'Accept: application/json' 'https://api.met.no/weatherapi/locationforecast/2.0/compact?altitude=80&lat=59.1304&lon=11.3545' \"\"\" import requests #", "= (data[\"properties\"][\"timeseries\"][0][\"data\"][\"instant\"][\"details\"][\"air_temperature\"]) return air_temp def wind_speed(): \"\"\" Wind speed in m/s \"\"\" wind_speed", "https://api.met.no/weatherapi/locationforecast/2.0/#!/data/get_compact_format request api: https://api.met.no/weatherapi/locationforecast/2.0/compact?altitude=80&lat=63.4305&lon=10.3950 curl: curl -X GET --header 'Accept: application/json' 'https://api.met.no/weatherapi/locationforecast/2.0/compact?altitude=80&lat=59.1304&lon=11.3545' \"\"\"", "heights cloud_area = (data[\"properties\"][\"timeseries\"][0][\"data\"][\"instant\"][\"details\"][\"cloud_area_fraction\"]) rel_humidity = (data[\"properties\"][\"timeseries\"][0][\"data\"][\"instant\"][\"details\"][\"relative_humidity\"]) def summary_1_hour(): \"\"\" String value giving", "\"Northeast\" elif wind_from_direction < 101.25: print(\"Øst\") return \"East\" elif wind_from_direction < 146.25: print(\"Sørøst\")", "api response = 
requests.request(\"GET\", url, headers=headers) if response: print('Success!') else: print('An error has", "the instant air temperature in celsius \"\"\" air_temp = (data[\"properties\"][\"timeseries\"][0][\"data\"][\"instant\"][\"details\"][\"air_temperature\"]) return air_temp def", "summary_1_hour(): \"\"\" String value giving a summary for +1 hour \"\"\" summary_1_hour =", "print(\"Sørvest\") return \"Southwest\" elif wind_from_direction < 281.25: print(\"Vest\") return \"West\" elif wind_from_direction <", "'w') as f: json.dump(data, f) write_json_file() # TODO! If-Modified-Since def updated_time(): \"\"\" Time", "(data[\"properties\"][\"timeseries\"][0][\"data\"][\"instant\"][\"details\"][\"cloud_area_fraction\"]) rel_humidity = (data[\"properties\"][\"timeseries\"][0][\"data\"][\"instant\"][\"details\"][\"relative_humidity\"]) def summary_1_hour(): \"\"\" String value giving a summary for", "\"East\" elif wind_from_direction < 146.25: print(\"Sørøst\") return \"Southeast\" elif wind_from_direction < 191.25: print(\"Sør\")", "coming from. # NB! Find your user-agent and put the feeld headers =", "lon: 11.3546, altitude: ca. 
80 https://api.met.no/weatherapi/locationforecast/2.0/#!/data/get_compact_format request api: https://api.met.no/weatherapi/locationforecast/2.0/compact?altitude=80&lat=63.4305&lon=10.3950 curl: curl -X GET", "= requests.request(\"GET\", url, headers=headers) if response: print('Success!') else: print('An error has occurred.') data", "hour in mm \"\"\" precipitation_1_hour = (data[\"properties\"][\"timeseries\"][0][\"data\"][\"next_1_hours\"][\"details\"][\"precipitation_amount\"]) return precipitation_1_hour def wind_direction(): \"\"\" Return", "wind from direction \"\"\" wind_from_direction = (data[\"properties\"][\"timeseries\"][0][\"data\"][\"instant\"][\"details\"][\"wind_from_direction\"]) if wind_from_direction > 326.25 or wind_from_direction", "api module import json #Save as Json #Remmen location og altitude: lat =", "\"North\" elif wind_from_direction < 56.25: print(\"Nordøst\") return \"Northeast\" elif wind_from_direction < 101.25: print(\"Øst\")", "ca. 80 https://api.met.no/weatherapi/locationforecast/2.0/#!/data/get_compact_format request api: https://api.met.no/weatherapi/locationforecast/2.0/compact?altitude=80&lat=63.4305&lon=10.3950 curl: curl -X GET --header 'Accept: application/json'", "146.25: print(\"Sørøst\") return \"Southeast\" elif wind_from_direction < 191.25: print(\"Sør\") return \"South\" elif wind_from_direction", "has occurred.') data = response.json() def write_json_file(): \"\"\" Save data as json file", "wind_from_direction = (data[\"properties\"][\"timeseries\"][0][\"data\"][\"instant\"][\"details\"][\"wind_from_direction\"]) if wind_from_direction > 326.25 or wind_from_direction < 11.25: print(\"Nord\") return", "59.1304, lon: 11.3546, altitude: ca. 
80 https://api.met.no/weatherapi/locationforecast/2.0/#!/data/get_compact_format request api: https://api.met.no/weatherapi/locationforecast/2.0/compact?altitude=80&lat=63.4305&lon=10.3950 curl: curl -X", "< 11.25: print(\"Nord\") return \"North\" elif wind_from_direction < 56.25: print(\"Nordøst\") return \"Northeast\" elif", "wind_speed(): \"\"\" Wind speed in m/s \"\"\" wind_speed = (data[\"properties\"][\"timeseries\"][0][\"data\"][\"instant\"][\"details\"][\"wind_speed\"]) return wind_speed #", "requests.request(\"GET\", url, headers=headers) if response: print('Success!') else: print('An error has occurred.') data =", "with open('yr_data_complete_format.json', 'w') as f: json.dump(data, f) write_json_file() # TODO! If-Modified-Since def updated_time():", "cloud cover at all heights cloud_area = (data[\"properties\"][\"timeseries\"][0][\"data\"][\"instant\"][\"details\"][\"cloud_area_fraction\"]) rel_humidity = (data[\"properties\"][\"timeseries\"][0][\"data\"][\"instant\"][\"details\"][\"relative_humidity\"]) def summary_1_hour():", "String value giving a summary for +1 hour \"\"\" summary_1_hour = (data[\"properties\"][\"timeseries\"][0][\"data\"][\"next_1_hours\"][\"summary\"][\"symbol_code\"]) return", "Time updated at yr \"\"\" updated = (data[\"properties\"][\"meta\"][\"updated_at\"]) return updated #print(data[\"properties\"][\"timeseries\"][0][\"data\"][\"instant\"][\"details\"]) def air_temperature():", "request is coming from. # NB! Find your user-agent and put the feeld", "'https://api.met.no/weatherapi/locationforecast/2.0/compact?altitude=80&lat=59.1304&lon=11.3545' \"\"\" import requests # api module import json #Save as Json #Remmen", "open('yr_data_complete_format.json', 'w') as f: json.dump(data, f) write_json_file() # TODO! If-Modified-Since def updated_time(): \"\"\"", "is coming from. # NB! Find your user-agent and put the feeld headers", "lon # Header to tell yr where the request is coming from. 
#", "to yr api url = \"https://api.met.no/weatherapi/locationforecast/2.0/complete.json?altitude=\" + alt + \"&lat=\" + lat +", "> 326.25 or wind_from_direction < 11.25: print(\"Nord\") return \"North\" elif wind_from_direction < 56.25:", "return \"Southwest\" elif wind_from_direction < 281.25: print(\"Vest\") return \"West\" elif wind_from_direction < 326.25:", "+ \"&lat=\" + lat + \"&lon=\" + lon # Header to tell yr", "json #Save as Json #Remmen location og altitude: lat = \"59.1304\" lon =", "\"https://api.met.no/weatherapi/locationforecast/2.0/complete.json?altitude=\" + alt + \"&lat=\" + lat + \"&lon=\" + lon # Header", "elif wind_from_direction < 281.25: print(\"Vest\") return \"West\" elif wind_from_direction < 326.25: print(\"Nordvest\") return", "yr api url = \"https://api.met.no/weatherapi/locationforecast/2.0/complete.json?altitude=\" + alt + \"&lat=\" + lat + \"&lon=\"", "return updated #print(data[\"properties\"][\"timeseries\"][0][\"data\"][\"instant\"][\"details\"]) def air_temperature(): \"\"\" Return the instant air temperature in celsius", "for +1 hour in mm \"\"\" precipitation_1_hour = (data[\"properties\"][\"timeseries\"][0][\"data\"][\"next_1_hours\"][\"details\"][\"precipitation_amount\"]) return precipitation_1_hour def wind_direction():", "= \"80\" # url to yr api url = \"https://api.met.no/weatherapi/locationforecast/2.0/complete.json?altitude=\" + alt +", "speed in m/s \"\"\" wind_speed = (data[\"properties\"][\"timeseries\"][0][\"data\"][\"instant\"][\"details\"][\"wind_speed\"]) return wind_speed # Precentage value of", "all heights cloud_area = (data[\"properties\"][\"timeseries\"][0][\"data\"][\"instant\"][\"details\"][\"cloud_area_fraction\"]) rel_humidity = (data[\"properties\"][\"timeseries\"][0][\"data\"][\"instant\"][\"details\"][\"relative_humidity\"]) def summary_1_hour(): \"\"\" String value", "56.25: print(\"Nordøst\") return \"Northeast\" elif wind_from_direction < 101.25: print(\"Øst\") return \"East\" elif 
wind_from_direction", "+ lon # Header to tell yr where the request is coming from.", "return \"East\" elif wind_from_direction < 146.25: print(\"Sørøst\") return \"Southeast\" elif wind_from_direction < 191.25:", "your user-agent and put the feeld headers = { \"Content-type\": \"application/json\", \"Cache-Control\": \"no-cache\",", "Find your user-agent and put the feeld headers = { \"Content-type\": \"application/json\", \"Cache-Control\":", "< 281.25: print(\"Vest\") return \"West\" elif wind_from_direction < 326.25: print(\"Nordvest\") return \"Northwest\" #print(wind_direction())", "lat: 59.1304, lon: 11.3546, altitude: ca. 80 https://api.met.no/weatherapi/locationforecast/2.0/#!/data/get_compact_format request api: https://api.met.no/weatherapi/locationforecast/2.0/compact?altitude=80&lat=63.4305&lon=10.3950 curl: curl", "= (data[\"properties\"][\"timeseries\"][0][\"data\"][\"instant\"][\"details\"][\"wind_from_direction\"]) if wind_from_direction > 326.25 or wind_from_direction < 11.25: print(\"Nord\") return \"North\"", "the json api response = requests.request(\"GET\", url, headers=headers) if response: print('Success!') else: print('An", "\"Content-type\": \"application/json\", \"Cache-Control\": \"no-cache\", \"user-agent\": \"Put your user-agent here\" } # get the", "<NAME> json from yr. 
Location Remmen, Halden: lat: 59.1304, lon: 11.3546, altitude: ca.", "= (data[\"properties\"][\"timeseries\"][0][\"data\"][\"next_1_hours\"][\"summary\"][\"symbol_code\"]) return summary_1_hour def precipitation_1_hour(): \"\"\" Precipitation for +1 hour in mm", "+ lat + \"&lon=\" + lon # Header to tell yr where the", "return \"Northeast\" elif wind_from_direction < 101.25: print(\"Øst\") return \"East\" elif wind_from_direction < 146.25:", "giving a summary for +1 hour \"\"\" summary_1_hour = (data[\"properties\"][\"timeseries\"][0][\"data\"][\"next_1_hours\"][\"summary\"][\"symbol_code\"]) return summary_1_hour def", "(data[\"properties\"][\"timeseries\"][0][\"data\"][\"next_1_hours\"][\"summary\"][\"symbol_code\"]) return summary_1_hour def precipitation_1_hour(): \"\"\" Precipitation for +1 hour in mm \"\"\"", "\"\"\" Return the wind from direction \"\"\" wind_from_direction = (data[\"properties\"][\"timeseries\"][0][\"data\"][\"instant\"][\"details\"][\"wind_from_direction\"]) if wind_from_direction >", "+ \"&lon=\" + lon # Header to tell yr where the request is", "temperature in celsius \"\"\" air_temp = (data[\"properties\"][\"timeseries\"][0][\"data\"][\"instant\"][\"details\"][\"air_temperature\"]) return air_temp def wind_speed(): \"\"\" Wind", "def updated_time(): \"\"\" Time updated at yr \"\"\" updated = (data[\"properties\"][\"meta\"][\"updated_at\"]) return updated", "#Save as Json #Remmen location og altitude: lat = \"59.1304\" lon = \"11.3546\"", "user-agent and put the feeld headers = { \"Content-type\": \"application/json\", \"Cache-Control\": \"no-cache\", \"user-agent\":", "\"\"\" Save data as json file \"\"\" with open('yr_data_complete_format.json', 'w') as f: json.dump(data,", "your user-agent here\" } # get the json api response = requests.request(\"GET\", url,", "\"\"\" Return the instant air temperature in celsius \"\"\" air_temp = (data[\"properties\"][\"timeseries\"][0][\"data\"][\"instant\"][\"details\"][\"air_temperature\"]) return", 
"Precentage value of the total cloud cover at all heights cloud_area = (data[\"properties\"][\"timeseries\"][0][\"data\"][\"instant\"][\"details\"][\"cloud_area_fraction\"])", "326.25 or wind_from_direction < 11.25: print(\"Nord\") return \"North\" elif wind_from_direction < 56.25: print(\"Nordøst\")", "curl -X GET --header 'Accept: application/json' 'https://api.met.no/weatherapi/locationforecast/2.0/compact?altitude=80&lat=59.1304&lon=11.3545' \"\"\" import requests # api module", "print(\"Nordøst\") return \"Northeast\" elif wind_from_direction < 101.25: print(\"Øst\") return \"East\" elif wind_from_direction <", "# api module import json #Save as Json #Remmen location og altitude: lat", "file \"\"\" with open('yr_data_complete_format.json', 'w') as f: json.dump(data, f) write_json_file() # TODO! If-Modified-Since", "\"\"\" Precipitation for +1 hour in mm \"\"\" precipitation_1_hour = (data[\"properties\"][\"timeseries\"][0][\"data\"][\"next_1_hours\"][\"details\"][\"precipitation_amount\"]) return precipitation_1_hour", "application/json' 'https://api.met.no/weatherapi/locationforecast/2.0/compact?altitude=80&lat=59.1304&lon=11.3545' \"\"\" import requests # api module import json #Save as Json", "Location Remmen, Halden: lat: 59.1304, lon: 11.3546, altitude: ca. 80 https://api.met.no/weatherapi/locationforecast/2.0/#!/data/get_compact_format request api:", "#Remmen location og altitude: lat = \"59.1304\" lon = \"11.3546\" alt = \"80\"", "where the request is coming from. # NB! 
Find your user-agent and put", "import requests # api module import json #Save as Json #Remmen location og", "return air_temp def wind_speed(): \"\"\" Wind speed in m/s \"\"\" wind_speed = (data[\"properties\"][\"timeseries\"][0][\"data\"][\"instant\"][\"details\"][\"wind_speed\"])", "feeld headers = { \"Content-type\": \"application/json\", \"Cache-Control\": \"no-cache\", \"user-agent\": \"Put your user-agent here\"", "value of the total cloud cover at all heights cloud_area = (data[\"properties\"][\"timeseries\"][0][\"data\"][\"instant\"][\"details\"][\"cloud_area_fraction\"]) rel_humidity", "og altitude: lat = \"59.1304\" lon = \"11.3546\" alt = \"80\" # url", "elif wind_from_direction < 101.25: print(\"Øst\") return \"East\" elif wind_from_direction < 146.25: print(\"Sørøst\") return", "101.25: print(\"Øst\") return \"East\" elif wind_from_direction < 146.25: print(\"Sørøst\") return \"Southeast\" elif wind_from_direction", "wind_from_direction < 146.25: print(\"Sørøst\") return \"Southeast\" elif wind_from_direction < 191.25: print(\"Sør\") return \"South\"", "rel_humidity = (data[\"properties\"][\"timeseries\"][0][\"data\"][\"instant\"][\"details\"][\"relative_humidity\"]) def summary_1_hour(): \"\"\" String value giving a summary for +1", "updated_time(): \"\"\" Time updated at yr \"\"\" updated = (data[\"properties\"][\"meta\"][\"updated_at\"]) return updated #print(data[\"properties\"][\"timeseries\"][0][\"data\"][\"instant\"][\"details\"])", "from. # NB! 
Find your user-agent and put the feeld headers = {", "air_temp def wind_speed(): \"\"\" Wind speed in m/s \"\"\" wind_speed = (data[\"properties\"][\"timeseries\"][0][\"data\"][\"instant\"][\"details\"][\"wind_speed\"]) return", "def air_temperature(): \"\"\" Return the instant air temperature in celsius \"\"\" air_temp =", "mm \"\"\" precipitation_1_hour = (data[\"properties\"][\"timeseries\"][0][\"data\"][\"next_1_hours\"][\"details\"][\"precipitation_amount\"]) return precipitation_1_hour def wind_direction(): \"\"\" Return the wind", "wind_from_direction < 281.25: print(\"Vest\") return \"West\" elif wind_from_direction < 326.25: print(\"Nordvest\") return \"Northwest\"", "11.25: print(\"Nord\") return \"North\" elif wind_from_direction < 56.25: print(\"Nordøst\") return \"Northeast\" elif wind_from_direction", "json.dump(data, f) write_json_file() # TODO! If-Modified-Since def updated_time(): \"\"\" Time updated at yr", "api: https://api.met.no/weatherapi/locationforecast/2.0/compact?altitude=80&lat=63.4305&lon=10.3950 curl: curl -X GET --header 'Accept: application/json' 'https://api.met.no/weatherapi/locationforecast/2.0/compact?altitude=80&lat=59.1304&lon=11.3545' \"\"\" import requests", "and put the feeld headers = { \"Content-type\": \"application/json\", \"Cache-Control\": \"no-cache\", \"user-agent\": \"Put", "response.json() def write_json_file(): \"\"\" Save data as json file \"\"\" with open('yr_data_complete_format.json', 'w')", "\"\"\" precipitation_1_hour = (data[\"properties\"][\"timeseries\"][0][\"data\"][\"next_1_hours\"][\"details\"][\"precipitation_amount\"]) return precipitation_1_hour def wind_direction(): \"\"\" Return the wind from", "yr where the request is coming from. # NB! 
Find your user-agent and", "\"\"\" String value giving a summary for +1 hour \"\"\" summary_1_hour = (data[\"properties\"][\"timeseries\"][0][\"data\"][\"next_1_hours\"][\"summary\"][\"symbol_code\"])", "Return the instant air temperature in celsius \"\"\" air_temp = (data[\"properties\"][\"timeseries\"][0][\"data\"][\"instant\"][\"details\"][\"air_temperature\"]) return air_temp", "print(\"Sør\") return \"South\" elif wind_from_direction < 236.25: print(\"Sørvest\") return \"Southwest\" elif wind_from_direction <", "else: print('An error has occurred.') data = response.json() def write_json_file(): \"\"\" Save data", "data as json file \"\"\" with open('yr_data_complete_format.json', 'w') as f: json.dump(data, f) write_json_file()", "= (data[\"properties\"][\"meta\"][\"updated_at\"]) return updated #print(data[\"properties\"][\"timeseries\"][0][\"data\"][\"instant\"][\"details\"]) def air_temperature(): \"\"\" Return the instant air temperature", "+1 hour \"\"\" summary_1_hour = (data[\"properties\"][\"timeseries\"][0][\"data\"][\"next_1_hours\"][\"summary\"][\"symbol_code\"]) return summary_1_hour def precipitation_1_hour(): \"\"\" Precipitation for", "elif wind_from_direction < 56.25: print(\"Nordøst\") return \"Northeast\" elif wind_from_direction < 101.25: print(\"Øst\") return", "summary_1_hour def precipitation_1_hour(): \"\"\" Precipitation for +1 hour in mm \"\"\" precipitation_1_hour =", "lat = \"59.1304\" lon = \"11.3546\" alt = \"80\" # url to yr", "precipitation_1_hour(): \"\"\" Precipitation for +1 hour in mm \"\"\" precipitation_1_hour = (data[\"properties\"][\"timeseries\"][0][\"data\"][\"next_1_hours\"][\"details\"][\"precipitation_amount\"]) return", "the wind from direction \"\"\" wind_from_direction = (data[\"properties\"][\"timeseries\"][0][\"data\"][\"instant\"][\"details\"][\"wind_from_direction\"]) if wind_from_direction > 326.25 or", "Return the wind from direction \"\"\" wind_from_direction = 
(data[\"properties\"][\"timeseries\"][0][\"data\"][\"instant\"][\"details\"][\"wind_from_direction\"]) if wind_from_direction > 326.25", "wind_from_direction < 236.25: print(\"Sørvest\") return \"Southwest\" elif wind_from_direction < 281.25: print(\"Vest\") return \"West\"", "\"\"\" with open('yr_data_complete_format.json', 'w') as f: json.dump(data, f) write_json_file() # TODO! If-Modified-Since def", "response: print('Success!') else: print('An error has occurred.') data = response.json() def write_json_file(): \"\"\"", "= \"https://api.met.no/weatherapi/locationforecast/2.0/complete.json?altitude=\" + alt + \"&lat=\" + lat + \"&lon=\" + lon #", "wind_from_direction < 56.25: print(\"Nordøst\") return \"Northeast\" elif wind_from_direction < 101.25: print(\"Øst\") return \"East\"", "'Accept: application/json' 'https://api.met.no/weatherapi/locationforecast/2.0/compact?altitude=80&lat=59.1304&lon=11.3545' \"\"\" import requests # api module import json #Save as", "to tell yr where the request is coming from. # NB! Find your", "print('Success!') else: print('An error has occurred.') data = response.json() def write_json_file(): \"\"\" Save", "yr \"\"\" updated = (data[\"properties\"][\"meta\"][\"updated_at\"]) return updated #print(data[\"properties\"][\"timeseries\"][0][\"data\"][\"instant\"][\"details\"]) def air_temperature(): \"\"\" Return the", "altitude: ca. 80 https://api.met.no/weatherapi/locationforecast/2.0/#!/data/get_compact_format request api: https://api.met.no/weatherapi/locationforecast/2.0/compact?altitude=80&lat=63.4305&lon=10.3950 curl: curl -X GET --header 'Accept:", "celsius \"\"\" air_temp = (data[\"properties\"][\"timeseries\"][0][\"data\"][\"instant\"][\"details\"][\"air_temperature\"]) return air_temp def wind_speed(): \"\"\" Wind speed in", "def summary_1_hour(): \"\"\" String value giving a summary for +1 hour \"\"\" summary_1_hour", "write_json_file() # TODO! 
If-Modified-Since def updated_time(): \"\"\" Time updated at yr \"\"\" updated", "json api response = requests.request(\"GET\", url, headers=headers) if response: print('Success!') else: print('An error", "# NB! Find your user-agent and put the feeld headers = { \"Content-type\":", "# TODO! If-Modified-Since def updated_time(): \"\"\" Time updated at yr \"\"\" updated =", "# Precentage value of the total cloud cover at all heights cloud_area =", "Wind speed in m/s \"\"\" wind_speed = (data[\"properties\"][\"timeseries\"][0][\"data\"][\"instant\"][\"details\"][\"wind_speed\"]) return wind_speed # Precentage value", "Save data as json file \"\"\" with open('yr_data_complete_format.json', 'w') as f: json.dump(data, f)", "Precipitation for +1 hour in mm \"\"\" precipitation_1_hour = (data[\"properties\"][\"timeseries\"][0][\"data\"][\"next_1_hours\"][\"details\"][\"precipitation_amount\"]) return precipitation_1_hour def", "#print(data[\"properties\"][\"timeseries\"][0][\"data\"][\"instant\"][\"details\"]) def air_temperature(): \"\"\" Return the instant air temperature in celsius \"\"\" air_temp", "= \"11.3546\" alt = \"80\" # url to yr api url = \"https://api.met.no/weatherapi/locationforecast/2.0/complete.json?altitude=\"", "url to yr api url = \"https://api.met.no/weatherapi/locationforecast/2.0/complete.json?altitude=\" + alt + \"&lat=\" + lat", "def wind_direction(): \"\"\" Return the wind from direction \"\"\" wind_from_direction = (data[\"properties\"][\"timeseries\"][0][\"data\"][\"instant\"][\"details\"][\"wind_from_direction\"]) if", "as json file \"\"\" with open('yr_data_complete_format.json', 'w') as f: json.dump(data, f) write_json_file() #", "Remmen, Halden: lat: 59.1304, lon: 11.3546, altitude: ca. 
80 https://api.met.no/weatherapi/locationforecast/2.0/#!/data/get_compact_format request api: https://api.met.no/weatherapi/locationforecast/2.0/compact?altitude=80&lat=63.4305&lon=10.3950", "\"\"\" updated = (data[\"properties\"][\"meta\"][\"updated_at\"]) return updated #print(data[\"properties\"][\"timeseries\"][0][\"data\"][\"instant\"][\"details\"]) def air_temperature(): \"\"\" Return the instant", "summary_1_hour = (data[\"properties\"][\"timeseries\"][0][\"data\"][\"next_1_hours\"][\"summary\"][\"symbol_code\"]) return summary_1_hour def precipitation_1_hour(): \"\"\" Precipitation for +1 hour in", "(data[\"properties\"][\"timeseries\"][0][\"data\"][\"instant\"][\"details\"][\"air_temperature\"]) return air_temp def wind_speed(): \"\"\" Wind speed in m/s \"\"\" wind_speed =", "response = requests.request(\"GET\", url, headers=headers) if response: print('Success!') else: print('An error has occurred.')", "in mm \"\"\" precipitation_1_hour = (data[\"properties\"][\"timeseries\"][0][\"data\"][\"next_1_hours\"][\"details\"][\"precipitation_amount\"]) return precipitation_1_hour def wind_direction(): \"\"\" Return the", "precipitation_1_hour = (data[\"properties\"][\"timeseries\"][0][\"data\"][\"next_1_hours\"][\"details\"][\"precipitation_amount\"]) return precipitation_1_hour def wind_direction(): \"\"\" Return the wind from direction", "wind_direction(): \"\"\" Return the wind from direction \"\"\" wind_from_direction = (data[\"properties\"][\"timeseries\"][0][\"data\"][\"instant\"][\"details\"][\"wind_from_direction\"]) if wind_from_direction", "return \"South\" elif wind_from_direction < 236.25: print(\"Sørvest\") return \"Southwest\" elif wind_from_direction < 281.25:", "the request is coming from. # NB! Find your user-agent and put the", "f: json.dump(data, f) write_json_file() # TODO! 
If-Modified-Since def updated_time(): \"\"\" Time updated at", "of the total cloud cover at all heights cloud_area = (data[\"properties\"][\"timeseries\"][0][\"data\"][\"instant\"][\"details\"][\"cloud_area_fraction\"]) rel_humidity =", "a summary for +1 hour \"\"\" summary_1_hour = (data[\"properties\"][\"timeseries\"][0][\"data\"][\"next_1_hours\"][\"summary\"][\"symbol_code\"]) return summary_1_hour def precipitation_1_hour():", "the total cloud cover at all heights cloud_area = (data[\"properties\"][\"timeseries\"][0][\"data\"][\"instant\"][\"details\"][\"cloud_area_fraction\"]) rel_humidity = (data[\"properties\"][\"timeseries\"][0][\"data\"][\"instant\"][\"details\"][\"relative_humidity\"])", "if response: print('Success!') else: print('An error has occurred.') data = response.json() def write_json_file():", "\"\"\" summary_1_hour = (data[\"properties\"][\"timeseries\"][0][\"data\"][\"next_1_hours\"][\"summary\"][\"symbol_code\"]) return summary_1_hour def precipitation_1_hour(): \"\"\" Precipitation for +1 hour", "air_temp = (data[\"properties\"][\"timeseries\"][0][\"data\"][\"instant\"][\"details\"][\"air_temperature\"]) return air_temp def wind_speed(): \"\"\" Wind speed in m/s \"\"\"", "value giving a summary for +1 hour \"\"\" summary_1_hour = (data[\"properties\"][\"timeseries\"][0][\"data\"][\"next_1_hours\"][\"summary\"][\"symbol_code\"]) return summary_1_hour", "< 236.25: print(\"Sørvest\") return \"Southwest\" elif wind_from_direction < 281.25: print(\"Vest\") return \"West\" elif", "# Header to tell yr where the request is coming from. 
# NB!", "here\" } # get the json api response = requests.request(\"GET\", url, headers=headers) if", "If-Modified-Since def updated_time(): \"\"\" Time updated at yr \"\"\" updated = (data[\"properties\"][\"meta\"][\"updated_at\"]) return", "summary for +1 hour \"\"\" summary_1_hour = (data[\"properties\"][\"timeseries\"][0][\"data\"][\"next_1_hours\"][\"summary\"][\"symbol_code\"]) return summary_1_hour def precipitation_1_hour(): \"\"\"", "< 101.25: print(\"Øst\") return \"East\" elif wind_from_direction < 146.25: print(\"Sørøst\") return \"Southeast\" elif", "GET --header 'Accept: application/json' 'https://api.met.no/weatherapi/locationforecast/2.0/compact?altitude=80&lat=59.1304&lon=11.3545' \"\"\" import requests # api module import json", "user-agent here\" } # get the json api response = requests.request(\"GET\", url, headers=headers)", "Halden: lat: 59.1304, lon: 11.3546, altitude: ca. 80 https://api.met.no/weatherapi/locationforecast/2.0/#!/data/get_compact_format request api: https://api.met.no/weatherapi/locationforecast/2.0/compact?altitude=80&lat=63.4305&lon=10.3950 curl:", "+ alt + \"&lat=\" + lat + \"&lon=\" + lon # Header to", "\"Cache-Control\": \"no-cache\", \"user-agent\": \"Put your user-agent here\" } # get the json api", "in m/s \"\"\" wind_speed = (data[\"properties\"][\"timeseries\"][0][\"data\"][\"instant\"][\"details\"][\"wind_speed\"]) return wind_speed # Precentage value of the", "< 191.25: print(\"Sør\") return \"South\" elif wind_from_direction < 236.25: print(\"Sørvest\") return \"Southwest\" elif", "elif wind_from_direction < 236.25: print(\"Sørvest\") return \"Southwest\" elif wind_from_direction < 281.25: print(\"Vest\") return", "} # get the json api response = requests.request(\"GET\", url, headers=headers) if response:", "return \"North\" elif wind_from_direction < 56.25: print(\"Nordøst\") return \"Northeast\" elif wind_from_direction < 101.25:", "\"&lat=\" + lat + \"&lon=\" + lon # Header to tell yr where", "json from yr. 
Location Remmen, Halden: lat: 59.1304, lon: 11.3546, altitude: ca. 80", "cover at all heights cloud_area = (data[\"properties\"][\"timeseries\"][0][\"data\"][\"instant\"][\"details\"][\"cloud_area_fraction\"]) rel_humidity = (data[\"properties\"][\"timeseries\"][0][\"data\"][\"instant\"][\"details\"][\"relative_humidity\"]) def summary_1_hour(): \"\"\"", "elif wind_from_direction < 191.25: print(\"Sør\") return \"South\" elif wind_from_direction < 236.25: print(\"Sørvest\") return", "(data[\"properties\"][\"timeseries\"][0][\"data\"][\"instant\"][\"details\"][\"relative_humidity\"]) def summary_1_hour(): \"\"\" String value giving a summary for +1 hour \"\"\"", "\"&lon=\" + lon # Header to tell yr where the request is coming", "altitude: lat = \"59.1304\" lon = \"11.3546\" alt = \"80\" # url to", "\"\"\" wind_speed = (data[\"properties\"][\"timeseries\"][0][\"data\"][\"instant\"][\"details\"][\"wind_speed\"]) return wind_speed # Precentage value of the total cloud", "return \"Southeast\" elif wind_from_direction < 191.25: print(\"Sør\") return \"South\" elif wind_from_direction < 236.25:", "(data[\"properties\"][\"timeseries\"][0][\"data\"][\"instant\"][\"details\"][\"wind_speed\"]) return wind_speed # Precentage value of the total cloud cover at all", "\"Put your user-agent here\" } # get the json api response = requests.request(\"GET\",", "wind_from_direction < 191.25: print(\"Sør\") return \"South\" elif wind_from_direction < 236.25: print(\"Sørvest\") return \"Southwest\"", "NB! 
Find your user-agent and put the feeld headers = { \"Content-type\": \"application/json\",", "air_temperature(): \"\"\" Return the instant air temperature in celsius \"\"\" air_temp = (data[\"properties\"][\"timeseries\"][0][\"data\"][\"instant\"][\"details\"][\"air_temperature\"])", "< 146.25: print(\"Sørøst\") return \"Southeast\" elif wind_from_direction < 191.25: print(\"Sør\") return \"South\" elif", "import json #Save as Json #Remmen location og altitude: lat = \"59.1304\" lon", "return summary_1_hour def precipitation_1_hour(): \"\"\" Precipitation for +1 hour in mm \"\"\" precipitation_1_hour", "\"59.1304\" lon = \"11.3546\" alt = \"80\" # url to yr api url", "\"user-agent\": \"Put your user-agent here\" } # get the json api response =", "request api: https://api.met.no/weatherapi/locationforecast/2.0/compact?altitude=80&lat=63.4305&lon=10.3950 curl: curl -X GET --header 'Accept: application/json' 'https://api.met.no/weatherapi/locationforecast/2.0/compact?altitude=80&lat=59.1304&lon=11.3545' \"\"\" import", "# url to yr api url = \"https://api.met.no/weatherapi/locationforecast/2.0/complete.json?altitude=\" + alt + \"&lat=\" +", "at yr \"\"\" updated = (data[\"properties\"][\"meta\"][\"updated_at\"]) return updated #print(data[\"properties\"][\"timeseries\"][0][\"data\"][\"instant\"][\"details\"]) def air_temperature(): \"\"\" Return", "hour \"\"\" summary_1_hour = (data[\"properties\"][\"timeseries\"][0][\"data\"][\"next_1_hours\"][\"summary\"][\"symbol_code\"]) return summary_1_hour def precipitation_1_hour(): \"\"\" Precipitation for +1", "wind_speed # Precentage value of the total cloud cover at all heights cloud_area", "\"\"\" import requests # api module import json #Save as Json #Remmen location", "lon = \"11.3546\" alt = \"80\" # url to yr api url =", "curl: curl -X GET --header 'Accept: application/json' 'https://api.met.no/weatherapi/locationforecast/2.0/compact?altitude=80&lat=59.1304&lon=11.3545' \"\"\" import requests # api", "updated at yr 
\"\"\" updated = (data[\"properties\"][\"meta\"][\"updated_at\"]) return updated #print(data[\"properties\"][\"timeseries\"][0][\"data\"][\"instant\"][\"details\"]) def air_temperature(): \"\"\"", "Json #Remmen location og altitude: lat = \"59.1304\" lon = \"11.3546\" alt =", "lat + \"&lon=\" + lon # Header to tell yr where the request", "+1 hour in mm \"\"\" precipitation_1_hour = (data[\"properties\"][\"timeseries\"][0][\"data\"][\"next_1_hours\"][\"details\"][\"precipitation_amount\"]) return precipitation_1_hour def wind_direction(): \"\"\"", "wind_from_direction < 101.25: print(\"Øst\") return \"East\" elif wind_from_direction < 146.25: print(\"Sørøst\") return \"Southeast\"", "precipitation_1_hour def wind_direction(): \"\"\" Return the wind from direction \"\"\" wind_from_direction = (data[\"properties\"][\"timeseries\"][0][\"data\"][\"instant\"][\"details\"][\"wind_from_direction\"])", "elif wind_from_direction < 146.25: print(\"Sørøst\") return \"Southeast\" elif wind_from_direction < 191.25: print(\"Sør\") return", "print(\"Nord\") return \"North\" elif wind_from_direction < 56.25: print(\"Nordøst\") return \"Northeast\" elif wind_from_direction <", "get the json api response = requests.request(\"GET\", url, headers=headers) if response: print('Success!') else:", "TODO! 
If-Modified-Since def updated_time(): \"\"\" Time updated at yr \"\"\" updated = (data[\"properties\"][\"meta\"][\"updated_at\"])", "236.25: print(\"Sørvest\") return \"Southwest\" elif wind_from_direction < 281.25: print(\"Vest\") return \"West\" elif wind_from_direction", "= \"59.1304\" lon = \"11.3546\" alt = \"80\" # url to yr api", "for +1 hour \"\"\" summary_1_hour = (data[\"properties\"][\"timeseries\"][0][\"data\"][\"next_1_hours\"][\"summary\"][\"symbol_code\"]) return summary_1_hour def precipitation_1_hour(): \"\"\" Precipitation", "\"\"\" air_temp = (data[\"properties\"][\"timeseries\"][0][\"data\"][\"instant\"][\"details\"][\"air_temperature\"]) return air_temp def wind_speed(): \"\"\" Wind speed in m/s", "--header 'Accept: application/json' 'https://api.met.no/weatherapi/locationforecast/2.0/compact?altitude=80&lat=59.1304&lon=11.3545' \"\"\" import requests # api module import json #Save", "= (data[\"properties\"][\"timeseries\"][0][\"data\"][\"instant\"][\"details\"][\"cloud_area_fraction\"]) rel_humidity = (data[\"properties\"][\"timeseries\"][0][\"data\"][\"instant\"][\"details\"][\"relative_humidity\"]) def summary_1_hour(): \"\"\" String value giving a summary", "= (data[\"properties\"][\"timeseries\"][0][\"data\"][\"instant\"][\"details\"][\"wind_speed\"]) return wind_speed # Precentage value of the total cloud cover at", "yr. Location Remmen, Halden: lat: 59.1304, lon: 11.3546, altitude: ca. 
80 https://api.met.no/weatherapi/locationforecast/2.0/#!/data/get_compact_format request", "print('An error has occurred.') data = response.json() def write_json_file(): \"\"\" Save data as", "module import json #Save as Json #Remmen location og altitude: lat = \"59.1304\"", "total cloud cover at all heights cloud_area = (data[\"properties\"][\"timeseries\"][0][\"data\"][\"instant\"][\"details\"][\"cloud_area_fraction\"]) rel_humidity = (data[\"properties\"][\"timeseries\"][0][\"data\"][\"instant\"][\"details\"][\"relative_humidity\"]) def", "11.3546, altitude: ca. 80 https://api.met.no/weatherapi/locationforecast/2.0/#!/data/get_compact_format request api: https://api.met.no/weatherapi/locationforecast/2.0/compact?altitude=80&lat=63.4305&lon=10.3950 curl: curl -X GET --header", "at all heights cloud_area = (data[\"properties\"][\"timeseries\"][0][\"data\"][\"instant\"][\"details\"][\"cloud_area_fraction\"]) rel_humidity = (data[\"properties\"][\"timeseries\"][0][\"data\"][\"instant\"][\"details\"][\"relative_humidity\"]) def summary_1_hour(): \"\"\" String", "= (data[\"properties\"][\"timeseries\"][0][\"data\"][\"next_1_hours\"][\"details\"][\"precipitation_amount\"]) return precipitation_1_hour def wind_direction(): \"\"\" Return the wind from direction \"\"\"", "wind_from_direction < 11.25: print(\"Nord\") return \"North\" elif wind_from_direction < 56.25: print(\"Nordøst\") return \"Northeast\"", "as Json #Remmen location og altitude: lat = \"59.1304\" lon = \"11.3546\" alt", "< 56.25: print(\"Nordøst\") return \"Northeast\" elif wind_from_direction < 101.25: print(\"Øst\") return \"East\" elif", "(data[\"properties\"][\"timeseries\"][0][\"data\"][\"instant\"][\"details\"][\"wind_from_direction\"]) if wind_from_direction > 326.25 or wind_from_direction < 11.25: print(\"Nord\") return \"North\" elif", "{ \"Content-type\": \"application/json\", \"Cache-Control\": \"no-cache\", \"user-agent\": \"Put your user-agent here\" } # get", "updated 
#print(data[\"properties\"][\"timeseries\"][0][\"data\"][\"instant\"][\"details\"]) def air_temperature(): \"\"\" Return the instant air temperature in celsius \"\"\"", "(data[\"properties\"][\"timeseries\"][0][\"data\"][\"next_1_hours\"][\"details\"][\"precipitation_amount\"]) return precipitation_1_hour def wind_direction(): \"\"\" Return the wind from direction \"\"\" wind_from_direction", "error has occurred.') data = response.json() def write_json_file(): \"\"\" Save data as json", "instant air temperature in celsius \"\"\" air_temp = (data[\"properties\"][\"timeseries\"][0][\"data\"][\"instant\"][\"details\"][\"air_temperature\"]) return air_temp def wind_speed():", "alt = \"80\" # url to yr api url = \"https://api.met.no/weatherapi/locationforecast/2.0/complete.json?altitude=\" + alt", "\"11.3546\" alt = \"80\" # url to yr api url = \"https://api.met.no/weatherapi/locationforecast/2.0/complete.json?altitude=\" +", "url = \"https://api.met.no/weatherapi/locationforecast/2.0/complete.json?altitude=\" + alt + \"&lat=\" + lat + \"&lon=\" + lon", "def wind_speed(): \"\"\" Wind speed in m/s \"\"\" wind_speed = (data[\"properties\"][\"timeseries\"][0][\"data\"][\"instant\"][\"details\"][\"wind_speed\"]) return wind_speed", "f) write_json_file() # TODO! 
If-Modified-Since def updated_time(): \"\"\" Time updated at yr \"\"\"", "= { \"Content-type\": \"application/json\", \"Cache-Control\": \"no-cache\", \"user-agent\": \"Put your user-agent here\" } #", "return wind_speed # Precentage value of the total cloud cover at all heights", "from direction \"\"\" wind_from_direction = (data[\"properties\"][\"timeseries\"][0][\"data\"][\"instant\"][\"details\"][\"wind_from_direction\"]) if wind_from_direction > 326.25 or wind_from_direction <", "-X GET --header 'Accept: application/json' 'https://api.met.no/weatherapi/locationforecast/2.0/compact?altitude=80&lat=59.1304&lon=11.3545' \"\"\" import requests # api module import", "\"application/json\", \"Cache-Control\": \"no-cache\", \"user-agent\": \"Put your user-agent here\" } # get the json", "air temperature in celsius \"\"\" air_temp = (data[\"properties\"][\"timeseries\"][0][\"data\"][\"instant\"][\"details\"][\"air_temperature\"]) return air_temp def wind_speed(): \"\"\"", "\"\"\" Wind speed in m/s \"\"\" wind_speed = (data[\"properties\"][\"timeseries\"][0][\"data\"][\"instant\"][\"details\"][\"wind_speed\"]) return wind_speed # Precentage", "url, headers=headers) if response: print('Success!') else: print('An error has occurred.') data = response.json()", "def precipitation_1_hour(): \"\"\" Precipitation for +1 hour in mm \"\"\" precipitation_1_hour = (data[\"properties\"][\"timeseries\"][0][\"data\"][\"next_1_hours\"][\"details\"][\"precipitation_amount\"])", "wind_from_direction > 326.25 or wind_from_direction < 11.25: print(\"Nord\") return \"North\" elif wind_from_direction <", "\"Southwest\" elif wind_from_direction < 281.25: print(\"Vest\") return \"West\" elif wind_from_direction < 326.25: print(\"Nordvest\")", "alt + \"&lat=\" + lat + \"&lon=\" + lon # Header to tell", "= (data[\"properties\"][\"timeseries\"][0][\"data\"][\"instant\"][\"details\"][\"relative_humidity\"]) def summary_1_hour(): \"\"\" String value giving a summary for +1 hour", "occurred.') data 
= response.json() def write_json_file(): \"\"\" Save data as json file \"\"\"", "= response.json() def write_json_file(): \"\"\" Save data as json file \"\"\" with open('yr_data_complete_format.json',", "return precipitation_1_hour def wind_direction(): \"\"\" Return the wind from direction \"\"\" wind_from_direction =", "if wind_from_direction > 326.25 or wind_from_direction < 11.25: print(\"Nord\") return \"North\" elif wind_from_direction", "\"no-cache\", \"user-agent\": \"Put your user-agent here\" } # get the json api response", "\"\"\" wind_from_direction = (data[\"properties\"][\"timeseries\"][0][\"data\"][\"instant\"][\"details\"][\"wind_from_direction\"]) if wind_from_direction > 326.25 or wind_from_direction < 11.25: print(\"Nord\")" ]
[ "<filename>python/testData/resolve/ObjectMethods.py class A: x = 1 y = 1 class B(A): def foo(self):", "class A: x = 1 y = 1 class B(A): def foo(self): self.__repr__()", "x = 1 y = 1 class B(A): def foo(self): self.__repr__() # <ref>", "A: x = 1 y = 1 class B(A): def foo(self): self.__repr__() #" ]
[ "is an anagram of s. Example 1: Input: s = \"anagram\", t =", "s. Example 1: Input: s = \"anagram\", t = \"nagaram\" Output: true Example", "a large size array to fit the entire range of unicode characters, which", "= 1 else: d[c] += 1 for c in t: if c not", "go up to more than 1 million. A hash table is a more", "Sorting. Sort two strings, if t is an anagram of s, they will", "s, t): \"\"\" :type s: str :type t: str :rtype: bool \"\"\" d", "of chars in t is the same with its in s, as well", "# Hash Table # Time-O(N), because accessing the counter table is a constant", "t is the same with its in s, as well as quantity. 2.", "Example 2: Input: s = \"rat\", t = \"car\" Output: false Follow up:", "O(1)O(1) because the table's size stays constant no matter how large nn is.", "to more than 1 million. A hash table is a more generic solution", "well as quantity. 2. Sorting. Sort two strings, if t is an anagram", "of unicode characters, which could go up to more than 1 million. A", "False else: d[c] -= 1 for k,v in d.items(): if v != 0:", "How would you adapt your solution to such case? Use a hash table", "1 for c in t: if c not in d: return False else:", "use extra space, the space complexity is O(1)O(1) because the table's size stays", "Input: s = \"anagram\", t = \"nagaram\" Output: true Example 2: Input: s", "t): \"\"\" :type s: str :type t: str :rtype: bool \"\"\" d =", "the entire range of unicode characters, which could go up to more than", "space, the space complexity is O(1)O(1) because the table's size stays constant no", "if c not in d: return False else: d[c] -= 1 for k,v", "str :rtype: bool \"\"\" d = dict() for c in s: if c", "= dict() for c in s: if c not in d: d[c] =", "size array to fit the entire range of unicode characters, which could go", "Solution(object): def isAnagram(self, s, t): \"\"\" :type s: str :type t: str :rtype:", "write a function to determine if t is an anagram of s. 
Example", "class Solution(object): def isAnagram(self, s, t): \"\"\" :type s: str :type t: str", "Hash Table. Anagram means the type of chars in t is the same", "the space complexity is O(1)O(1) because the table's size stays constant no matter", "solution to such case? Use a hash table instead of a fixed size", "more than 1 million. A hash table is a more generic solution and", "Sort two strings, if t is an anagram of s, they will be", "Follow up: What if the inputs contain unicode characters? How would you adapt", "\"\"\" :type s: str :type t: str :rtype: bool \"\"\" d = dict()", "A hash table is a more generic solution and could adapt to any", "because accessing the counter table is a constant time operation. # Space-O(1), Although", "Although we do use extra space, the space complexity is O(1)O(1) because the", "contain unicode characters? How would you adapt your solution to such case? Use", "the same with its in s, as well as quantity. 2. Sorting. Sort", "= \"rat\", t = \"car\" Output: false Follow up: What if the inputs", "table instead of a fixed size counter. Imagine allocating a large size array", "unicode characters, which could go up to more than 1 million. A hash", "means the type of chars in t is the same with its in", "is the same with its in s, as well as quantity. 2. Sorting.", "million. A hash table is a more generic solution and could adapt to", "Table # Time-O(N), because accessing the counter table is a constant time operation.", "adapt your solution to such case? Use a hash table instead of a", "+= 1 for c in t: if c not in d: return False", "s, they will be identical. \"\"\" # Hash Table # Time-O(N), because accessing", "Solution: 1. Hash Table. Anagram means the type of chars in t is", "1 else: d[c] += 1 for c in t: if c not in", "the inputs contain unicode characters? How would you adapt your solution to such", "accessing the counter table is a constant time operation. 
# Space-O(1), Although we", "Anagram means the type of chars in t is the same with its", "size stays constant no matter how large nn is. class Solution(object): def isAnagram(self,", "adapt to any range of characters. Solution: 1. Hash Table. Anagram means the", "bool \"\"\" d = dict() for c in s: if c not in", "dict() for c in s: if c not in d: d[c] = 1", "2: Input: s = \"rat\", t = \"car\" Output: false Follow up: What", "a fixed size counter. Imagine allocating a large size array to fit the", "with its in s, as well as quantity. 2. Sorting. Sort two strings,", "table's size stays constant no matter how large nn is. class Solution(object): def", "as quantity. 2. Sorting. Sort two strings, if t is an anagram of", "identical. \"\"\" # Hash Table # Time-O(N), because accessing the counter table is", ":rtype: bool \"\"\" d = dict() for c in s: if c not", "strings s and t , write a function to determine if t is", "large nn is. class Solution(object): def isAnagram(self, s, t): \"\"\" :type s: str", "the counter table is a constant time operation. # Space-O(1), Although we do", "Imagine allocating a large size array to fit the entire range of unicode", "t , write a function to determine if t is an anagram of", "1. Hash Table. Anagram means the type of chars in t is the", "is O(1)O(1) because the table's size stays constant no matter how large nn", "for c in s: if c not in d: d[c] = 1 else:", "nn is. class Solution(object): def isAnagram(self, s, t): \"\"\" :type s: str :type", "d[c] -= 1 for k,v in d.items(): if v != 0: return False", "1 for k,v in d.items(): if v != 0: return False return True", "inputs contain unicode characters? How would you adapt your solution to such case?", "large size array to fit the entire range of unicode characters, which could", "t = \"car\" Output: false Follow up: What if the inputs contain unicode", "do use extra space, the space complexity is O(1)O(1) because the table's size", "its in s, as well as quantity. 2. Sorting. 
Sort two strings, if", "range of unicode characters, which could go up to more than 1 million.", "size counter. Imagine allocating a large size array to fit the entire range", "table is a more generic solution and could adapt to any range of", "you adapt your solution to such case? Use a hash table instead of", "operation. # Space-O(1), Although we do use extra space, the space complexity is", "true Example 2: Input: s = \"rat\", t = \"car\" Output: false Follow", "such case? Use a hash table instead of a fixed size counter. Imagine", "hash table is a more generic solution and could adapt to any range", "d = dict() for c in s: if c not in d: d[c]", "up to more than 1 million. A hash table is a more generic", "than 1 million. A hash table is a more generic solution and could", "2. Sorting. Sort two strings, if t is an anagram of s, they", "c in t: if c not in d: return False else: d[c] -=", "and could adapt to any range of characters. Solution: 1. Hash Table. Anagram", "if the inputs contain unicode characters? How would you adapt your solution to", "quantity. 2. Sorting. Sort two strings, if t is an anagram of s,", "space complexity is O(1)O(1) because the table's size stays constant no matter how", "if t is an anagram of s, they will be identical. \"\"\" #", "s = \"rat\", t = \"car\" Output: false Follow up: What if the", "constant no matter how large nn is. class Solution(object): def isAnagram(self, s, t):", "else: d[c] += 1 for c in t: if c not in d:", "of a fixed size counter. Imagine allocating a large size array to fit", "because the table's size stays constant no matter how large nn is. class", "determine if t is an anagram of s. Example 1: Input: s =", "= \"nagaram\" Output: true Example 2: Input: s = \"rat\", t = \"car\"", "a hash table instead of a fixed size counter. Imagine allocating a large", "to such case? Use a hash table instead of a fixed size counter.", "range of characters. Solution: 1. Hash Table. 
Anagram means the type of chars", "\"car\" Output: false Follow up: What if the inputs contain unicode characters? How", "the type of chars in t is the same with its in s,", "solution and could adapt to any range of characters. Solution: 1. Hash Table.", "of s, they will be identical. \"\"\" # Hash Table # Time-O(N), because", "any range of characters. Solution: 1. Hash Table. Anagram means the type of", "we do use extra space, the space complexity is O(1)O(1) because the table's", "anagram of s, they will be identical. \"\"\" # Hash Table # Time-O(N),", "anagram of s. Example 1: Input: s = \"anagram\", t = \"nagaram\" Output:", "Hash Table # Time-O(N), because accessing the counter table is a constant time", "entire range of unicode characters, which could go up to more than 1", "isAnagram(self, s, t): \"\"\" :type s: str :type t: str :rtype: bool \"\"\"", "= \"anagram\", t = \"nagaram\" Output: true Example 2: Input: s = \"rat\",", "unicode characters? How would you adapt your solution to such case? Use a", "in s: if c not in d: d[c] = 1 else: d[c] +=", "1 million. A hash table is a more generic solution and could adapt", "= \"car\" Output: false Follow up: What if the inputs contain unicode characters?", "chars in t is the same with its in s, as well as", "-= 1 for k,v in d.items(): if v != 0: return False return", "and t , write a function to determine if t is an anagram", "a constant time operation. # Space-O(1), Although we do use extra space, the", ", write a function to determine if t is an anagram of s.", "is a constant time operation. # Space-O(1), Although we do use extra space,", "Space-O(1), Although we do use extra space, the space complexity is O(1)O(1) because", "\"\"\" Given two strings s and t , write a function to determine", "t is an anagram of s. Example 1: Input: s = \"anagram\", t", ":type t: str :rtype: bool \"\"\" d = dict() for c in s:", "which could go up to more than 1 million. 
A hash table is", "a more generic solution and could adapt to any range of characters. Solution:", "characters? How would you adapt your solution to such case? Use a hash", "s: str :type t: str :rtype: bool \"\"\" d = dict() for c", "in d: return False else: d[c] -= 1 for k,v in d.items(): if", "Given two strings s and t , write a function to determine if", "# Space-O(1), Although we do use extra space, the space complexity is O(1)O(1)", "characters. Solution: 1. Hash Table. Anagram means the type of chars in t", "# Time-O(N), because accessing the counter table is a constant time operation. #", "same with its in s, as well as quantity. 2. Sorting. Sort two", "to any range of characters. Solution: 1. Hash Table. Anagram means the type", "for c in t: if c not in d: return False else: d[c]", "else: d[c] -= 1 for k,v in d.items(): if v != 0: return", "could go up to more than 1 million. A hash table is a", "d[c] += 1 for c in t: if c not in d: return", "false Follow up: What if the inputs contain unicode characters? How would you", "be identical. \"\"\" # Hash Table # Time-O(N), because accessing the counter table", "of s. Example 1: Input: s = \"anagram\", t = \"nagaram\" Output: true", "Output: true Example 2: Input: s = \"rat\", t = \"car\" Output: false", "d: return False else: d[c] -= 1 for k,v in d.items(): if v", "function to determine if t is an anagram of s. Example 1: Input:", "Output: false Follow up: What if the inputs contain unicode characters? How would", "stays constant no matter how large nn is. class Solution(object): def isAnagram(self, s,", "\"\"\" d = dict() for c in s: if c not in d:", "how large nn is. 
class Solution(object): def isAnagram(self, s, t): \"\"\" :type s:", "type of chars in t is the same with its in s, as", "two strings s and t , write a function to determine if t", "s = \"anagram\", t = \"nagaram\" Output: true Example 2: Input: s =", "s: if c not in d: d[c] = 1 else: d[c] += 1", "s and t , write a function to determine if t is an", "strings, if t is an anagram of s, they will be identical. \"\"\"", "complexity is O(1)O(1) because the table's size stays constant no matter how large", "no matter how large nn is. class Solution(object): def isAnagram(self, s, t): \"\"\"", "table is a constant time operation. # Space-O(1), Although we do use extra", "not in d: return False else: d[c] -= 1 for k,v in d.items():", "is. class Solution(object): def isAnagram(self, s, t): \"\"\" :type s: str :type t:", "extra space, the space complexity is O(1)O(1) because the table's size stays constant", "to determine if t is an anagram of s. Example 1: Input: s", "array to fit the entire range of unicode characters, which could go up", "in t is the same with its in s, as well as quantity.", "matter how large nn is. class Solution(object): def isAnagram(self, s, t): \"\"\" :type", "if c not in d: d[c] = 1 else: d[c] += 1 for", "more generic solution and could adapt to any range of characters. Solution: 1.", "Input: s = \"rat\", t = \"car\" Output: false Follow up: What if", "fixed size counter. Imagine allocating a large size array to fit the entire", "is a more generic solution and could adapt to any range of characters.", "they will be identical. \"\"\" # Hash Table # Time-O(N), because accessing the", "constant time operation. # Space-O(1), Although we do use extra space, the space", "Example 1: Input: s = \"anagram\", t = \"nagaram\" Output: true Example 2:", "instead of a fixed size counter. Imagine allocating a large size array to", "in d: d[c] = 1 else: d[c] += 1 for c in t:", "the table's size stays constant no matter how large nn is. 
class Solution(object):", "up: What if the inputs contain unicode characters? How would you adapt your", "as well as quantity. 2. Sorting. Sort two strings, if t is an", "d[c] = 1 else: d[c] += 1 for c in t: if c", "would you adapt your solution to such case? Use a hash table instead", "counter. Imagine allocating a large size array to fit the entire range of", "Time-O(N), because accessing the counter table is a constant time operation. # Space-O(1),", "a function to determine if t is an anagram of s. Example 1:", "c not in d: return False else: d[c] -= 1 for k,v in", "allocating a large size array to fit the entire range of unicode characters,", "of characters. Solution: 1. Hash Table. Anagram means the type of chars in", "\"anagram\", t = \"nagaram\" Output: true Example 2: Input: s = \"rat\", t", "if t is an anagram of s. Example 1: Input: s = \"anagram\",", "Table. Anagram means the type of chars in t is the same with", "hash table instead of a fixed size counter. Imagine allocating a large size", "t: if c not in d: return False else: d[c] -= 1 for", "case? Use a hash table instead of a fixed size counter. Imagine allocating", "to fit the entire range of unicode characters, which could go up to", "1: Input: s = \"anagram\", t = \"nagaram\" Output: true Example 2: Input:", ":type s: str :type t: str :rtype: bool \"\"\" d = dict() for", "in t: if c not in d: return False else: d[c] -= 1", "time operation. # Space-O(1), Although we do use extra space, the space complexity", "counter table is a constant time operation. # Space-O(1), Although we do use", "fit the entire range of unicode characters, which could go up to more", "generic solution and could adapt to any range of characters. Solution: 1. Hash", "s, as well as quantity. 2. Sorting. Sort two strings, if t is", "\"nagaram\" Output: true Example 2: Input: s = \"rat\", t = \"car\" Output:", "is an anagram of s, they will be identical. \"\"\" # Hash Table", "in s, as well as quantity. 2. Sorting. 
Sort two strings, if t", "c not in d: d[c] = 1 else: d[c] += 1 for c", "not in d: d[c] = 1 else: d[c] += 1 for c in", "t is an anagram of s, they will be identical. \"\"\" # Hash", "will be identical. \"\"\" # Hash Table # Time-O(N), because accessing the counter", "def isAnagram(self, s, t): \"\"\" :type s: str :type t: str :rtype: bool", "your solution to such case? Use a hash table instead of a fixed", "an anagram of s. Example 1: Input: s = \"anagram\", t = \"nagaram\"", "str :type t: str :rtype: bool \"\"\" d = dict() for c in", "c in s: if c not in d: d[c] = 1 else: d[c]", "return False else: d[c] -= 1 for k,v in d.items(): if v !=", "d: d[c] = 1 else: d[c] += 1 for c in t: if", "t: str :rtype: bool \"\"\" d = dict() for c in s: if", "two strings, if t is an anagram of s, they will be identical.", "characters, which could go up to more than 1 million. A hash table", "\"rat\", t = \"car\" Output: false Follow up: What if the inputs contain", "Use a hash table instead of a fixed size counter. Imagine allocating a", "an anagram of s, they will be identical. \"\"\" # Hash Table #", "\"\"\" # Hash Table # Time-O(N), because accessing the counter table is a", "t = \"nagaram\" Output: true Example 2: Input: s = \"rat\", t =", "could adapt to any range of characters. Solution: 1. Hash Table. Anagram means", "What if the inputs contain unicode characters? How would you adapt your solution" ]
[ "None def convert_rank(): none = None def get_current_leaders(): none = None def get_unwritten_leaderboard_games():", "none = None def get_file_entries(): none = None def get_drive_filetype(): none = None", "for test use only\"\"\" class MockDriveManager(): \"\"\"Mocked out version of DriveManager for test", "None def get_file_entries(): none = None def get_drive_filetype(): none = None def get_all_books_sheets():", "convert_rank(): none = None def get_current_leaders(): none = None def get_unwritten_leaderboard_games(): none =", "class MockDriveManager(): \"\"\"Mocked out version of DriveManager for test purposes\"\"\" def __init__(self): none", "update_answerkey_results(): none = None def update_game_start_time(): none = None def create_new_sheet(): none =", "def get_history_game_points(): none = None def overwrite_leaderboard(): none = None def update_answerkey_results(): none", "none = None def overwrite_leaderboard(): none = None def update_answerkey_results(): none = None", "def get_file_entries(): none = None def get_drive_filetype(): none = None def get_all_books_sheets(): none", "a mocked out GDocs dependency. 
This is for test use only\"\"\" class MockDriveManager():", "= None def get_file_entries(): none = None def get_drive_filetype(): none = None def", "This is for test use only\"\"\" class MockDriveManager(): \"\"\"Mocked out version of DriveManager", "get_games_result(): none = None def convert_rank(): none = None def get_current_leaders(): none =", "for test purposes\"\"\" def __init__(self): none = None def get_file_entries(): none = None", "None def update_answerkey_results(): none = None def update_game_start_time(): none = None def create_new_sheet():", "= None def get_current_leaders(): none = None def get_unwritten_leaderboard_games(): none = None def", "\"\"\"Mocked out version of DriveManager for test purposes\"\"\" def __init__(self): none = None", "= None def get_unwritten_leaderboard_games(): none = None def get_history_game_points(): none = None def", "none = None def get_current_leaders(): none = None def get_unwritten_leaderboard_games(): none = None", "none = None def get_all_books_sheets(): none = None def get_games_result(): none = None", "def update_game_start_time(): none = None def create_new_sheet(): none = None def new_response_data_available(): none", "MockDriveManager(): \"\"\"Mocked out version of DriveManager for test purposes\"\"\" def __init__(self): none =", "DriveManager for test purposes\"\"\" def __init__(self): none = None def get_file_entries(): none =", "containing the definition of a mocked out GDocs dependency. 
This is for test", "purposes\"\"\" def __init__(self): none = None def get_file_entries(): none = None def get_drive_filetype():", "get_drive_filetype(): none = None def get_all_books_sheets(): none = None def get_games_result(): none =", "update_game_start_time(): none = None def create_new_sheet(): none = None def new_response_data_available(): none =", "def get_games_result(): none = None def convert_rank(): none = None def get_current_leaders(): none", "= None def get_all_books_sheets(): none = None def get_games_result(): none = None def", "None def get_unwritten_leaderboard_games(): none = None def get_history_game_points(): none = None def overwrite_leaderboard():", "None def get_drive_filetype(): none = None def get_all_books_sheets(): none = None def get_games_result():", "of DriveManager for test purposes\"\"\" def __init__(self): none = None def get_file_entries(): none", "GDocs dependency. This is for test use only\"\"\" class MockDriveManager(): \"\"\"Mocked out version", "= None def get_history_game_points(): none = None def overwrite_leaderboard(): none = None def", "get_current_leaders(): none = None def get_unwritten_leaderboard_games(): none = None def get_history_game_points(): none =", "get_all_books_sheets(): none = None def get_games_result(): none = None def convert_rank(): none =", "test purposes\"\"\" def __init__(self): none = None def get_file_entries(): none = None def", "none = None def convert_rank(): none = None def get_current_leaders(): none = None", "def get_current_leaders(): none = None def get_unwritten_leaderboard_games(): none = None def get_history_game_points(): none", "= None def update_game_start_time(): none = None def create_new_sheet(): none = None def", "mocked out GDocs dependency. 
This is for test use only\"\"\" class MockDriveManager(): \"\"\"Mocked", "def __init__(self): none = None def get_file_entries(): none = None def get_drive_filetype(): none", "= None def get_drive_filetype(): none = None def get_all_books_sheets(): none = None def", "none = None def create_new_sheet(): none = None def new_response_data_available(): none = None", "None def get_all_books_sheets(): none = None def get_games_result(): none = None def convert_rank():", "definition of a mocked out GDocs dependency. This is for test use only\"\"\"", "def overwrite_leaderboard(): none = None def update_answerkey_results(): none = None def update_game_start_time(): none", "only\"\"\" class MockDriveManager(): \"\"\"Mocked out version of DriveManager for test purposes\"\"\" def __init__(self):", "def get_drive_filetype(): none = None def get_all_books_sheets(): none = None def get_games_result(): none", "get_history_game_points(): none = None def overwrite_leaderboard(): none = None def update_answerkey_results(): none =", "use only\"\"\" class MockDriveManager(): \"\"\"Mocked out version of DriveManager for test purposes\"\"\" def", "None def get_history_game_points(): none = None def overwrite_leaderboard(): none = None def update_answerkey_results():", "none = None def update_answerkey_results(): none = None def update_game_start_time(): none = None", "= None def get_games_result(): none = None def convert_rank(): none = None def", "def get_all_books_sheets(): none = None def get_games_result(): none = None def convert_rank(): none", "None def get_games_result(): none = None def convert_rank(): none = None def get_current_leaders():", "none = None def get_history_game_points(): none = None def overwrite_leaderboard(): none = None", "none = None def update_game_start_time(): none = None def create_new_sheet(): none = None", "version of DriveManager for test purposes\"\"\" def __init__(self): none = None def get_file_entries():", "= None def update_answerkey_results(): none 
= None def update_game_start_time(): none = None def", "= None def overwrite_leaderboard(): none = None def update_answerkey_results(): none = None def", "\"\"\"Module containing the definition of a mocked out GDocs dependency. This is for", "get_unwritten_leaderboard_games(): none = None def get_history_game_points(): none = None def overwrite_leaderboard(): none =", "out version of DriveManager for test purposes\"\"\" def __init__(self): none = None def", "dependency. This is for test use only\"\"\" class MockDriveManager(): \"\"\"Mocked out version of", "= None def convert_rank(): none = None def get_current_leaders(): none = None def", "__init__(self): none = None def get_file_entries(): none = None def get_drive_filetype(): none =", "test use only\"\"\" class MockDriveManager(): \"\"\"Mocked out version of DriveManager for test purposes\"\"\"", "none = None def get_games_result(): none = None def convert_rank(): none = None", "None def update_game_start_time(): none = None def create_new_sheet(): none = None def new_response_data_available():", "the definition of a mocked out GDocs dependency. This is for test use", "def get_unwritten_leaderboard_games(): none = None def get_history_game_points(): none = None def overwrite_leaderboard(): none", "None def get_current_leaders(): none = None def get_unwritten_leaderboard_games(): none = None def get_history_game_points():", "of a mocked out GDocs dependency. This is for test use only\"\"\" class", "none = None def get_unwritten_leaderboard_games(): none = None def get_history_game_points(): none = None", "overwrite_leaderboard(): none = None def update_answerkey_results(): none = None def update_game_start_time(): none =", "None def overwrite_leaderboard(): none = None def update_answerkey_results(): none = None def update_game_start_time():", "out GDocs dependency. 
This is for test use only\"\"\" class MockDriveManager(): \"\"\"Mocked out", "get_file_entries(): none = None def get_drive_filetype(): none = None def get_all_books_sheets(): none =", "def convert_rank(): none = None def get_current_leaders(): none = None def get_unwritten_leaderboard_games(): none", "def update_answerkey_results(): none = None def update_game_start_time(): none = None def create_new_sheet(): none", "is for test use only\"\"\" class MockDriveManager(): \"\"\"Mocked out version of DriveManager for", "none = None def get_drive_filetype(): none = None def get_all_books_sheets(): none = None" ]
[ "item: self.gsize(item, 'response.bodySize') + self.gsize(item, 'response.headerSize')), 'time': (lambda item: self.gsize(item, 'time')), } method", "filter_lambda(entry, target_value)] return entry_list def gsize(self, item, path): return self.not_neg(glom(item, 'request.headersSize', default=0)) def", "measurements = { 'request_headers': (lambda item: self.gsize(item, 'request.headersSize')), 'response_headers': (lambda item: self.gsize(item, 'response.headersSize')),", "or len(items) == 0 or kv is None: return False for item in", "'status': (lambda item, status: self.rmatch(str(glom(item, 'response.status', default=None)), status)), 'url': (lambda item, url_rxp: self.rmatch(str(glom(item,", "present(self, criteria): return len(self.entries(criteria)) > 0 def not_present(self, criteria): return len(self.entries(criteria)) == 0", "'request.url', default=None)), url_rxp)), 'content': (lambda item, content_rxp: self.rmatch(str(glom(item, 'response.content.text', default=None)), content_rxp)), 'content_type': (lambda", "items is None or len(items) == 0 or rxp is None: return False", "item, match_rgxp: self.rmatch_key_val(glom(item, 'request.cookies', default=[]), match_rgxp)), 'response_cookie': (lambda item, match_rgxp: self.rmatch_key_val(glom(item, 'response.cookies', default=[]),", "'json_path': (lambda item, path: self.has_json_path(str(glom(item, 'response.content.text', default=None)), path)), 'json_schema': (lambda item, schema: self.schema_validate(str(glom(item,", "self.rmatch(item['name'], kv['name']) if 'value'in kv: value = self.rmatch(kv['value'], item['value']) if name and value:", "matches = jsonpath_expr.find(json.loads(json_str)) return len(matches) > 0 def current_page(self): return self.har['log']['pages'][-1]['id'] # Use", "items is None or len(items) == 0 or kv is None: return False", "self.rmatch(str(glom(item, 'response.status', default=None)), status)), 'url': (lambda item, url_rxp: self.rmatch(str(glom(item, 'request.url', default=None)), 
url_rxp)), 'content':", "re from glom import glom import json from jsonschema import validate from jsonschema", "from jsonschema import ValidationError from jsonpath_ng import parse class HarVerifications: def __init__(self, har):", "item, content_type_rxp: self.rmatch(str(glom(item, 'response.content.mimeType', default=None)), content_type_rxp)), 'request_header': (lambda item, match_rgxp: self.rmatch_key_val(glom(item, 'request.headers',default=[]), match_rgxp)),", "'content': (lambda item, content_rxp: self.rmatch(str(glom(item, 'response.content.text', default=None)), content_rxp)), 'content_type': (lambda item, content_type_rxp: self.rmatch(str(glom(item,", "har): self.har = har def rmatch(self, val, str_rxp): if val is None: return", "'response': (lambda item: self.gsize(item, 'response.bodySize') + self.gsize(item, 'response.headerSize')), 'time': (lambda item: self.gsize(item, 'time')),", "or len(items) == 0 or rxp is None: return False for item in", "items)) def present(self, criteria): return len(self.entries(criteria)) > 0 def not_present(self, criteria): return len(self.entries(criteria))", "json from jsonschema import validate from jsonschema import ValidationError from jsonpath_ng import parse", "glom import json from jsonschema import validate from jsonschema import ValidationError from jsonpath_ng", "har_entry_filters[filter_name] if filter_name == 'page' and target_value == 'current': target_value = self.current_page() entry_list", "== 0 or kv is None: return False for item in items: if", "return True return False def rmatch_key_val(self, items, kv): name = True value =", "len(matches) > 0 def current_page(self): return self.har['log']['pages'][-1]['id'] # Use glom to dig into", "gsize(self, item, path): return self.not_neg(glom(item, 'request.headersSize', default=0)) def not_neg(self, val): val = int(val)", "item: self.gsize(item, 'request.bodySize')), 'response_body': (lambda item: self.gsize(item, 'request.bodySize')), 'request': (lambda item: 
self.gsize(item, 'request.bodySize')", "val): val = int(val) return 0 if val == -1 or val is", "+ self.gsize(item, 'request.headerSize')), 'response': (lambda item: self.gsize(item, 'response.bodySize') + self.gsize(item, 'response.headerSize')), 'time': (lambda", "def current_page(self): return self.har['log']['pages'][-1]['id'] # Use glom to dig into the har entries,", "ws_rxp)), 'json_valid': (lambda item, _: self.valid_json(str(glom(item, 'response.content.text', default=None)))), 'json_path': (lambda item, path: self.has_json_path(str(glom(item,", "item, path): return self.not_neg(glom(item, 'request.headersSize', default=0)) def not_neg(self, val): val = int(val) return", "return 0 if val == -1 or val is None else val def", "None else val def measure(self, items, measurement): measurements = { 'request_headers': (lambda item:", "name and value: return True return False def schema_validate(self, item, schema): try: if", "schema): try: if type(item) == str: item = json.loads(item) validate(instance=item, schema=schema) except (ValidationError,", "'page': (lambda item, pgref: glom(item, 'pageref', default='') == pgref ), 'status': (lambda item,", "criteria=False): entry_list = self.har['log']['entries'] har_entry_filters = { 'page': (lambda item, pgref: glom(item, 'pageref',", "= { 'page': (lambda item, pgref: glom(item, 'pageref', default='') == pgref ), 'status':", "items, measurement): measurements = { 'request_headers': (lambda item: self.gsize(item, 'request.headersSize')), 'response_headers': (lambda item:", "item = json.loads(item) validate(instance=item, schema=schema) except (ValidationError, ValueError) as e: return False return", "False def schema_validate(self, item, schema): try: if type(item) == str: item = json.loads(item)", "'time': (lambda item: self.gsize(item, 'time')), } method = measurements[measurement] return list(map(method, items)) def", "if name and value: return True return False def schema_validate(self, item, schema): try:", 
"self.har = har def rmatch(self, val, str_rxp): if val is None: return False", "as e: return False return True def has_json_path(self, json_str, json_path): if self.valid_json(json_str) ==", "return False for item in items: if isinstance(item, bytes): item = str(item, \"utf-8\")", "0 if val == -1 or val is None else val def measure(self,", "str(item, \"utf-8\") if self.rmatch(item, rxp): return True return False def rmatch_key_val(self, items, kv):", "val, str_rxp): if val is None: return False if isinstance(val, bytes): val =", "= int(val) return 0 if val == -1 or val is None else", "None or len(items) == 0 or kv is None: return False for item", "entry_list = self.har['log']['entries'] har_entry_filters = { 'page': (lambda item, pgref: glom(item, 'pageref', default='')", "None: return False if isinstance(val, bytes): val = str(val, \"utf-8\") return re.search(str_rxp, val,", "default=None)), content_rxp)), 'content_type': (lambda item, content_type_rxp: self.rmatch(str(glom(item, 'response.content.mimeType', default=None)), content_type_rxp)), 'request_header': (lambda item,", "ValueError as e: return False return True def has_json_path(self, json_str, json_path): if self.valid_json(json_str)", "item = str(item, \"utf-8\") if self.rmatch(item, rxp): return True return False def rmatch_key_val(self,", "item, path: self.has_json_path(str(glom(item, 'response.content.text', default=None)), path)), 'json_schema': (lambda item, schema: self.schema_validate(str(glom(item, 'response.content.text', default=None)),schema)),", "item: self.gsize(item, 'request.bodySize')), 'request': (lambda item: self.gsize(item, 'request.bodySize') + self.gsize(item, 'request.headerSize')), 'response': (lambda", "as e: return False return True def valid_json(self, item): if item is None:", "match_rgxp)), 'request_cookie': (lambda item, match_rgxp: self.rmatch_key_val(glom(item, 'request.cookies', default=[]), match_rgxp)), 'response_cookie': (lambda item, match_rgxp:", "default=[]), 
match_rgxp)), 'response_cookie': (lambda item, match_rgxp: self.rmatch_key_val(glom(item, 'response.cookies', default=[]), match_rgxp)), 'websocket_message': (lambda item,", "return re.search(str_rxp, val, flags=re.IGNORECASE) def rmatch_any(self, items, rxp): if items is None or", "if isinstance(val, bytes): val = str(val, \"utf-8\") return re.search(str_rxp, val, flags=re.IGNORECASE) def rmatch_any(self,", "> 0 def current_page(self): return self.har['log']['pages'][-1]['id'] # Use glom to dig into the", "def gsize(self, item, path): return self.not_neg(glom(item, 'request.headersSize', default=0)) def not_neg(self, val): val =", "to get down to an array of something or others # (headers, websocket", "if self.valid_json(json_str) == False: return False jsonpath_expr = parse(json_path) matches = jsonpath_expr.find(json.loads(json_str)) return", "rmatch_key_val(self, items, kv): name = True value = True if items is None", "self.current_page() entry_list = [entry for entry in entry_list if filter_lambda(entry, target_value)] return entry_list", "'current': target_value = self.current_page() entry_list = [entry for entry in entry_list if filter_lambda(entry,", "is None: return False if isinstance(val, bytes): val = str(val, \"utf-8\") return re.search(str_rxp,", "(lambda item: self.gsize(item, 'request.bodySize')), 'response_body': (lambda item: self.gsize(item, 'request.bodySize')), 'request': (lambda item: self.gsize(item,", "self.rmatch_key_val(glom(item, 'response.headers', default=[]), match_rgxp)), 'request_cookie': (lambda item, match_rgxp: self.rmatch_key_val(glom(item, 'request.cookies', default=[]), match_rgxp)), 'response_cookie':", "name = True value = True if items is None or len(items) ==", "return self.har['log']['pages'][-1]['id'] # Use glom to dig into the har entries, responses and", "default=None)), path)), 'json_schema': (lambda item, schema: self.schema_validate(str(glom(item, 'response.content.text', default=None)),schema)), } for filter_name, 
target_value", "= self.current_page() entry_list = [entry for entry in entry_list if filter_lambda(entry, target_value)] return", "= parse(json_path) matches = jsonpath_expr.find(json.loads(json_str)) return len(matches) > 0 def current_page(self): return self.har['log']['pages'][-1]['id']", "try: json.loads(item) except ValueError as e: return False return True def has_json_path(self, json_str,", "def rmatch_any(self, items, rxp): if items is None or len(items) == 0 or", "something or others # (headers, websocket messages, content then we execute our test", "pgref: glom(item, 'pageref', default='') == pgref ), 'status': (lambda item, status: self.rmatch(str(glom(item, 'response.status',", "'websocket_message': (lambda item, ws_rxp: self.rmatch_any(glom(item, ('_webSocketMessages', ['data']), default=[]), ws_rxp)), 'json_valid': (lambda item, _:", "len(self.entries(criteria)) == 0 def max(self, criteria, measurement_name): items = self.entries(criteria) return max(self.measure(items, measurement_name),", "if items is None or len(items) == 0 or kv is None: return", "if filter_lambda(entry, target_value)] return entry_list def gsize(self, item, path): return self.not_neg(glom(item, 'request.headersSize', default=0))", "= json.loads(item) validate(instance=item, schema=schema) except (ValidationError, ValueError) as e: return False return True", "return True def has_json_path(self, json_str, json_path): if self.valid_json(json_str) == False: return False jsonpath_expr", "to dig into the har entries, responses and websockets to get down to", "har entries, responses and websockets to get down to an array of something", "of something or others # (headers, websocket messages, content then we execute our", "str_rxp): if val is None: return False if isinstance(val, bytes): val = str(val,", "True value = True if items is None or len(items) == 0 or", "val is None else val def measure(self, items, measurement): measurements = { 'request_headers':", "return False for item in 
items: if 'name' in kv: name = self.rmatch(item['name'],", "val = int(val) return 0 if val == -1 or val is None", "default=None)), content_type_rxp)), 'request_header': (lambda item, match_rgxp: self.rmatch_key_val(glom(item, 'request.headers',default=[]), match_rgxp)), 'response_header': (lambda item, match_rgxp:", "(lambda item, schema: self.schema_validate(str(glom(item, 'response.content.text', default=None)),schema)), } for filter_name, target_value in criteria.items(): filter_lambda", "max(self, criteria, measurement_name): items = self.entries(criteria) return max(self.measure(items, measurement_name), default=0) def get_sum(self, criteria,", "'request.headersSize')), 'response_headers': (lambda item: self.gsize(item, 'response.headersSize')), 'request_body': (lambda item: self.gsize(item, 'request.bodySize')), 'response_body': (lambda", "'request_headers': (lambda item: self.gsize(item, 'request.headersSize')), 'response_headers': (lambda item: self.gsize(item, 'response.headersSize')), 'request_body': (lambda item:", "is None else val def measure(self, items, measurement): measurements = { 'request_headers': (lambda", "else val def measure(self, items, measurement): measurements = { 'request_headers': (lambda item: self.gsize(item,", "0 or rxp is None: return False for item in items: if isinstance(item,", "False def rmatch_key_val(self, items, kv): name = True value = True if items", "= str(item, \"utf-8\") if self.rmatch(item, rxp): return True return False def rmatch_key_val(self, items,", "return list(map(method, items)) def present(self, criteria): return len(self.entries(criteria)) > 0 def not_present(self, criteria):", "= True value = True if items is None or len(items) == 0", "item, match_rgxp: self.rmatch_key_val(glom(item, 'response.cookies', default=[]), match_rgxp)), 'websocket_message': (lambda item, ws_rxp: self.rmatch_any(glom(item, ('_webSocketMessages', ['data']),", "self.har['log']['entries'] har_entry_filters = { 'page': (lambda item, 
pgref: glom(item, 'pageref', default='') == pgref", "return True return False def schema_validate(self, item, schema): try: if type(item) == str:", "'response.content.text', default=None)), content_rxp)), 'content_type': (lambda item, content_type_rxp: self.rmatch(str(glom(item, 'response.content.mimeType', default=None)), content_type_rxp)), 'request_header': (lambda", "target_value = self.current_page() entry_list = [entry for entry in entry_list if filter_lambda(entry, target_value)]", "schema: self.schema_validate(str(glom(item, 'response.content.text', default=None)),schema)), } for filter_name, target_value in criteria.items(): filter_lambda = har_entry_filters[filter_name]", "'request.cookies', default=[]), match_rgxp)), 'response_cookie': (lambda item, match_rgxp: self.rmatch_key_val(glom(item, 'response.cookies', default=[]), match_rgxp)), 'websocket_message': (lambda", "self.schema_validate(str(glom(item, 'response.content.text', default=None)),schema)), } for filter_name, target_value in criteria.items(): filter_lambda = har_entry_filters[filter_name] if", "websocket messages, content then we execute our test against that item # current,", "match_rgxp: self.rmatch_key_val(glom(item, 'response.headers', default=[]), match_rgxp)), 'request_cookie': (lambda item, match_rgxp: self.rmatch_key_val(glom(item, 'request.cookies', default=[]), match_rgxp)),", "self.rmatch_key_val(glom(item, 'request.headers',default=[]), match_rgxp)), 'response_header': (lambda item, match_rgxp: self.rmatch_key_val(glom(item, 'response.headers', default=[]), match_rgxp)), 'request_cookie': (lambda", "self.rmatch(item, rxp): return True return False def rmatch_key_val(self, items, kv): name = True", "criteria): return len(self.entries(criteria)) > 0 def not_present(self, criteria): return len(self.entries(criteria)) == 0 def", "is None or len(items) == 0 or rxp is None: return False for", "# (headers, websocket messages, content then we execute our test against that item", "path): 
return self.not_neg(glom(item, 'request.headersSize', default=0)) def not_neg(self, val): val = int(val) return 0", "0 def max(self, criteria, measurement_name): items = self.entries(criteria) return max(self.measure(items, measurement_name), default=0) def", "for item in items: if 'name' in kv: name = self.rmatch(item['name'], kv['name']) if", "len(items) == 0 or rxp is None: return False for item in items:", "(lambda item: self.gsize(item, 'request.bodySize')), 'request': (lambda item: self.gsize(item, 'request.bodySize') + self.gsize(item, 'request.headerSize')), 'response':", "'response.content.text', default=None)))), 'json_path': (lambda item, path: self.has_json_path(str(glom(item, 'response.content.text', default=None)), path)), 'json_schema': (lambda item,", "parse class HarVerifications: def __init__(self, har): self.har = har def rmatch(self, val, str_rxp):", "har def rmatch(self, val, str_rxp): if val is None: return False if isinstance(val,", "measurement_name): items = self.entries(criteria) return max(self.measure(items, measurement_name), default=0) def get_sum(self, criteria, measurement_name): items", "match_rgxp)), 'websocket_message': (lambda item, ws_rxp: self.rmatch_any(glom(item, ('_webSocketMessages', ['data']), default=[]), ws_rxp)), 'json_valid': (lambda item,", "if items is None or len(items) == 0 or rxp is None: return", "return False if isinstance(val, bytes): val = str(val, \"utf-8\") return re.search(str_rxp, val, flags=re.IGNORECASE)", "glom to dig into the har entries, responses and websockets to get down", "import validate from jsonschema import ValidationError from jsonpath_ng import parse class HarVerifications: def", "value: return True return False def schema_validate(self, item, schema): try: if type(item) ==", "return False def rmatch_key_val(self, items, kv): name = True value = True if", "'value'in kv: value = self.rmatch(kv['value'], item['value']) if name and value: return True return", "is None: return False for item in 
items: if 'name' in kv: name", "and value: return True return False def schema_validate(self, item, schema): try: if type(item)", "val == -1 or val is None else val def measure(self, items, measurement):", "def has_json_path(self, json_str, json_path): if self.valid_json(json_str) == False: return False jsonpath_expr = parse(json_path)", "'request_cookie': (lambda item, match_rgxp: self.rmatch_key_val(glom(item, 'request.cookies', default=[]), match_rgxp)), 'response_cookie': (lambda item, match_rgxp: self.rmatch_key_val(glom(item,", "isinstance(val, bytes): val = str(val, \"utf-8\") return re.search(str_rxp, val, flags=re.IGNORECASE) def rmatch_any(self, items,", "(lambda item: self.gsize(item, 'request.headersSize')), 'response_headers': (lambda item: self.gsize(item, 'response.headersSize')), 'request_body': (lambda item: self.gsize(item,", "False try: json.loads(item) except ValueError as e: return False return True def has_json_path(self,", "def valid_json(self, item): if item is None: return False try: json.loads(item) except ValueError", "(lambda item, ws_rxp: self.rmatch_any(glom(item, ('_webSocketMessages', ['data']), default=[]), ws_rxp)), 'json_valid': (lambda item, _: self.valid_json(str(glom(item,", "entry in entry_list if filter_lambda(entry, target_value)] return entry_list def gsize(self, item, path): return", "(ValidationError, ValueError) as e: return False return True def valid_json(self, item): if item", "and target_value == 'current': target_value = self.current_page() entry_list = [entry for entry in", "content_type_rxp)), 'request_header': (lambda item, match_rgxp: self.rmatch_key_val(glom(item, 'request.headers',default=[]), match_rgxp)), 'response_header': (lambda item, match_rgxp: self.rmatch_key_val(glom(item,", "to an array of something or others # (headers, websocket messages, content then", "an array of something or others # (headers, websocket messages, content then we", "rmatch(self, val, str_rxp): if val is None: return False if 
isinstance(val, bytes): val", "kv is None: return False for item in items: if 'name' in kv:", "self.gsize(item, 'request.headerSize')), 'response': (lambda item: self.gsize(item, 'response.bodySize') + self.gsize(item, 'response.headerSize')), 'time': (lambda item:", "'page' and target_value == 'current': target_value = self.current_page() entry_list = [entry for entry", "self.has_json_path(str(glom(item, 'response.content.text', default=None)), path)), 'json_schema': (lambda item, schema: self.schema_validate(str(glom(item, 'response.content.text', default=None)),schema)), } for", "down to an array of something or others # (headers, websocket messages, content", "kv: name = self.rmatch(item['name'], kv['name']) if 'value'in kv: value = self.rmatch(kv['value'], item['value']) if", "return False def schema_validate(self, item, schema): try: if type(item) == str: item =", "not_present(self, criteria): return len(self.entries(criteria)) == 0 def max(self, criteria, measurement_name): items = self.entries(criteria)", "None: return False for item in items: if isinstance(item, bytes): item = str(item,", "def max(self, criteria, measurement_name): items = self.entries(criteria) return max(self.measure(items, measurement_name), default=0) def get_sum(self,", "re.search(str_rxp, val, flags=re.IGNORECASE) def rmatch_any(self, items, rxp): if items is None or len(items)", "0 or kv is None: return False for item in items: if 'name'", "pgref ), 'status': (lambda item, status: self.rmatch(str(glom(item, 'response.status', default=None)), status)), 'url': (lambda item,", "item: self.gsize(item, 'request.headersSize')), 'response_headers': (lambda item: self.gsize(item, 'response.headersSize')), 'request_body': (lambda item: self.gsize(item, 'request.bodySize')),", "# Use glom to dig into the har entries, responses and websockets to", "def measure(self, items, measurement): measurements = { 'request_headers': (lambda item: self.gsize(item, 'request.headersSize')), 
'response_headers':", "} for filter_name, target_value in criteria.items(): filter_lambda = har_entry_filters[filter_name] if filter_name == 'page'", "measure(self, items, measurement): measurements = { 'request_headers': (lambda item: self.gsize(item, 'request.headersSize')), 'response_headers': (lambda", "value = True if items is None or len(items) == 0 or kv", "path)), 'json_schema': (lambda item, schema: self.schema_validate(str(glom(item, 'response.content.text', default=None)),schema)), } for filter_name, target_value in", "return True def valid_json(self, item): if item is None: return False try: json.loads(item)", "'url': (lambda item, url_rxp: self.rmatch(str(glom(item, 'request.url', default=None)), url_rxp)), 'content': (lambda item, content_rxp: self.rmatch(str(glom(item,", "False if isinstance(val, bytes): val = str(val, \"utf-8\") return re.search(str_rxp, val, flags=re.IGNORECASE) def", "def not_present(self, criteria): return len(self.entries(criteria)) == 0 def max(self, criteria, measurement_name): items =", "items: if 'name' in kv: name = self.rmatch(item['name'], kv['name']) if 'value'in kv: value", "'json_schema': (lambda item, schema: self.schema_validate(str(glom(item, 'response.content.text', default=None)),schema)), } for filter_name, target_value in criteria.items():", "val def measure(self, items, measurement): measurements = { 'request_headers': (lambda item: self.gsize(item, 'request.headersSize')),", "json.loads(item) except ValueError as e: return False return True def has_json_path(self, json_str, json_path):", "from jsonschema import validate from jsonschema import ValidationError from jsonpath_ng import parse class", "return len(self.entries(criteria)) == 0 def max(self, criteria, measurement_name): items = self.entries(criteria) return max(self.measure(items,", "'response.content.text', default=None)),schema)), } for filter_name, target_value in criteria.items(): filter_lambda = har_entry_filters[filter_name] if filter_name", 
"'request.bodySize') + self.gsize(item, 'request.headerSize')), 'response': (lambda item: self.gsize(item, 'response.bodySize') + self.gsize(item, 'response.headerSize')), 'time':", "status)), 'url': (lambda item, url_rxp: self.rmatch(str(glom(item, 'request.url', default=None)), url_rxp)), 'content': (lambda item, content_rxp:", "def __init__(self, har): self.har = har def rmatch(self, val, str_rxp): if val is", "execute our test against that item # current, *, or filter def entries(self,", "*, or filter def entries(self, criteria=False): entry_list = self.har['log']['entries'] har_entry_filters = { 'page':", "== 0 def max(self, criteria, measurement_name): items = self.entries(criteria) return max(self.measure(items, measurement_name), default=0)", "'response_cookie': (lambda item, match_rgxp: self.rmatch_key_val(glom(item, 'response.cookies', default=[]), match_rgxp)), 'websocket_message': (lambda item, ws_rxp: self.rmatch_any(glom(item,", "except ValueError as e: return False return True def has_json_path(self, json_str, json_path): if", "'response.content.text', default=None)), path)), 'json_schema': (lambda item, schema: self.schema_validate(str(glom(item, 'response.content.text', default=None)),schema)), } for filter_name,", "self.gsize(item, 'request.bodySize')), 'response_body': (lambda item: self.gsize(item, 'request.bodySize')), 'request': (lambda item: self.gsize(item, 'request.bodySize') +", "'request.headersSize', default=0)) def not_neg(self, val): val = int(val) return 0 if val ==", "default=0)) def not_neg(self, val): val = int(val) return 0 if val == -1", "self.not_neg(glom(item, 'request.headersSize', default=0)) def not_neg(self, val): val = int(val) return 0 if val", "'request.headers',default=[]), match_rgxp)), 'response_header': (lambda item, match_rgxp: self.rmatch_key_val(glom(item, 'response.headers', default=[]), match_rgxp)), 'request_cookie': (lambda item,", "(lambda item: self.gsize(item, 'response.headersSize')), 'request_body': 
(lambda item: self.gsize(item, 'request.bodySize')), 'response_body': (lambda item: self.gsize(item,", "'response_headers': (lambda item: self.gsize(item, 'response.headersSize')), 'request_body': (lambda item: self.gsize(item, 'request.bodySize')), 'response_body': (lambda item:", "schema_validate(self, item, schema): try: if type(item) == str: item = json.loads(item) validate(instance=item, schema=schema)", "\"utf-8\") if self.rmatch(item, rxp): return True return False def rmatch_key_val(self, items, kv): name", "(lambda item, pgref: glom(item, 'pageref', default='') == pgref ), 'status': (lambda item, status:", "self.entries(criteria) return max(self.measure(items, measurement_name), default=0) def get_sum(self, criteria, measurement_name): items = self.entries(criteria) return", "filter_name == 'page' and target_value == 'current': target_value = self.current_page() entry_list = [entry", "self.gsize(item, 'response.bodySize') + self.gsize(item, 'response.headerSize')), 'time': (lambda item: self.gsize(item, 'time')), } method =", "default=None)), status)), 'url': (lambda item, url_rxp: self.rmatch(str(glom(item, 'request.url', default=None)), url_rxp)), 'content': (lambda item,", "self.valid_json(json_str) == False: return False jsonpath_expr = parse(json_path) matches = jsonpath_expr.find(json.loads(json_str)) return len(matches)", "len(self.entries(criteria)) > 0 def not_present(self, criteria): return len(self.entries(criteria)) == 0 def max(self, criteria,", "self.rmatch(str(glom(item, 'response.content.text', default=None)), content_rxp)), 'content_type': (lambda item, content_type_rxp: self.rmatch(str(glom(item, 'response.content.mimeType', default=None)), content_type_rxp)), 'request_header':", "'name' in kv: name = self.rmatch(item['name'], kv['name']) if 'value'in kv: value = self.rmatch(kv['value'],", "or others # (headers, websocket messages, content then we execute our test against", "'response.status', default=None)), status)), 'url': (lambda 
item, url_rxp: self.rmatch(str(glom(item, 'request.url', default=None)), url_rxp)), 'content': (lambda", "import parse class HarVerifications: def __init__(self, har): self.har = har def rmatch(self, val,", "is None or len(items) == 0 or kv is None: return False for", "import re from glom import glom import json from jsonschema import validate from", "self.rmatch(kv['value'], item['value']) if name and value: return True return False def schema_validate(self, item,", "= self.rmatch(kv['value'], item['value']) if name and value: return True return False def schema_validate(self,", "self.gsize(item, 'request.bodySize') + self.gsize(item, 'request.headerSize')), 'response': (lambda item: self.gsize(item, 'response.bodySize') + self.gsize(item, 'response.headerSize')),", "_: self.valid_json(str(glom(item, 'response.content.text', default=None)))), 'json_path': (lambda item, path: self.has_json_path(str(glom(item, 'response.content.text', default=None)), path)), 'json_schema':", "har_entry_filters = { 'page': (lambda item, pgref: glom(item, 'pageref', default='') == pgref ),", "False return True def has_json_path(self, json_str, json_path): if self.valid_json(json_str) == False: return False", "schema=schema) except (ValidationError, ValueError) as e: return False return True def valid_json(self, item):", "> 0 def not_present(self, criteria): return len(self.entries(criteria)) == 0 def max(self, criteria, measurement_name):", "HarVerifications: def __init__(self, har): self.har = har def rmatch(self, val, str_rxp): if val", "try: if type(item) == str: item = json.loads(item) validate(instance=item, schema=schema) except (ValidationError, ValueError)", "self.gsize(item, 'time')), } method = measurements[measurement] return list(map(method, items)) def present(self, criteria): return", "or filter def entries(self, criteria=False): entry_list = self.har['log']['entries'] har_entry_filters = { 'page': (lambda", "default=None)),schema)), } for filter_name, target_value in 
criteria.items(): filter_lambda = har_entry_filters[filter_name] if filter_name ==", "return max(self.measure(items, measurement_name), default=0) def get_sum(self, criteria, measurement_name): items = self.entries(criteria) return sum(self.measure(items,", "False jsonpath_expr = parse(json_path) matches = jsonpath_expr.find(json.loads(json_str)) return len(matches) > 0 def current_page(self):", "target_value == 'current': target_value = self.current_page() entry_list = [entry for entry in entry_list", "return entry_list def gsize(self, item, path): return self.not_neg(glom(item, 'request.headersSize', default=0)) def not_neg(self, val):", "), 'status': (lambda item, status: self.rmatch(str(glom(item, 'response.status', default=None)), status)), 'url': (lambda item, url_rxp:", "content_type_rxp: self.rmatch(str(glom(item, 'response.content.mimeType', default=None)), content_type_rxp)), 'request_header': (lambda item, match_rgxp: self.rmatch_key_val(glom(item, 'request.headers',default=[]), match_rgxp)), 'response_header':", "if val == -1 or val is None else val def measure(self, items,", "str: item = json.loads(item) validate(instance=item, schema=schema) except (ValidationError, ValueError) as e: return False", "name = self.rmatch(item['name'], kv['name']) if 'value'in kv: value = self.rmatch(kv['value'], item['value']) if name", "path: self.has_json_path(str(glom(item, 'response.content.text', default=None)), path)), 'json_schema': (lambda item, schema: self.schema_validate(str(glom(item, 'response.content.text', default=None)),schema)), }", "item['value']) if name and value: return True return False def schema_validate(self, item, schema):", "'response.cookies', default=[]), match_rgxp)), 'websocket_message': (lambda item, ws_rxp: self.rmatch_any(glom(item, ('_webSocketMessages', ['data']), default=[]), ws_rxp)), 'json_valid':", "'pageref', default='') == pgref ), 'status': (lambda item, status: self.rmatch(str(glom(item, 'response.status', default=None)), 
status)),", "in kv: name = self.rmatch(item['name'], kv['name']) if 'value'in kv: value = self.rmatch(kv['value'], item['value'])", "'response_header': (lambda item, match_rgxp: self.rmatch_key_val(glom(item, 'response.headers', default=[]), match_rgxp)), 'request_cookie': (lambda item, match_rgxp: self.rmatch_key_val(glom(item,", "match_rgxp: self.rmatch_key_val(glom(item, 'request.cookies', default=[]), match_rgxp)), 'response_cookie': (lambda item, match_rgxp: self.rmatch_key_val(glom(item, 'response.cookies', default=[]), match_rgxp)),", "== 'current': target_value = self.current_page() entry_list = [entry for entry in entry_list if", "entry_list def gsize(self, item, path): return self.not_neg(glom(item, 'request.headersSize', default=0)) def not_neg(self, val): val", "json.loads(item) validate(instance=item, schema=schema) except (ValidationError, ValueError) as e: return False return True def", "for item in items: if isinstance(item, bytes): item = str(item, \"utf-8\") if self.rmatch(item,", "Use glom to dig into the har entries, responses and websockets to get", "entry_list if filter_lambda(entry, target_value)] return entry_list def gsize(self, item, path): return self.not_neg(glom(item, 'request.headersSize',", "'request.bodySize')), 'request': (lambda item: self.gsize(item, 'request.bodySize') + self.gsize(item, 'request.headerSize')), 'response': (lambda item: self.gsize(item,", "status: self.rmatch(str(glom(item, 'response.status', default=None)), status)), 'url': (lambda item, url_rxp: self.rmatch(str(glom(item, 'request.url', default=None)), url_rxp)),", "current, *, or filter def entries(self, criteria=False): entry_list = self.har['log']['entries'] har_entry_filters = {", "__init__(self, har): self.har = har def rmatch(self, val, str_rxp): if val is None:", "jsonschema import ValidationError from jsonpath_ng import parse class HarVerifications: def __init__(self, har): self.har", "if val is None: return False if isinstance(val, bytes): val = 
str(val, \"utf-8\")", "criteria): return len(self.entries(criteria)) == 0 def max(self, criteria, measurement_name): items = self.entries(criteria) return", "current_page(self): return self.har['log']['pages'][-1]['id'] # Use glom to dig into the har entries, responses", "False for item in items: if 'name' in kv: name = self.rmatch(item['name'], kv['name'])", "default='') == pgref ), 'status': (lambda item, status: self.rmatch(str(glom(item, 'response.status', default=None)), status)), 'url':", "in items: if 'name' in kv: name = self.rmatch(item['name'], kv['name']) if 'value'in kv:", "val = str(val, \"utf-8\") return re.search(str_rxp, val, flags=re.IGNORECASE) def rmatch_any(self, items, rxp): if", "criteria, measurement_name): items = self.entries(criteria) return max(self.measure(items, measurement_name), default=0) def get_sum(self, criteria, measurement_name):", "(lambda item, match_rgxp: self.rmatch_key_val(glom(item, 'response.headers', default=[]), match_rgxp)), 'request_cookie': (lambda item, match_rgxp: self.rmatch_key_val(glom(item, 'request.cookies',", "kv['name']) if 'value'in kv: value = self.rmatch(kv['value'], item['value']) if name and value: return", "(lambda item: self.gsize(item, 'request.bodySize') + self.gsize(item, 'request.headerSize')), 'response': (lambda item: self.gsize(item, 'response.bodySize') +", "item, content_rxp: self.rmatch(str(glom(item, 'response.content.text', default=None)), content_rxp)), 'content_type': (lambda item, content_type_rxp: self.rmatch(str(glom(item, 'response.content.mimeType', default=None)),", "= True if items is None or len(items) == 0 or kv is", "parse(json_path) matches = jsonpath_expr.find(json.loads(json_str)) return len(matches) > 0 def current_page(self): return self.har['log']['pages'][-1]['id'] #", "and websockets to get down to an array of something or others #", "def entries(self, criteria=False): entry_list = self.har['log']['entries'] har_entry_filters = { 'page': (lambda item, pgref:", 
"'content_type': (lambda item, content_type_rxp: self.rmatch(str(glom(item, 'response.content.mimeType', default=None)), content_type_rxp)), 'request_header': (lambda item, match_rgxp: self.rmatch_key_val(glom(item,", "dig into the har entries, responses and websockets to get down to an", "item, schema: self.schema_validate(str(glom(item, 'response.content.text', default=None)),schema)), } for filter_name, target_value in criteria.items(): filter_lambda =", "= str(val, \"utf-8\") return re.search(str_rxp, val, flags=re.IGNORECASE) def rmatch_any(self, items, rxp): if items", "0 def current_page(self): return self.har['log']['pages'][-1]['id'] # Use glom to dig into the har", "responses and websockets to get down to an array of something or others", "'request_header': (lambda item, match_rgxp: self.rmatch_key_val(glom(item, 'request.headers',default=[]), match_rgxp)), 'response_header': (lambda item, match_rgxp: self.rmatch_key_val(glom(item, 'response.headers',", "flags=re.IGNORECASE) def rmatch_any(self, items, rxp): if items is None or len(items) == 0", "or rxp is None: return False for item in items: if isinstance(item, bytes):", "then we execute our test against that item # current, *, or filter", "item: self.gsize(item, 'time')), } method = measurements[measurement] return list(map(method, items)) def present(self, criteria):", "match_rgxp)), 'response_cookie': (lambda item, match_rgxp: self.rmatch_key_val(glom(item, 'response.cookies', default=[]), match_rgxp)), 'websocket_message': (lambda item, ws_rxp:", "self.gsize(item, 'response.headersSize')), 'request_body': (lambda item: self.gsize(item, 'request.bodySize')), 'response_body': (lambda item: self.gsize(item, 'request.bodySize')), 'request':", "max(self.measure(items, measurement_name), default=0) def get_sum(self, criteria, measurement_name): items = self.entries(criteria) return sum(self.measure(items, measurement_name))", "the har entries, responses and websockets to get down to an array of", "others 
# (headers, websocket messages, content then we execute our test against that", "item, ws_rxp: self.rmatch_any(glom(item, ('_webSocketMessages', ['data']), default=[]), ws_rxp)), 'json_valid': (lambda item, _: self.valid_json(str(glom(item, 'response.content.text',", "self.gsize(item, 'request.bodySize')), 'request': (lambda item: self.gsize(item, 'request.bodySize') + self.gsize(item, 'request.headerSize')), 'response': (lambda item:", "False return True def valid_json(self, item): if item is None: return False try:", "val is None: return False if isinstance(val, bytes): val = str(val, \"utf-8\") return", "(lambda item: self.gsize(item, 'response.bodySize') + self.gsize(item, 'response.headerSize')), 'time': (lambda item: self.gsize(item, 'time')), }", "item # current, *, or filter def entries(self, criteria=False): entry_list = self.har['log']['entries'] har_entry_filters", "array of something or others # (headers, websocket messages, content then we execute", "criteria.items(): filter_lambda = har_entry_filters[filter_name] if filter_name == 'page' and target_value == 'current': target_value", "url_rxp)), 'content': (lambda item, content_rxp: self.rmatch(str(glom(item, 'response.content.text', default=None)), content_rxp)), 'content_type': (lambda item, content_type_rxp:", "item, url_rxp: self.rmatch(str(glom(item, 'request.url', default=None)), url_rxp)), 'content': (lambda item, content_rxp: self.rmatch(str(glom(item, 'response.content.text', default=None)),", "type(item) == str: item = json.loads(item) validate(instance=item, schema=schema) except (ValidationError, ValueError) as e:", "0 def not_present(self, criteria): return len(self.entries(criteria)) == 0 def max(self, criteria, measurement_name): items", "self.rmatch_any(glom(item, ('_webSocketMessages', ['data']), default=[]), ws_rxp)), 'json_valid': (lambda item, _: self.valid_json(str(glom(item, 'response.content.text', default=None)))), 'json_path':", "if type(item) == str: item = json.loads(item) 
validate(instance=item, schema=schema) except (ValidationError, ValueError) as", "list(map(method, items)) def present(self, criteria): return len(self.entries(criteria)) > 0 def not_present(self, criteria): return", "bytes): item = str(item, \"utf-8\") if self.rmatch(item, rxp): return True return False def", "None or len(items) == 0 or rxp is None: return False for item", "our test against that item # current, *, or filter def entries(self, criteria=False):", "return len(self.entries(criteria)) > 0 def not_present(self, criteria): return len(self.entries(criteria)) == 0 def max(self,", "= self.har['log']['entries'] har_entry_filters = { 'page': (lambda item, pgref: glom(item, 'pageref', default='') ==", "if filter_name == 'page' and target_value == 'current': target_value = self.current_page() entry_list =", "False: return False jsonpath_expr = parse(json_path) matches = jsonpath_expr.find(json.loads(json_str)) return len(matches) > 0", "content_rxp)), 'content_type': (lambda item, content_type_rxp: self.rmatch(str(glom(item, 'response.content.mimeType', default=None)), content_type_rxp)), 'request_header': (lambda item, match_rgxp:", "items, kv): name = True value = True if items is None or", "('_webSocketMessages', ['data']), default=[]), ws_rxp)), 'json_valid': (lambda item, _: self.valid_json(str(glom(item, 'response.content.text', default=None)))), 'json_path': (lambda", "'response.headersSize')), 'request_body': (lambda item: self.gsize(item, 'request.bodySize')), 'response_body': (lambda item: self.gsize(item, 'request.bodySize')), 'request': (lambda", "'request_body': (lambda item: self.gsize(item, 'request.bodySize')), 'response_body': (lambda item: self.gsize(item, 'request.bodySize')), 'request': (lambda item:", "'response.headerSize')), 'time': (lambda item: self.gsize(item, 'time')), } method = measurements[measurement] return list(map(method, items))", "is None: return False for item in items: if isinstance(item, bytes): item =", "(lambda item, 
content_rxp: self.rmatch(str(glom(item, 'response.content.text', default=None)), content_rxp)), 'content_type': (lambda item, content_type_rxp: self.rmatch(str(glom(item, 'response.content.mimeType',", "entry_list = [entry for entry in entry_list if filter_lambda(entry, target_value)] return entry_list def", "self.valid_json(str(glom(item, 'response.content.text', default=None)))), 'json_path': (lambda item, path: self.has_json_path(str(glom(item, 'response.content.text', default=None)), path)), 'json_schema': (lambda", "items = self.entries(criteria) return max(self.measure(items, measurement_name), default=0) def get_sum(self, criteria, measurement_name): items =", "from jsonpath_ng import parse class HarVerifications: def __init__(self, har): self.har = har def", "jsonpath_expr = parse(json_path) matches = jsonpath_expr.find(json.loads(json_str)) return len(matches) > 0 def current_page(self): return", "isinstance(item, bytes): item = str(item, \"utf-8\") if self.rmatch(item, rxp): return True return False", "that item # current, *, or filter def entries(self, criteria=False): entry_list = self.har['log']['entries']", "class HarVerifications: def __init__(self, har): self.har = har def rmatch(self, val, str_rxp): if", "\"utf-8\") return re.search(str_rxp, val, flags=re.IGNORECASE) def rmatch_any(self, items, rxp): if items is None", "we execute our test against that item # current, *, or filter def", "== 'page' and target_value == 'current': target_value = self.current_page() entry_list = [entry for", "kv): name = True value = True if items is None or len(items)", "'response.bodySize') + self.gsize(item, 'response.headerSize')), 'time': (lambda item: self.gsize(item, 'time')), } method = measurements[measurement]", "item in items: if 'name' in kv: name = self.rmatch(item['name'], kv['name']) if 'value'in", "== False: return False jsonpath_expr = parse(json_path) matches = jsonpath_expr.find(json.loads(json_str)) return len(matches) >", "default=[]), 
match_rgxp)), 'request_cookie': (lambda item, match_rgxp: self.rmatch_key_val(glom(item, 'request.cookies', default=[]), match_rgxp)), 'response_cookie': (lambda item,", "self.gsize(item, 'request.headersSize')), 'response_headers': (lambda item: self.gsize(item, 'response.headersSize')), 'request_body': (lambda item: self.gsize(item, 'request.bodySize')), 'response_body':", "val, flags=re.IGNORECASE) def rmatch_any(self, items, rxp): if items is None or len(items) ==", "jsonpath_ng import parse class HarVerifications: def __init__(self, har): self.har = har def rmatch(self,", "item is None: return False try: json.loads(item) except ValueError as e: return False", "None: return False try: json.loads(item) except ValueError as e: return False return True", "= self.entries(criteria) return max(self.measure(items, measurement_name), default=0) def get_sum(self, criteria, measurement_name): items = self.entries(criteria)", "measurements[measurement] return list(map(method, items)) def present(self, criteria): return len(self.entries(criteria)) > 0 def not_present(self,", "{ 'page': (lambda item, pgref: glom(item, 'pageref', default='') == pgref ), 'status': (lambda", "item, status: self.rmatch(str(glom(item, 'response.status', default=None)), status)), 'url': (lambda item, url_rxp: self.rmatch(str(glom(item, 'request.url', default=None)),", "int(val) return 0 if val == -1 or val is None else val", "default=None)))), 'json_path': (lambda item, path: self.has_json_path(str(glom(item, 'response.content.text', default=None)), path)), 'json_schema': (lambda item, schema:", "= jsonpath_expr.find(json.loads(json_str)) return len(matches) > 0 def current_page(self): return self.har['log']['pages'][-1]['id'] # Use glom", "json_str, json_path): if self.valid_json(json_str) == False: return False jsonpath_expr = parse(json_path) matches =", "= har_entry_filters[filter_name] if filter_name == 'page' and target_value == 'current': target_value = self.current_page()", "# current, *, 
or filter def entries(self, criteria=False): entry_list = self.har['log']['entries'] har_entry_filters =", "has_json_path(self, json_str, json_path): if self.valid_json(json_str) == False: return False jsonpath_expr = parse(json_path) matches", "default=[]), ws_rxp)), 'json_valid': (lambda item, _: self.valid_json(str(glom(item, 'response.content.text', default=None)))), 'json_path': (lambda item, path:", "== -1 or val is None else val def measure(self, items, measurement): measurements", "validate(instance=item, schema=schema) except (ValidationError, ValueError) as e: return False return True def valid_json(self,", "(lambda item, _: self.valid_json(str(glom(item, 'response.content.text', default=None)))), 'json_path': (lambda item, path: self.has_json_path(str(glom(item, 'response.content.text', default=None)),", "False for item in items: if isinstance(item, bytes): item = str(item, \"utf-8\") if", "return self.not_neg(glom(item, 'request.headersSize', default=0)) def not_neg(self, val): val = int(val) return 0 if", "self.rmatch_key_val(glom(item, 'response.cookies', default=[]), match_rgxp)), 'websocket_message': (lambda item, ws_rxp: self.rmatch_any(glom(item, ('_webSocketMessages', ['data']), default=[]), ws_rxp)),", "str(val, \"utf-8\") return re.search(str_rxp, val, flags=re.IGNORECASE) def rmatch_any(self, items, rxp): if items is", "['data']), default=[]), ws_rxp)), 'json_valid': (lambda item, _: self.valid_json(str(glom(item, 'response.content.text', default=None)))), 'json_path': (lambda item,", "validate from jsonschema import ValidationError from jsonpath_ng import parse class HarVerifications: def __init__(self,", "rxp): return True return False def rmatch_key_val(self, items, kv): name = True value", "def rmatch(self, val, str_rxp): if val is None: return False if isinstance(val, bytes):", "against that item # current, *, or filter def entries(self, criteria=False): entry_list =", "e: return False return True def has_json_path(self, json_str, 
json_path): if self.valid_json(json_str) == False:", "'response_body': (lambda item: self.gsize(item, 'request.bodySize')), 'request': (lambda item: self.gsize(item, 'request.bodySize') + self.gsize(item, 'request.headerSize')),", "items: if isinstance(item, bytes): item = str(item, \"utf-8\") if self.rmatch(item, rxp): return True", "item, match_rgxp: self.rmatch_key_val(glom(item, 'request.headers',default=[]), match_rgxp)), 'response_header': (lambda item, match_rgxp: self.rmatch_key_val(glom(item, 'response.headers', default=[]), match_rgxp)),", "content_rxp: self.rmatch(str(glom(item, 'response.content.text', default=None)), content_rxp)), 'content_type': (lambda item, content_type_rxp: self.rmatch(str(glom(item, 'response.content.mimeType', default=None)), content_type_rxp)),", "(lambda item: self.gsize(item, 'time')), } method = measurements[measurement] return list(map(method, items)) def present(self,", "jsonschema import validate from jsonschema import ValidationError from jsonpath_ng import parse class HarVerifications:", "is None: return False try: json.loads(item) except ValueError as e: return False return", "'request.bodySize')), 'response_body': (lambda item: self.gsize(item, 'request.bodySize')), 'request': (lambda item: self.gsize(item, 'request.bodySize') + self.gsize(item,", "(headers, websocket messages, content then we execute our test against that item #", "not_neg(self, val): val = int(val) return 0 if val == -1 or val", "rmatch_any(self, items, rxp): if items is None or len(items) == 0 or rxp", "self.rmatch(str(glom(item, 'request.url', default=None)), url_rxp)), 'content': (lambda item, content_rxp: self.rmatch(str(glom(item, 'response.content.text', default=None)), content_rxp)), 'content_type':", "def rmatch_key_val(self, items, kv): name = True value = True if items is", "def schema_validate(self, item, schema): try: if type(item) == str: item = json.loads(item) validate(instance=item,", "if self.rmatch(item, rxp): return True return 
False def rmatch_key_val(self, items, kv): name =", "{ 'request_headers': (lambda item: self.gsize(item, 'request.headersSize')), 'response_headers': (lambda item: self.gsize(item, 'response.headersSize')), 'request_body': (lambda", "filter_name, target_value in criteria.items(): filter_lambda = har_entry_filters[filter_name] if filter_name == 'page' and target_value", "value = self.rmatch(kv['value'], item['value']) if name and value: return True return False def", "items, rxp): if items is None or len(items) == 0 or rxp is", "rxp): if items is None or len(items) == 0 or rxp is None:", "(lambda item, url_rxp: self.rmatch(str(glom(item, 'request.url', default=None)), url_rxp)), 'content': (lambda item, content_rxp: self.rmatch(str(glom(item, 'response.content.text',", "return len(matches) > 0 def current_page(self): return self.har['log']['pages'][-1]['id'] # Use glom to dig", "default=[]), match_rgxp)), 'websocket_message': (lambda item, ws_rxp: self.rmatch_any(glom(item, ('_webSocketMessages', ['data']), default=[]), ws_rxp)), 'json_valid': (lambda", "self.rmatch(str(glom(item, 'response.content.mimeType', default=None)), content_type_rxp)), 'request_header': (lambda item, match_rgxp: self.rmatch_key_val(glom(item, 'request.headers',default=[]), match_rgxp)), 'response_header': (lambda", "item in items: if isinstance(item, bytes): item = str(item, \"utf-8\") if self.rmatch(item, rxp):", "self.rmatch_key_val(glom(item, 'request.cookies', default=[]), match_rgxp)), 'response_cookie': (lambda item, match_rgxp: self.rmatch_key_val(glom(item, 'response.cookies', default=[]), match_rgxp)), 'websocket_message':", "filter_lambda = har_entry_filters[filter_name] if filter_name == 'page' and target_value == 'current': target_value =", "= har def rmatch(self, val, str_rxp): if val is None: return False if", "bytes): val = str(val, \"utf-8\") return re.search(str_rxp, val, flags=re.IGNORECASE) def rmatch_any(self, items, rxp):", "'time')), } method = 
measurements[measurement] return list(map(method, items)) def present(self, criteria): return len(self.entries(criteria))", "except (ValidationError, ValueError) as e: return False return True def valid_json(self, item): if", "'json_valid': (lambda item, _: self.valid_json(str(glom(item, 'response.content.text', default=None)))), 'json_path': (lambda item, path: self.has_json_path(str(glom(item, 'response.content.text',", "in items: if isinstance(item, bytes): item = str(item, \"utf-8\") if self.rmatch(item, rxp): return", "True if items is None or len(items) == 0 or kv is None:", "(lambda item, match_rgxp: self.rmatch_key_val(glom(item, 'request.headers',default=[]), match_rgxp)), 'response_header': (lambda item, match_rgxp: self.rmatch_key_val(glom(item, 'response.headers', default=[]),", "e: return False return True def valid_json(self, item): if item is None: return", "glom(item, 'pageref', default='') == pgref ), 'status': (lambda item, status: self.rmatch(str(glom(item, 'response.status', default=None)),", "valid_json(self, item): if item is None: return False try: json.loads(item) except ValueError as", "(lambda item, status: self.rmatch(str(glom(item, 'response.status', default=None)), status)), 'url': (lambda item, url_rxp: self.rmatch(str(glom(item, 'request.url',", "= { 'request_headers': (lambda item: self.gsize(item, 'request.headersSize')), 'response_headers': (lambda item: self.gsize(item, 'response.headersSize')), 'request_body':", "from glom import glom import json from jsonschema import validate from jsonschema import", "return False return True def has_json_path(self, json_str, json_path): if self.valid_json(json_str) == False: return", "match_rgxp: self.rmatch_key_val(glom(item, 'response.cookies', default=[]), match_rgxp)), 'websocket_message': (lambda item, ws_rxp: self.rmatch_any(glom(item, ('_webSocketMessages', ['data']), default=[]),", "(lambda item, path: self.has_json_path(str(glom(item, 'response.content.text', default=None)), path)), 
'json_schema': (lambda item, schema: self.schema_validate(str(glom(item, 'response.content.text',", "for entry in entry_list if filter_lambda(entry, target_value)] return entry_list def gsize(self, item, path):", "import json from jsonschema import validate from jsonschema import ValidationError from jsonpath_ng import", "item, schema): try: if type(item) == str: item = json.loads(item) validate(instance=item, schema=schema) except", "if 'value'in kv: value = self.rmatch(kv['value'], item['value']) if name and value: return True", "'response.content.mimeType', default=None)), content_type_rxp)), 'request_header': (lambda item, match_rgxp: self.rmatch_key_val(glom(item, 'request.headers',default=[]), match_rgxp)), 'response_header': (lambda item,", "method = measurements[measurement] return list(map(method, items)) def present(self, criteria): return len(self.entries(criteria)) > 0", "(lambda item, match_rgxp: self.rmatch_key_val(glom(item, 'response.cookies', default=[]), match_rgxp)), 'websocket_message': (lambda item, ws_rxp: self.rmatch_any(glom(item, ('_webSocketMessages',", "in criteria.items(): filter_lambda = har_entry_filters[filter_name] if filter_name == 'page' and target_value == 'current':", "= [entry for entry in entry_list if filter_lambda(entry, target_value)] return entry_list def gsize(self,", "for filter_name, target_value in criteria.items(): filter_lambda = har_entry_filters[filter_name] if filter_name == 'page' and", "== 0 or rxp is None: return False for item in items: if", "glom import glom import json from jsonschema import validate from jsonschema import ValidationError", "= measurements[measurement] return list(map(method, items)) def present(self, criteria): return len(self.entries(criteria)) > 0 def", "if isinstance(item, bytes): item = str(item, \"utf-8\") if self.rmatch(item, rxp): return True return", "url_rxp: self.rmatch(str(glom(item, 'request.url', default=None)), url_rxp)), 'content': (lambda item, content_rxp: 
self.rmatch(str(glom(item, 'response.content.text', default=None)), content_rxp)),", "ValidationError from jsonpath_ng import parse class HarVerifications: def __init__(self, har): self.har = har", "if item is None: return False try: json.loads(item) except ValueError as e: return", "item, match_rgxp: self.rmatch_key_val(glom(item, 'response.headers', default=[]), match_rgxp)), 'request_cookie': (lambda item, match_rgxp: self.rmatch_key_val(glom(item, 'request.cookies', default=[]),", "jsonpath_expr.find(json.loads(json_str)) return len(matches) > 0 def current_page(self): return self.har['log']['pages'][-1]['id'] # Use glom to", "ws_rxp: self.rmatch_any(glom(item, ('_webSocketMessages', ['data']), default=[]), ws_rxp)), 'json_valid': (lambda item, _: self.valid_json(str(glom(item, 'response.content.text', default=None)))),", "item): if item is None: return False try: json.loads(item) except ValueError as e:", "or kv is None: return False for item in items: if 'name' in", "test against that item # current, *, or filter def entries(self, criteria=False): entry_list", "True def has_json_path(self, json_str, json_path): if self.valid_json(json_str) == False: return False jsonpath_expr =", "target_value in criteria.items(): filter_lambda = har_entry_filters[filter_name] if filter_name == 'page' and target_value ==", "return False jsonpath_expr = parse(json_path) matches = jsonpath_expr.find(json.loads(json_str)) return len(matches) > 0 def", "content then we execute our test against that item # current, *, or", "(lambda item, content_type_rxp: self.rmatch(str(glom(item, 'response.content.mimeType', default=None)), content_type_rxp)), 'request_header': (lambda item, match_rgxp: self.rmatch_key_val(glom(item, 'request.headers',default=[]),", "target_value)] return entry_list def gsize(self, item, path): return self.not_neg(glom(item, 'request.headersSize', default=0)) def not_neg(self,", "entries(self, criteria=False): entry_list = self.har['log']['entries'] 
har_entry_filters = { 'page': (lambda item, pgref: glom(item,", "messages, content then we execute our test against that item # current, *,", "} method = measurements[measurement] return list(map(method, items)) def present(self, criteria): return len(self.entries(criteria)) >", "True def valid_json(self, item): if item is None: return False try: json.loads(item) except", "None: return False for item in items: if 'name' in kv: name =", "into the har entries, responses and websockets to get down to an array", "'response.headers', default=[]), match_rgxp)), 'request_cookie': (lambda item, match_rgxp: self.rmatch_key_val(glom(item, 'request.cookies', default=[]), match_rgxp)), 'response_cookie': (lambda", "def present(self, criteria): return len(self.entries(criteria)) > 0 def not_present(self, criteria): return len(self.entries(criteria)) ==", "in entry_list if filter_lambda(entry, target_value)] return entry_list def gsize(self, item, path): return self.not_neg(glom(item,", "+ self.gsize(item, 'response.headerSize')), 'time': (lambda item: self.gsize(item, 'time')), } method = measurements[measurement] return", "'request.headerSize')), 'response': (lambda item: self.gsize(item, 'response.bodySize') + self.gsize(item, 'response.headerSize')), 'time': (lambda item: self.gsize(item,", "ValueError) as e: return False return True def valid_json(self, item): if item is", "json_path): if self.valid_json(json_str) == False: return False jsonpath_expr = parse(json_path) matches = jsonpath_expr.find(json.loads(json_str))", "item, pgref: glom(item, 'pageref', default='') == pgref ), 'status': (lambda item, status: self.rmatch(str(glom(item,", "True return False def rmatch_key_val(self, items, kv): name = True value = True", "-1 or val is None else val def measure(self, items, measurement): measurements =", "if 'name' in kv: name = self.rmatch(item['name'], kv['name']) if 'value'in kv: value =", "len(items) == 0 or kv is None: return False for item in items:", "match_rgxp: 
self.rmatch_key_val(glom(item, 'request.headers',default=[]), match_rgxp)), 'response_header': (lambda item, match_rgxp: self.rmatch_key_val(glom(item, 'response.headers', default=[]), match_rgxp)), 'request_cookie':", "'request': (lambda item: self.gsize(item, 'request.bodySize') + self.gsize(item, 'request.headerSize')), 'response': (lambda item: self.gsize(item, 'response.bodySize')", "== str: item = json.loads(item) validate(instance=item, schema=schema) except (ValidationError, ValueError) as e: return", "match_rgxp)), 'response_header': (lambda item, match_rgxp: self.rmatch_key_val(glom(item, 'response.headers', default=[]), match_rgxp)), 'request_cookie': (lambda item, match_rgxp:", "filter def entries(self, criteria=False): entry_list = self.har['log']['entries'] har_entry_filters = { 'page': (lambda item,", "True return False def schema_validate(self, item, schema): try: if type(item) == str: item", "measurement): measurements = { 'request_headers': (lambda item: self.gsize(item, 'request.headersSize')), 'response_headers': (lambda item: self.gsize(item,", "entries, responses and websockets to get down to an array of something or", "import glom import json from jsonschema import validate from jsonschema import ValidationError from", "[entry for entry in entry_list if filter_lambda(entry, target_value)] return entry_list def gsize(self, item,", "import ValidationError from jsonpath_ng import parse class HarVerifications: def __init__(self, har): self.har =", "def not_neg(self, val): val = int(val) return 0 if val == -1 or", "get down to an array of something or others # (headers, websocket messages,", "return False try: json.loads(item) except ValueError as e: return False return True def", "item: self.gsize(item, 'response.headersSize')), 'request_body': (lambda item: self.gsize(item, 'request.bodySize')), 'response_body': (lambda item: self.gsize(item, 'request.bodySize')),", "websockets to get down to an array of something or others # (headers,", "or val 
is None else val def measure(self, items, measurement): measurements = {", "== pgref ), 'status': (lambda item, status: self.rmatch(str(glom(item, 'response.status', default=None)), status)), 'url': (lambda", "item, _: self.valid_json(str(glom(item, 'response.content.text', default=None)))), 'json_path': (lambda item, path: self.has_json_path(str(glom(item, 'response.content.text', default=None)), path)),", "item: self.gsize(item, 'request.bodySize') + self.gsize(item, 'request.headerSize')), 'response': (lambda item: self.gsize(item, 'response.bodySize') + self.gsize(item,", "default=None)), url_rxp)), 'content': (lambda item, content_rxp: self.rmatch(str(glom(item, 'response.content.text', default=None)), content_rxp)), 'content_type': (lambda item,", "(lambda item, match_rgxp: self.rmatch_key_val(glom(item, 'request.cookies', default=[]), match_rgxp)), 'response_cookie': (lambda item, match_rgxp: self.rmatch_key_val(glom(item, 'response.cookies',", "self.gsize(item, 'response.headerSize')), 'time': (lambda item: self.gsize(item, 'time')), } method = measurements[measurement] return list(map(method,", "= self.rmatch(item['name'], kv['name']) if 'value'in kv: value = self.rmatch(kv['value'], item['value']) if name and", "return False return True def valid_json(self, item): if item is None: return False", "rxp is None: return False for item in items: if isinstance(item, bytes): item", "kv: value = self.rmatch(kv['value'], item['value']) if name and value: return True return False", "self.har['log']['pages'][-1]['id'] # Use glom to dig into the har entries, responses and websockets" ]
[ "import logging logger = logging.getLogger(__name__) def json_html_response(request, template_name, code, message): \"\"\" Provide response", "== 'application/json': response = JsonResponse(status=code, data={'results': {'code': code, 'msg': message}}) else: try: template", "String :return: JsonResponse|HttpResponseNotFound :TODO fix format data duplication for v1/utils format for error", "return json_html_response(request, template_name, 500, 'Internal Server Error') def e404(request, template_name='404.html'): return json_html_response(request, template_name,", "v1/utils format for error response \"\"\" if request.META.get('CONTENT_TYPE') == 'application/json': response = JsonResponse(status=code,", "HttpResponseNotFound from django.template import RequestContext, loader, Template, TemplateDoesNotExist import logging logger = logging.getLogger(__name__)", "Server Error') def e404(request, template_name='404.html'): return json_html_response(request, template_name, 404, 'Not Found') def csrf_failure(request,", "'Not Found') def csrf_failure(request, reason=\"\"): logger.error('error 403: ' + str(request)) return json_html_response(request, '403.html',", "html format accordingly content-type :param request: HttpRequest :param template_name: String :param code: Integer", "HttpRequest :param template_name: String :param code: Integer :param message: String :return: JsonResponse|HttpResponseNotFound :TODO", "request: HttpRequest :param template_name: String :param code: Integer :param message: String :return: JsonResponse|HttpResponseNotFound", ":TODO fix format data duplication for v1/utils format for error response \"\"\" if", "from django.template import RequestContext, loader, Template, TemplateDoesNotExist import logging logger = logging.getLogger(__name__) def", "template_name, 404, 'Not Found') def csrf_failure(request, reason=\"\"): logger.error('error 403: ' + str(request)) return", "response = JsonResponse(status=code, data={'results': {'code': code, 
'msg': message}}) else: try: template = loader.get_template(template_name)", "return response def e500(request, template_name='500.html'): return json_html_response(request, template_name, 500, 'Internal Server Error') def", "import JsonResponse, HttpResponseNotFound from django.template import RequestContext, loader, Template, TemplateDoesNotExist import logging logger", "django.http import JsonResponse, HttpResponseNotFound from django.template import RequestContext, loader, Template, TemplateDoesNotExist import logging", "e404(request, template_name='404.html'): return json_html_response(request, template_name, 404, 'Not Found') def csrf_failure(request, reason=\"\"): logger.error('error 403:", "format accordingly content-type :param request: HttpRequest :param template_name: String :param code: Integer :param", "logging.getLogger(__name__) def json_html_response(request, template_name, code, message): \"\"\" Provide response in json or html", "logger = logging.getLogger(__name__) def json_html_response(request, template_name, code, message): \"\"\" Provide response in json", "template_name, 500, 'Internal Server Error') def e404(request, template_name='404.html'): return json_html_response(request, template_name, 404, 'Not", "code, 'msg': message}}) else: try: template = loader.get_template(template_name) except TemplateDoesNotExist: template = Template(message)", "message): \"\"\" Provide response in json or html format accordingly content-type :param request:", "404, 'Not Found') def csrf_failure(request, reason=\"\"): logger.error('error 403: ' + str(request)) return json_html_response(request,", "except TemplateDoesNotExist: template = Template(message) response = HttpResponseNotFound(template.render(RequestContext(request))) return response def e500(request, template_name='500.html'):", "template = Template(message) response = HttpResponseNotFound(template.render(RequestContext(request))) return response def e500(request, template_name='500.html'): return 
json_html_response(request,", "'msg': message}}) else: try: template = loader.get_template(template_name) except TemplateDoesNotExist: template = Template(message) response", "Template(message) response = HttpResponseNotFound(template.render(RequestContext(request))) return response def e500(request, template_name='500.html'): return json_html_response(request, template_name, 500,", ":param code: Integer :param message: String :return: JsonResponse|HttpResponseNotFound :TODO fix format data duplication", "django.template import RequestContext, loader, Template, TemplateDoesNotExist import logging logger = logging.getLogger(__name__) def json_html_response(request,", "from django.http import JsonResponse, HttpResponseNotFound from django.template import RequestContext, loader, Template, TemplateDoesNotExist import", "= loader.get_template(template_name) except TemplateDoesNotExist: template = Template(message) response = HttpResponseNotFound(template.render(RequestContext(request))) return response def", "500, 'Internal Server Error') def e404(request, template_name='404.html'): return json_html_response(request, template_name, 404, 'Not Found')", "json_html_response(request, template_name, code, message): \"\"\" Provide response in json or html format accordingly", "error response \"\"\" if request.META.get('CONTENT_TYPE') == 'application/json': response = JsonResponse(status=code, data={'results': {'code': code,", "else: try: template = loader.get_template(template_name) except TemplateDoesNotExist: template = Template(message) response = HttpResponseNotFound(template.render(RequestContext(request)))", "response def e500(request, template_name='500.html'): return json_html_response(request, template_name, 500, 'Internal Server Error') def e404(request,", "Provide response in json or html format accordingly content-type :param request: HttpRequest :param", "= Template(message) response = HttpResponseNotFound(template.render(RequestContext(request))) return response 
def e500(request, template_name='500.html'): return json_html_response(request, template_name,", "accordingly content-type :param request: HttpRequest :param template_name: String :param code: Integer :param message:", ":return: JsonResponse|HttpResponseNotFound :TODO fix format data duplication for v1/utils format for error response", "response \"\"\" if request.META.get('CONTENT_TYPE') == 'application/json': response = JsonResponse(status=code, data={'results': {'code': code, 'msg':", "fix format data duplication for v1/utils format for error response \"\"\" if request.META.get('CONTENT_TYPE')", "message: String :return: JsonResponse|HttpResponseNotFound :TODO fix format data duplication for v1/utils format for", "message}}) else: try: template = loader.get_template(template_name) except TemplateDoesNotExist: template = Template(message) response =", "json or html format accordingly content-type :param request: HttpRequest :param template_name: String :param", "def e500(request, template_name='500.html'): return json_html_response(request, template_name, 500, 'Internal Server Error') def e404(request, template_name='404.html'):", "{'code': code, 'msg': message}}) else: try: template = loader.get_template(template_name) except TemplateDoesNotExist: template =", "or html format accordingly content-type :param request: HttpRequest :param template_name: String :param code:", "Error') def e404(request, template_name='404.html'): return json_html_response(request, template_name, 404, 'Not Found') def csrf_failure(request, reason=\"\"):", "code: Integer :param message: String :return: JsonResponse|HttpResponseNotFound :TODO fix format data duplication for", "try: template = loader.get_template(template_name) except TemplateDoesNotExist: template = Template(message) response = HttpResponseNotFound(template.render(RequestContext(request))) return", "JsonResponse|HttpResponseNotFound :TODO fix format data duplication for v1/utils format for error response \"\"\"", "\"\"\" 
Provide response in json or html format accordingly content-type :param request: HttpRequest", "JsonResponse, HttpResponseNotFound from django.template import RequestContext, loader, Template, TemplateDoesNotExist import logging logger =", "String :param code: Integer :param message: String :return: JsonResponse|HttpResponseNotFound :TODO fix format data", "data duplication for v1/utils format for error response \"\"\" if request.META.get('CONTENT_TYPE') == 'application/json':", "= HttpResponseNotFound(template.render(RequestContext(request))) return response def e500(request, template_name='500.html'): return json_html_response(request, template_name, 500, 'Internal Server", "def json_html_response(request, template_name, code, message): \"\"\" Provide response in json or html format", "for error response \"\"\" if request.META.get('CONTENT_TYPE') == 'application/json': response = JsonResponse(status=code, data={'results': {'code':", "= JsonResponse(status=code, data={'results': {'code': code, 'msg': message}}) else: try: template = loader.get_template(template_name) except", "data={'results': {'code': code, 'msg': message}}) else: try: template = loader.get_template(template_name) except TemplateDoesNotExist: template", "duplication for v1/utils format for error response \"\"\" if request.META.get('CONTENT_TYPE') == 'application/json': response", "template_name='500.html'): return json_html_response(request, template_name, 500, 'Internal Server Error') def e404(request, template_name='404.html'): return json_html_response(request,", "'Internal Server Error') def e404(request, template_name='404.html'): return json_html_response(request, template_name, 404, 'Not Found') def", "request.META.get('CONTENT_TYPE') == 'application/json': response = JsonResponse(status=code, data={'results': {'code': code, 'msg': message}}) else: try:", "\"\"\" if request.META.get('CONTENT_TYPE') == 'application/json': response = JsonResponse(status=code, data={'results': {'code': code, 
'msg': message}})", "if request.META.get('CONTENT_TYPE') == 'application/json': response = JsonResponse(status=code, data={'results': {'code': code, 'msg': message}}) else:", "response = HttpResponseNotFound(template.render(RequestContext(request))) return response def e500(request, template_name='500.html'): return json_html_response(request, template_name, 500, 'Internal", ":param template_name: String :param code: Integer :param message: String :return: JsonResponse|HttpResponseNotFound :TODO fix", "loader, Template, TemplateDoesNotExist import logging logger = logging.getLogger(__name__) def json_html_response(request, template_name, code, message):", "format for error response \"\"\" if request.META.get('CONTENT_TYPE') == 'application/json': response = JsonResponse(status=code, data={'results':", "template = loader.get_template(template_name) except TemplateDoesNotExist: template = Template(message) response = HttpResponseNotFound(template.render(RequestContext(request))) return response", "def csrf_failure(request, reason=\"\"): logger.error('error 403: ' + str(request)) return json_html_response(request, '403.html', 403, 'Forbidden')", "e500(request, template_name='500.html'): return json_html_response(request, template_name, 500, 'Internal Server Error') def e404(request, template_name='404.html'): return", "Template, TemplateDoesNotExist import logging logger = logging.getLogger(__name__) def json_html_response(request, template_name, code, message): \"\"\"", "template_name: String :param code: Integer :param message: String :return: JsonResponse|HttpResponseNotFound :TODO fix format", "json_html_response(request, template_name, 500, 'Internal Server Error') def e404(request, template_name='404.html'): return json_html_response(request, template_name, 404,", "response in json or html format accordingly content-type :param request: HttpRequest :param template_name:", "= logging.getLogger(__name__) def json_html_response(request, template_name, code, 
message): \"\"\" Provide response in json or", "'application/json': response = JsonResponse(status=code, data={'results': {'code': code, 'msg': message}}) else: try: template =", "return json_html_response(request, template_name, 404, 'Not Found') def csrf_failure(request, reason=\"\"): logger.error('error 403: ' +", "HttpResponseNotFound(template.render(RequestContext(request))) return response def e500(request, template_name='500.html'): return json_html_response(request, template_name, 500, 'Internal Server Error')", "template_name, code, message): \"\"\" Provide response in json or html format accordingly content-type", "logging logger = logging.getLogger(__name__) def json_html_response(request, template_name, code, message): \"\"\" Provide response in", ":param message: String :return: JsonResponse|HttpResponseNotFound :TODO fix format data duplication for v1/utils format", "content-type :param request: HttpRequest :param template_name: String :param code: Integer :param message: String", "import RequestContext, loader, Template, TemplateDoesNotExist import logging logger = logging.getLogger(__name__) def json_html_response(request, template_name,", "def e404(request, template_name='404.html'): return json_html_response(request, template_name, 404, 'Not Found') def csrf_failure(request, reason=\"\"): logger.error('error", "template_name='404.html'): return json_html_response(request, template_name, 404, 'Not Found') def csrf_failure(request, reason=\"\"): logger.error('error 403: '", "RequestContext, loader, Template, TemplateDoesNotExist import logging logger = logging.getLogger(__name__) def json_html_response(request, template_name, code,", "loader.get_template(template_name) except TemplateDoesNotExist: template = Template(message) response = HttpResponseNotFound(template.render(RequestContext(request))) return response def e500(request,", "Integer :param message: String :return: JsonResponse|HttpResponseNotFound :TODO fix format data duplication for 
v1/utils", "code, message): \"\"\" Provide response in json or html format accordingly content-type :param", ":param request: HttpRequest :param template_name: String :param code: Integer :param message: String :return:", "json_html_response(request, template_name, 404, 'Not Found') def csrf_failure(request, reason=\"\"): logger.error('error 403: ' + str(request))", "for v1/utils format for error response \"\"\" if request.META.get('CONTENT_TYPE') == 'application/json': response =", "JsonResponse(status=code, data={'results': {'code': code, 'msg': message}}) else: try: template = loader.get_template(template_name) except TemplateDoesNotExist:", "format data duplication for v1/utils format for error response \"\"\" if request.META.get('CONTENT_TYPE') ==", "TemplateDoesNotExist: template = Template(message) response = HttpResponseNotFound(template.render(RequestContext(request))) return response def e500(request, template_name='500.html'): return", "Found') def csrf_failure(request, reason=\"\"): logger.error('error 403: ' + str(request)) return json_html_response(request, '403.html', 403,", "TemplateDoesNotExist import logging logger = logging.getLogger(__name__) def json_html_response(request, template_name, code, message): \"\"\" Provide", "in json or html format accordingly content-type :param request: HttpRequest :param template_name: String" ]
[ "REQDIR = \"requirements\" def read_reqs(reqs_name): deps = [] with open(os.path.join(REQDIR, \"requirements-{}.txt\".format(reqs_name)), \"r\") as", "the $PATH # scripts = get_static(\"scripts\", condition=\"'.' in x\") scripts = None with", "pandas. pass else: # pandas 0.20.2 needs updated numexpr; the claim is 2.4.6,", "to include with package def get_static(name, condition=None): static = [ os.path.join(name, f) for", "] if condition is None: return static else: return [i for i in", "scripts to be added to the $PATH # scripts = get_static(\"scripts\", condition=\"'.' in", "added to the $PATH # scripts = get_static(\"scripts\", condition=\"'.' in x\") scripts =", "except ImportError: # No numexpr is OK for pandas. pass else: # pandas", ":: 3.5\", \"Programming Language :: Python :: 3.6\", \"Programming Language :: Python ::", "# numexpr for pandas try: import numexpr except ImportError: # No numexpr is", "<reponame>stolarczyk/peppy<filename>setup.py #! /usr/bin/env python import os import sys from setuptools import setup REQDIR", "import numexpr except ImportError: # No numexpr is OK for pandas. pass else:", "def get_static(name, condition=None): static = [ os.path.join(name, f) for f in os.listdir( os.path.join(os.path.dirname(os.path.realpath(__file__)),", "# scripts to be added to the $PATH # scripts = get_static(\"scripts\", condition=\"'.'", "classifiers=[ \"Development Status :: 4 - Beta\", \"License :: OSI Approved :: BSD", "DEPENDENCIES = read_reqs(\"all\") # numexpr for pandas try: import numexpr except ImportError: #", "= read_reqs(\"all\") # numexpr for pandas try: import numexpr except ImportError: # No", "# deps.append(l.split(\"=\")[0].rstrip(\"<>\")) deps.append(l) return deps # Additional keyword arguments for setup(). extra =", "f: long_description = f.read() setup( name=\"peppy\", packages=[\"peppy\"], version=version, description=\"A python-based project metadata manager", "arguments for setup(). 
extra = {} # Ordinary dependencies DEPENDENCIES = read_reqs(\"all\") #", "not l.strip(): continue # deps.append(l.split(\"=\")[0].rstrip(\"<>\")) deps.append(l) return deps # Additional keyword arguments for", "= DEPENDENCIES # Additional files to include with package def get_static(name, condition=None): static", "to be added to the $PATH # scripts = get_static(\"scripts\", condition=\"'.' in x\")", "name=\"peppy\", packages=[\"peppy\"], version=version, description=\"A python-based project metadata manager for portable encapsulated projects\", long_description=long_description,", ") ] if condition is None: return static else: return [i for i", "\"Programming Language :: Python :: 3.6\", \"Programming Language :: Python :: 3.7\", \"Programming", "DEPENDENCIES.append(\"numexpr>=2.6.2\") extra[\"install_requires\"] = DEPENDENCIES # Additional files to include with package def get_static(name,", "long_description=long_description, long_description_content_type=\"text/markdown\", classifiers=[ \"Development Status :: 4 - Beta\", \"License :: OSI Approved", "\"Programming Language :: Python :: 3.5\", \"Programming Language :: Python :: 3.6\", \"Programming", "# No numexpr is OK for pandas. pass else: # pandas 0.20.2 needs", "static)] # scripts to be added to the $PATH # scripts = get_static(\"scripts\",", "with open(\"peppy/_version.py\", \"r\") as versionfile: version = versionfile.readline().split()[-1].strip(\"\\\"'\\n\") with open(\"README.md\") as f: long_description", "failed. 
DEPENDENCIES.append(\"numexpr>=2.6.2\") extra[\"install_requires\"] = DEPENDENCIES # Additional files to include with package def", "pandas try: import numexpr except ImportError: # No numexpr is OK for pandas.", "Ordinary dependencies DEPENDENCIES = read_reqs(\"all\") # numexpr for pandas try: import numexpr except", "long_description = f.read() setup( name=\"peppy\", packages=[\"peppy\"], version=version, description=\"A python-based project metadata manager for", "keywords=\"project, metadata, bioinformatics, sequencing, ngs, workflow\", url=\"https://github.com/pepkit/peppy/\", author=u\"<NAME>, <NAME>, <NAME>, <NAME>\", license=\"BSD2\", scripts=scripts,", "versionfile.readline().split()[-1].strip(\"\\\"'\\n\") with open(\"README.md\") as f: long_description = f.read() setup( name=\"peppy\", packages=[\"peppy\"], version=version, description=\"A", "BSD License\", \"Programming Language :: Python :: 3.5\", \"Programming Language :: Python ::", "version = versionfile.readline().split()[-1].strip(\"\\\"'\\n\") with open(\"README.md\") as f: long_description = f.read() setup( name=\"peppy\", packages=[\"peppy\"],", "os.listdir( os.path.join(os.path.dirname(os.path.realpath(__file__)), name) ) ] if condition is None: return static else: return", "keyword arguments for setup(). extra = {} # Ordinary dependencies DEPENDENCIES = read_reqs(\"all\")", "Additional keyword arguments for setup(). 
extra = {} # Ordinary dependencies DEPENDENCIES =", ":: 3.9\", \"Topic :: Scientific/Engineering :: Bio-Informatics\", ], keywords=\"project, metadata, bioinformatics, sequencing, ngs,", ":: 3.6\", \"Programming Language :: Python :: 3.7\", \"Programming Language :: Python ::", "else: return [i for i in filter(lambda x: eval(condition), static)] # scripts to", "\"License :: OSI Approved :: BSD License\", \"Programming Language :: Python :: 3.5\",", "3.7\", \"Programming Language :: Python :: 3.8\", \"Programming Language :: Python :: 3.9\",", "f: for l in f: if not l.strip(): continue # deps.append(l.split(\"=\")[0].rstrip(\"<>\")) deps.append(l) return", "None: return static else: return [i for i in filter(lambda x: eval(condition), static)]", "], keywords=\"project, metadata, bioinformatics, sequencing, ngs, workflow\", url=\"https://github.com/pepkit/peppy/\", author=u\"<NAME>, <NAME>, <NAME>, <NAME>\", license=\"BSD2\",", "that failed. DEPENDENCIES.append(\"numexpr>=2.6.2\") extra[\"install_requires\"] = DEPENDENCIES # Additional files to include with package", "Language :: Python :: 3.9\", \"Topic :: Scientific/Engineering :: Bio-Informatics\", ], keywords=\"project, metadata,", "with open(os.path.join(REQDIR, \"requirements-{}.txt\".format(reqs_name)), \"r\") as f: for l in f: if not l.strip():", "import os import sys from setuptools import setup REQDIR = \"requirements\" def read_reqs(reqs_name):", "deps = [] with open(os.path.join(REQDIR, \"requirements-{}.txt\".format(reqs_name)), \"r\") as f: for l in f:", "portable encapsulated projects\", long_description=long_description, long_description_content_type=\"text/markdown\", classifiers=[ \"Development Status :: 4 - Beta\", \"License", "scripts = None with open(\"peppy/_version.py\", \"r\") as versionfile: version = versionfile.readline().split()[-1].strip(\"\\\"'\\n\") with open(\"README.md\")", "deps.append(l) return deps # Additional keyword arguments for setup(). 
extra = {} #", "f: if not l.strip(): continue # deps.append(l.split(\"=\")[0].rstrip(\"<>\")) deps.append(l) return deps # Additional keyword", "project metadata manager for portable encapsulated projects\", long_description=long_description, long_description_content_type=\"text/markdown\", classifiers=[ \"Development Status ::", "filter(lambda x: eval(condition), static)] # scripts to be added to the $PATH #", "with package def get_static(name, condition=None): static = [ os.path.join(name, f) for f in", "license=\"BSD2\", scripts=scripts, include_package_data=True, test_suite=\"tests\", tests_require=read_reqs(\"dev\"), setup_requires=( [\"pytest-runner\"] if {\"test\", \"pytest\", \"ptr\"} & set(sys.argv)", "numexpr for pandas try: import numexpr except ImportError: # No numexpr is OK", "f.read() setup( name=\"peppy\", packages=[\"peppy\"], version=version, description=\"A python-based project metadata manager for portable encapsulated", "Language :: Python :: 3.6\", \"Programming Language :: Python :: 3.7\", \"Programming Language", "def read_reqs(reqs_name): deps = [] with open(os.path.join(REQDIR, \"requirements-{}.txt\".format(reqs_name)), \"r\") as f: for l", "encapsulated projects\", long_description=long_description, long_description_content_type=\"text/markdown\", classifiers=[ \"Development Status :: 4 - Beta\", \"License ::", "description=\"A python-based project metadata manager for portable encapsulated projects\", long_description=long_description, long_description_content_type=\"text/markdown\", classifiers=[ \"Development", "be added to the $PATH # scripts = get_static(\"scripts\", condition=\"'.' in x\") scripts", "scripts = get_static(\"scripts\", condition=\"'.' 
in x\") scripts = None with open(\"peppy/_version.py\", \"r\") as", "<NAME>, <NAME>\", license=\"BSD2\", scripts=scripts, include_package_data=True, test_suite=\"tests\", tests_require=read_reqs(\"dev\"), setup_requires=( [\"pytest-runner\"] if {\"test\", \"pytest\", \"ptr\"}", "3.9\", \"Topic :: Scientific/Engineering :: Bio-Informatics\", ], keywords=\"project, metadata, bioinformatics, sequencing, ngs, workflow\",", "include_package_data=True, test_suite=\"tests\", tests_require=read_reqs(\"dev\"), setup_requires=( [\"pytest-runner\"] if {\"test\", \"pytest\", \"ptr\"} & set(sys.argv) else []", "is OK for pandas. pass else: # pandas 0.20.2 needs updated numexpr; the", "[] with open(os.path.join(REQDIR, \"requirements-{}.txt\".format(reqs_name)), \"r\") as f: for l in f: if not", "is None: return static else: return [i for i in filter(lambda x: eval(condition),", "deps # Additional keyword arguments for setup(). extra = {} # Ordinary dependencies", "long_description_content_type=\"text/markdown\", classifiers=[ \"Development Status :: 4 - Beta\", \"License :: OSI Approved ::", "scripts=scripts, include_package_data=True, test_suite=\"tests\", tests_require=read_reqs(\"dev\"), setup_requires=( [\"pytest-runner\"] if {\"test\", \"pytest\", \"ptr\"} & set(sys.argv) else", ":: 4 - Beta\", \"License :: OSI Approved :: BSD License\", \"Programming Language", "#! 
/usr/bin/env python import os import sys from setuptools import setup REQDIR =", "packages=[\"peppy\"], version=version, description=\"A python-based project metadata manager for portable encapsulated projects\", long_description=long_description, long_description_content_type=\"text/markdown\",", "Python :: 3.5\", \"Programming Language :: Python :: 3.6\", \"Programming Language :: Python", "os.path.join(name, f) for f in os.listdir( os.path.join(os.path.dirname(os.path.realpath(__file__)), name) ) ] if condition is", "setup( name=\"peppy\", packages=[\"peppy\"], version=version, description=\"A python-based project metadata manager for portable encapsulated projects\",", "url=\"https://github.com/pepkit/peppy/\", author=u\"<NAME>, <NAME>, <NAME>, <NAME>\", license=\"BSD2\", scripts=scripts, include_package_data=True, test_suite=\"tests\", tests_require=read_reqs(\"dev\"), setup_requires=( [\"pytest-runner\"] if", "files to include with package def get_static(name, condition=None): static = [ os.path.join(name, f)", "# scripts = get_static(\"scripts\", condition=\"'.' 
in x\") scripts = None with open(\"peppy/_version.py\", \"r\")", "static = [ os.path.join(name, f) for f in os.listdir( os.path.join(os.path.dirname(os.path.realpath(__file__)), name) ) ]", "\"r\") as f: for l in f: if not l.strip(): continue # deps.append(l.split(\"=\")[0].rstrip(\"<>\"))", "sequencing, ngs, workflow\", url=\"https://github.com/pepkit/peppy/\", author=u\"<NAME>, <NAME>, <NAME>, <NAME>\", license=\"BSD2\", scripts=scripts, include_package_data=True, test_suite=\"tests\", tests_require=read_reqs(\"dev\"),", "= [] with open(os.path.join(REQDIR, \"requirements-{}.txt\".format(reqs_name)), \"r\") as f: for l in f: if", "open(os.path.join(REQDIR, \"requirements-{}.txt\".format(reqs_name)), \"r\") as f: for l in f: if not l.strip(): continue", "for l in f: if not l.strip(): continue # deps.append(l.split(\"=\")[0].rstrip(\"<>\")) deps.append(l) return deps", "= get_static(\"scripts\", condition=\"'.' in x\") scripts = None with open(\"peppy/_version.py\", \"r\") as versionfile:", ":: OSI Approved :: BSD License\", \"Programming Language :: Python :: 3.5\", \"Programming", "version=version, description=\"A python-based project metadata manager for portable encapsulated projects\", long_description=long_description, long_description_content_type=\"text/markdown\", classifiers=[", "python import os import sys from setuptools import setup REQDIR = \"requirements\" def", "2.4.6, but that failed. 
DEPENDENCIES.append(\"numexpr>=2.6.2\") extra[\"install_requires\"] = DEPENDENCIES # Additional files to include", "Status :: 4 - Beta\", \"License :: OSI Approved :: BSD License\", \"Programming", "Beta\", \"License :: OSI Approved :: BSD License\", \"Programming Language :: Python ::", "if condition is None: return static else: return [i for i in filter(lambda", "test_suite=\"tests\", tests_require=read_reqs(\"dev\"), setup_requires=( [\"pytest-runner\"] if {\"test\", \"pytest\", \"ptr\"} & set(sys.argv) else [] ),", "extra[\"install_requires\"] = DEPENDENCIES # Additional files to include with package def get_static(name, condition=None):", "from setuptools import setup REQDIR = \"requirements\" def read_reqs(reqs_name): deps = [] with", "OK for pandas. pass else: # pandas 0.20.2 needs updated numexpr; the claim", "as f: long_description = f.read() setup( name=\"peppy\", packages=[\"peppy\"], version=version, description=\"A python-based project metadata", "for pandas. pass else: # pandas 0.20.2 needs updated numexpr; the claim is", "No numexpr is OK for pandas. pass else: # pandas 0.20.2 needs updated", "sys from setuptools import setup REQDIR = \"requirements\" def read_reqs(reqs_name): deps = []", "condition=\"'.' 
in x\") scripts = None with open(\"peppy/_version.py\", \"r\") as versionfile: version =", "x\") scripts = None with open(\"peppy/_version.py\", \"r\") as versionfile: version = versionfile.readline().split()[-1].strip(\"\\\"'\\n\") with", "Language :: Python :: 3.8\", \"Programming Language :: Python :: 3.9\", \"Topic ::", "with open(\"README.md\") as f: long_description = f.read() setup( name=\"peppy\", packages=[\"peppy\"], version=version, description=\"A python-based", "l.strip(): continue # deps.append(l.split(\"=\")[0].rstrip(\"<>\")) deps.append(l) return deps # Additional keyword arguments for setup().", "python-based project metadata manager for portable encapsulated projects\", long_description=long_description, long_description_content_type=\"text/markdown\", classifiers=[ \"Development Status", "Python :: 3.8\", \"Programming Language :: Python :: 3.9\", \"Topic :: Scientific/Engineering ::", "f) for f in os.listdir( os.path.join(os.path.dirname(os.path.realpath(__file__)), name) ) ] if condition is None:", "else: # pandas 0.20.2 needs updated numexpr; the claim is 2.4.6, but that", "- Beta\", \"License :: OSI Approved :: BSD License\", \"Programming Language :: Python", "= {} # Ordinary dependencies DEPENDENCIES = read_reqs(\"all\") # numexpr for pandas try:", "include with package def get_static(name, condition=None): static = [ os.path.join(name, f) for f", "projects\", long_description=long_description, long_description_content_type=\"text/markdown\", classifiers=[ \"Development Status :: 4 - Beta\", \"License :: OSI", ":: Bio-Informatics\", ], keywords=\"project, metadata, bioinformatics, sequencing, ngs, workflow\", url=\"https://github.com/pepkit/peppy/\", author=u\"<NAME>, <NAME>, <NAME>,", "3.6\", \"Programming Language :: Python :: 3.7\", \"Programming Language :: Python :: 3.8\",", "package def get_static(name, condition=None): static = [ os.path.join(name, f) for f in os.listdir(", "Language :: Python :: 3.7\", \"Programming Language :: 
Python :: 3.8\", \"Programming Language", "claim is 2.4.6, but that failed. DEPENDENCIES.append(\"numexpr>=2.6.2\") extra[\"install_requires\"] = DEPENDENCIES # Additional files", "dependencies DEPENDENCIES = read_reqs(\"all\") # numexpr for pandas try: import numexpr except ImportError:", "pass else: # pandas 0.20.2 needs updated numexpr; the claim is 2.4.6, but", "condition=None): static = [ os.path.join(name, f) for f in os.listdir( os.path.join(os.path.dirname(os.path.realpath(__file__)), name) )", "# Additional keyword arguments for setup(). extra = {} # Ordinary dependencies DEPENDENCIES", "but that failed. DEPENDENCIES.append(\"numexpr>=2.6.2\") extra[\"install_requires\"] = DEPENDENCIES # Additional files to include with", "return static else: return [i for i in filter(lambda x: eval(condition), static)] #", "# Ordinary dependencies DEPENDENCIES = read_reqs(\"all\") # numexpr for pandas try: import numexpr", "numexpr except ImportError: # No numexpr is OK for pandas. pass else: #", ":: Python :: 3.5\", \"Programming Language :: Python :: 3.6\", \"Programming Language ::", "for f in os.listdir( os.path.join(os.path.dirname(os.path.realpath(__file__)), name) ) ] if condition is None: return", "= versionfile.readline().split()[-1].strip(\"\\\"'\\n\") with open(\"README.md\") as f: long_description = f.read() setup( name=\"peppy\", packages=[\"peppy\"], version=version,", ":: Python :: 3.6\", \"Programming Language :: Python :: 3.7\", \"Programming Language ::", "numexpr; the claim is 2.4.6, but that failed. 
DEPENDENCIES.append(\"numexpr>=2.6.2\") extra[\"install_requires\"] = DEPENDENCIES #", "tests_require=read_reqs(\"dev\"), setup_requires=( [\"pytest-runner\"] if {\"test\", \"pytest\", \"ptr\"} & set(sys.argv) else [] ), **extra", ":: Python :: 3.9\", \"Topic :: Scientific/Engineering :: Bio-Informatics\", ], keywords=\"project, metadata, bioinformatics,", "metadata, bioinformatics, sequencing, ngs, workflow\", url=\"https://github.com/pepkit/peppy/\", author=u\"<NAME>, <NAME>, <NAME>, <NAME>\", license=\"BSD2\", scripts=scripts, include_package_data=True,", "metadata manager for portable encapsulated projects\", long_description=long_description, long_description_content_type=\"text/markdown\", classifiers=[ \"Development Status :: 4", "if not l.strip(): continue # deps.append(l.split(\"=\")[0].rstrip(\"<>\")) deps.append(l) return deps # Additional keyword arguments", ":: 3.7\", \"Programming Language :: Python :: 3.8\", \"Programming Language :: Python ::", "to the $PATH # scripts = get_static(\"scripts\", condition=\"'.' 
in x\") scripts = None", "None with open(\"peppy/_version.py\", \"r\") as versionfile: version = versionfile.readline().split()[-1].strip(\"\\\"'\\n\") with open(\"README.md\") as f:", "versionfile: version = versionfile.readline().split()[-1].strip(\"\\\"'\\n\") with open(\"README.md\") as f: long_description = f.read() setup( name=\"peppy\",", "open(\"README.md\") as f: long_description = f.read() setup( name=\"peppy\", packages=[\"peppy\"], version=version, description=\"A python-based project", "OSI Approved :: BSD License\", \"Programming Language :: Python :: 3.5\", \"Programming Language", "os import sys from setuptools import setup REQDIR = \"requirements\" def read_reqs(reqs_name): deps", "{} # Ordinary dependencies DEPENDENCIES = read_reqs(\"all\") # numexpr for pandas try: import", "name) ) ] if condition is None: return static else: return [i for", "\"Topic :: Scientific/Engineering :: Bio-Informatics\", ], keywords=\"project, metadata, bioinformatics, sequencing, ngs, workflow\", url=\"https://github.com/pepkit/peppy/\",", "pandas 0.20.2 needs updated numexpr; the claim is 2.4.6, but that failed. 
DEPENDENCIES.append(\"numexpr>=2.6.2\")", "Additional files to include with package def get_static(name, condition=None): static = [ os.path.join(name,", ":: Python :: 3.8\", \"Programming Language :: Python :: 3.9\", \"Topic :: Scientific/Engineering", "License\", \"Programming Language :: Python :: 3.5\", \"Programming Language :: Python :: 3.6\",", "<NAME>, <NAME>, <NAME>\", license=\"BSD2\", scripts=scripts, include_package_data=True, test_suite=\"tests\", tests_require=read_reqs(\"dev\"), setup_requires=( [\"pytest-runner\"] if {\"test\", \"pytest\",", "4 - Beta\", \"License :: OSI Approved :: BSD License\", \"Programming Language ::", "ngs, workflow\", url=\"https://github.com/pepkit/peppy/\", author=u\"<NAME>, <NAME>, <NAME>, <NAME>\", license=\"BSD2\", scripts=scripts, include_package_data=True, test_suite=\"tests\", tests_require=read_reqs(\"dev\"), setup_requires=(", "Python :: 3.7\", \"Programming Language :: Python :: 3.8\", \"Programming Language :: Python", "3.8\", \"Programming Language :: Python :: 3.9\", \"Topic :: Scientific/Engineering :: Bio-Informatics\", ],", "[i for i in filter(lambda x: eval(condition), static)] # scripts to be added", "import setup REQDIR = \"requirements\" def read_reqs(reqs_name): deps = [] with open(os.path.join(REQDIR, \"requirements-{}.txt\".format(reqs_name)),", "setuptools import setup REQDIR = \"requirements\" def read_reqs(reqs_name): deps = [] with open(os.path.join(REQDIR,", "numexpr is OK for pandas. 
pass else: # pandas 0.20.2 needs updated numexpr;", "static else: return [i for i in filter(lambda x: eval(condition), static)] # scripts", "Language :: Python :: 3.5\", \"Programming Language :: Python :: 3.6\", \"Programming Language", ":: BSD License\", \"Programming Language :: Python :: 3.5\", \"Programming Language :: Python", "Python :: 3.6\", \"Programming Language :: Python :: 3.7\", \"Programming Language :: Python", "workflow\", url=\"https://github.com/pepkit/peppy/\", author=u\"<NAME>, <NAME>, <NAME>, <NAME>\", license=\"BSD2\", scripts=scripts, include_package_data=True, test_suite=\"tests\", tests_require=read_reqs(\"dev\"), setup_requires=( [\"pytest-runner\"]", "author=u\"<NAME>, <NAME>, <NAME>, <NAME>\", license=\"BSD2\", scripts=scripts, include_package_data=True, test_suite=\"tests\", tests_require=read_reqs(\"dev\"), setup_requires=( [\"pytest-runner\"] if {\"test\",", "as versionfile: version = versionfile.readline().split()[-1].strip(\"\\\"'\\n\") with open(\"README.md\") as f: long_description = f.read() setup(", "i in filter(lambda x: eval(condition), static)] # scripts to be added to the", "$PATH # scripts = get_static(\"scripts\", condition=\"'.' in x\") scripts = None with open(\"peppy/_version.py\",", ":: Scientific/Engineering :: Bio-Informatics\", ], keywords=\"project, metadata, bioinformatics, sequencing, ngs, workflow\", url=\"https://github.com/pepkit/peppy/\", author=u\"<NAME>,", "<NAME>\", license=\"BSD2\", scripts=scripts, include_package_data=True, test_suite=\"tests\", tests_require=read_reqs(\"dev\"), setup_requires=( [\"pytest-runner\"] if {\"test\", \"pytest\", \"ptr\"} &", "read_reqs(\"all\") # numexpr for pandas try: import numexpr except ImportError: # No numexpr", "# pandas 0.20.2 needs updated numexpr; the claim is 2.4.6, but that failed.", "get_static(\"scripts\", condition=\"'.' 
in x\") scripts = None with open(\"peppy/_version.py\", \"r\") as versionfile: version", "open(\"peppy/_version.py\", \"r\") as versionfile: version = versionfile.readline().split()[-1].strip(\"\\\"'\\n\") with open(\"README.md\") as f: long_description =", "ImportError: # No numexpr is OK for pandas. pass else: # pandas 0.20.2", "return [i for i in filter(lambda x: eval(condition), static)] # scripts to be", "continue # deps.append(l.split(\"=\")[0].rstrip(\"<>\")) deps.append(l) return deps # Additional keyword arguments for setup(). extra", "import sys from setuptools import setup REQDIR = \"requirements\" def read_reqs(reqs_name): deps =", "= f.read() setup( name=\"peppy\", packages=[\"peppy\"], version=version, description=\"A python-based project metadata manager for portable", "\"Development Status :: 4 - Beta\", \"License :: OSI Approved :: BSD License\",", "\"Programming Language :: Python :: 3.8\", \"Programming Language :: Python :: 3.9\", \"Topic", "setup_requires=( [\"pytest-runner\"] if {\"test\", \"pytest\", \"ptr\"} & set(sys.argv) else [] ), **extra )", "\"requirements\" def read_reqs(reqs_name): deps = [] with open(os.path.join(REQDIR, \"requirements-{}.txt\".format(reqs_name)), \"r\") as f: for", "read_reqs(reqs_name): deps = [] with open(os.path.join(REQDIR, \"requirements-{}.txt\".format(reqs_name)), \"r\") as f: for l in", "[ os.path.join(name, f) for f in os.listdir( os.path.join(os.path.dirname(os.path.realpath(__file__)), name) ) ] if condition", "for setup(). 
extra = {} # Ordinary dependencies DEPENDENCIES = read_reqs(\"all\") # numexpr", "in filter(lambda x: eval(condition), static)] # scripts to be added to the $PATH", "as f: for l in f: if not l.strip(): continue # deps.append(l.split(\"=\")[0].rstrip(\"<>\")) deps.append(l)", "in x\") scripts = None with open(\"peppy/_version.py\", \"r\") as versionfile: version = versionfile.readline().split()[-1].strip(\"\\\"'\\n\")", "Approved :: BSD License\", \"Programming Language :: Python :: 3.5\", \"Programming Language ::", "/usr/bin/env python import os import sys from setuptools import setup REQDIR = \"requirements\"", "= \"requirements\" def read_reqs(reqs_name): deps = [] with open(os.path.join(REQDIR, \"requirements-{}.txt\".format(reqs_name)), \"r\") as f:", "return deps # Additional keyword arguments for setup(). extra = {} # Ordinary", "needs updated numexpr; the claim is 2.4.6, but that failed. DEPENDENCIES.append(\"numexpr>=2.6.2\") extra[\"install_requires\"] =", "updated numexpr; the claim is 2.4.6, but that failed. DEPENDENCIES.append(\"numexpr>=2.6.2\") extra[\"install_requires\"] = DEPENDENCIES", "f in os.listdir( os.path.join(os.path.dirname(os.path.realpath(__file__)), name) ) ] if condition is None: return static", "eval(condition), static)] # scripts to be added to the $PATH # scripts =", "in f: if not l.strip(): continue # deps.append(l.split(\"=\")[0].rstrip(\"<>\")) deps.append(l) return deps # Additional", "0.20.2 needs updated numexpr; the claim is 2.4.6, but that failed. DEPENDENCIES.append(\"numexpr>=2.6.2\") extra[\"install_requires\"]", "# Additional files to include with package def get_static(name, condition=None): static = [", "try: import numexpr except ImportError: # No numexpr is OK for pandas. 
pass", "for i in filter(lambda x: eval(condition), static)] # scripts to be added to", "get_static(name, condition=None): static = [ os.path.join(name, f) for f in os.listdir( os.path.join(os.path.dirname(os.path.realpath(__file__)), name)", "= None with open(\"peppy/_version.py\", \"r\") as versionfile: version = versionfile.readline().split()[-1].strip(\"\\\"'\\n\") with open(\"README.md\") as", "= [ os.path.join(name, f) for f in os.listdir( os.path.join(os.path.dirname(os.path.realpath(__file__)), name) ) ] if", "Scientific/Engineering :: Bio-Informatics\", ], keywords=\"project, metadata, bioinformatics, sequencing, ngs, workflow\", url=\"https://github.com/pepkit/peppy/\", author=u\"<NAME>, <NAME>,", "is 2.4.6, but that failed. DEPENDENCIES.append(\"numexpr>=2.6.2\") extra[\"install_requires\"] = DEPENDENCIES # Additional files to", ":: 3.8\", \"Programming Language :: Python :: 3.9\", \"Topic :: Scientific/Engineering :: Bio-Informatics\",", "bioinformatics, sequencing, ngs, workflow\", url=\"https://github.com/pepkit/peppy/\", author=u\"<NAME>, <NAME>, <NAME>, <NAME>\", license=\"BSD2\", scripts=scripts, include_package_data=True, test_suite=\"tests\",", ":: Python :: 3.7\", \"Programming Language :: Python :: 3.8\", \"Programming Language ::", "manager for portable encapsulated projects\", long_description=long_description, long_description_content_type=\"text/markdown\", classifiers=[ \"Development Status :: 4 -", "setup REQDIR = \"requirements\" def read_reqs(reqs_name): deps = [] with open(os.path.join(REQDIR, \"requirements-{}.txt\".format(reqs_name)), \"r\")", "\"Programming Language :: Python :: 3.9\", \"Topic :: Scientific/Engineering :: Bio-Informatics\", ], keywords=\"project,", "\"r\") as versionfile: version = versionfile.readline().split()[-1].strip(\"\\\"'\\n\") with open(\"README.md\") as f: long_description = f.read()", "\"requirements-{}.txt\".format(reqs_name)), \"r\") as f: for l in f: if not l.strip(): continue #", 
"deps.append(l.split(\"=\")[0].rstrip(\"<>\")) deps.append(l) return deps # Additional keyword arguments for setup(). extra = {}", "3.5\", \"Programming Language :: Python :: 3.6\", \"Programming Language :: Python :: 3.7\",", "Bio-Informatics\", ], keywords=\"project, metadata, bioinformatics, sequencing, ngs, workflow\", url=\"https://github.com/pepkit/peppy/\", author=u\"<NAME>, <NAME>, <NAME>, <NAME>\",", "condition is None: return static else: return [i for i in filter(lambda x:", "for portable encapsulated projects\", long_description=long_description, long_description_content_type=\"text/markdown\", classifiers=[ \"Development Status :: 4 - Beta\",", "DEPENDENCIES # Additional files to include with package def get_static(name, condition=None): static =", "in os.listdir( os.path.join(os.path.dirname(os.path.realpath(__file__)), name) ) ] if condition is None: return static else:", "l in f: if not l.strip(): continue # deps.append(l.split(\"=\")[0].rstrip(\"<>\")) deps.append(l) return deps #", "setup(). extra = {} # Ordinary dependencies DEPENDENCIES = read_reqs(\"all\") # numexpr for", "for pandas try: import numexpr except ImportError: # No numexpr is OK for", "\"Programming Language :: Python :: 3.7\", \"Programming Language :: Python :: 3.8\", \"Programming", "Python :: 3.9\", \"Topic :: Scientific/Engineering :: Bio-Informatics\", ], keywords=\"project, metadata, bioinformatics, sequencing,", "os.path.join(os.path.dirname(os.path.realpath(__file__)), name) ) ] if condition is None: return static else: return [i", "the claim is 2.4.6, but that failed. DEPENDENCIES.append(\"numexpr>=2.6.2\") extra[\"install_requires\"] = DEPENDENCIES # Additional", "extra = {} # Ordinary dependencies DEPENDENCIES = read_reqs(\"all\") # numexpr for pandas", "x: eval(condition), static)] # scripts to be added to the $PATH # scripts" ]
[ "json_obj['RefTest']) self.assertNotIn('Ref', json_obj['RefTest']) self.assertEqual(json_obj['RefTest']['Fn::GetAtt'], ['ResourceName', 'AttName']) def test_safe_load_fail(self): with self.assertRaises(yaml.constructor.ConstructorError): yaml.safe_load(self.doc) def test_safe_load_ok(self):", "yaml.dumper, yaml.constructor, yaml.loader, yaml]: reload_module(module) reload_module(cfn_yaml_tags) self.doc = \"\"\" AndTest: !And - Condition1", "'ImportName'}) dumped = cfn_yaml_tags.JSONFromYAMLEncoder().encode(self.obj) json_obj = json.loads(dumped) self.assertEqual(json_obj['GetAttListTest']['Fn::GetAtt'], ['ResourceName', 'AttName']) self.assertEqual(json_obj['GetAttStringTest']['Fn::GetAtt'], ['ResourceName', 'AttName'])", "json_obj['RefTest']) self.assertEqual(json_obj['RefTest']['Fn::GetAtt'], ['ResourceName', 'AttName']) def test_safe_load_fail(self): with self.assertRaises(yaml.constructor.ConstructorError): yaml.safe_load(self.doc) def test_safe_load_ok(self): cfn_yaml_tags.mark_safe() loaded_obj", "yaml.safe_load(self.doc) def test_safe_load_ok(self): cfn_yaml_tags.mark_safe() loaded_obj = yaml.safe_load(self.doc) self.assertEqual(loaded_obj, self.obj) def test_safe_dump_fail(self): with self.assertRaises(yaml.representer.RepresenterError):", "dumped = cfn_yaml_tags.JSONFromYAMLEncoder().encode(ref_obj) json_obj = json.loads(dumped) self.assertIn('Fn::GetAtt', json_obj['RefTest']) self.assertNotIn('Ref', json_obj['RefTest']) self.assertEqual(json_obj['RefTest']['Fn::GetAtt'], ['ResourceName', 'AttName'])", "ref_obj = {'RefTest': cfn_yaml_tags.Ref('ResourceName.AttName')} dumped = cfn_yaml_tags.JSONFromYAMLEncoder().encode(ref_obj) json_obj = json.loads(dumped) self.assertIn('Fn::GetAtt', json_obj['RefTest']) self.assertNotIn('Ref',", "{'RefTest': cfn_yaml_tags.Ref('ResourceName.AttName')} dumped = cfn_yaml_tags.JSONFromYAMLEncoder().encode(ref_obj) json_obj = json.loads(dumped) self.assertIn('Fn::GetAtt', json_obj['RefTest']) self.assertNotIn('Ref', 
json_obj['RefTest']) self.assertEqual(json_obj['RefTest']['Fn::GetAtt'],", "reload_module import json import yaml import cfn_yaml_tags class CfnYamlTagTest(unittest.TestCase): def setUp(self): for module", "cfn_yaml_tags.ImportValue('ImportName'), 'JoinTest': cfn_yaml_tags.Join([' ', ['hello', 'world']]), 'NotTest': cfn_yaml_tags.Not(['Condition']), 'OrTest': cfn_yaml_tags.Or(['Condition1', 'Condition2']), 'RefTest': cfn_yaml_tags.Ref('ResourceName'),", "- $foo - foo: !GetAZs us-east-1 - ValueIfTrue - ValueIfFalse \"\"\" self.obj =", "= yaml.dump(self.obj) def test_json(self): json.JSONEncoder().encode({'Fn::ImportValue': 'ImportName'}) dumped = cfn_yaml_tags.JSONFromYAMLEncoder().encode(self.obj) json_obj = json.loads(dumped) self.assertEqual(json_obj['GetAttListTest']['Fn::GetAtt'],", "loaded_obj = yaml.load(self.doc) self.assertEqual(loaded_obj, self.obj) def test_dump(self): dumped = yaml.dump(self.obj) def test_json(self): json.JSONEncoder().encode({'Fn::ImportValue':", "!GetAZs us-east-1 Base64Test: !Base64 abc ConditionTest: !Condition MyCondition EqualsTest: !Equals [Value1, Value2] FindInMapTest:", "six.moves import reload_module import json import yaml import cfn_yaml_tags class CfnYamlTagTest(unittest.TestCase): def setUp(self):", "- foo: bar NestedTest: !If - !And - !Not [!Condition MyCondition] - !Join", "MyCondition] - !Join - ' ' - - !Ref MyResource - !Sub -", "!Join - ' ' - - !Ref MyResource - !Sub - $foo -", "'TopLevelKey', 'SecondLevelKey']), 'GetAttListTest': cfn_yaml_tags.GetAtt(['ResourceName', 'AttName']), 'GetAttStringTest': cfn_yaml_tags.GetAtt('ResourceName.AttName'), 'IfTest': cfn_yaml_tags.If(['Condition', 'ValueIfTrue', 'ValueIfFalse']), 'ImportValueTest': cfn_yaml_tags.ImportValue('ImportName'),", "cfn_yaml_tags.Ref('ResourceName'), 'SelectTest': cfn_yaml_tags.Select([0, [1, 2, 3]]), 'SplitTest': cfn_yaml_tags.Split([',', 'foo/bar']), 'SubTest': cfn_yaml_tags.Sub(['$foo', {'foo': 'bar'}]),", "] ]), ]), 'ValueIfTrue', 'ValueIfFalse', ]) 
} def test_load(self): loaded_obj = yaml.load(self.doc) self.assertEqual(loaded_obj,", "json_obj = json.loads(dumped) self.assertEqual(json_obj['GetAttListTest']['Fn::GetAtt'], ['ResourceName', 'AttName']) self.assertEqual(json_obj['GetAttStringTest']['Fn::GetAtt'], ['ResourceName', 'AttName']) ref_obj = {'RefTest': cfn_yaml_tags.Ref('ResourceName.AttName')}", "'AttName']) ref_obj = {'RefTest': cfn_yaml_tags.Ref('ResourceName.AttName')} dumped = cfn_yaml_tags.JSONFromYAMLEncoder().encode(ref_obj) json_obj = json.loads(dumped) self.assertIn('Fn::GetAtt', json_obj['RefTest'])", "self.assertNotIn('Ref', json_obj['RefTest']) self.assertEqual(json_obj['RefTest']['Fn::GetAtt'], ['ResourceName', 'AttName']) def test_safe_load_fail(self): with self.assertRaises(yaml.constructor.ConstructorError): yaml.safe_load(self.doc) def test_safe_load_ok(self): cfn_yaml_tags.mark_safe()", "with self.assertRaises(yaml.constructor.ConstructorError): yaml.safe_load(self.doc) def test_safe_load_ok(self): cfn_yaml_tags.mark_safe() loaded_obj = yaml.safe_load(self.doc) self.assertEqual(loaded_obj, self.obj) def test_safe_dump_fail(self):", "- Condition - ValueIfTrue - ValueIfFalse ImportValueTest: !ImportValue ImportName JoinTest: !Join - '", "self.assertRaises(yaml.constructor.ConstructorError): yaml.safe_load(self.doc) def test_safe_load_ok(self): cfn_yaml_tags.mark_safe() loaded_obj = yaml.safe_load(self.doc) self.assertEqual(loaded_obj, self.obj) def test_safe_dump_fail(self): with", "dumped = yaml.safe_dump(self.obj) def test_safe_dump_ok(self): cfn_yaml_tags.mark_safe() dumped = yaml.safe_dump(self.doc) if __name__ == '__main__':", "TopLevelKey, SecondLevelKey] GetAttListTest: !GetAtt [ResourceName, AttName] GetAttStringTest: !GetAtt ResourceName.AttName IfTest: !If - Condition", "test_dump(self): dumped = yaml.dump(self.obj) def test_json(self): json.JSONEncoder().encode({'Fn::ImportValue': 'ImportName'}) dumped = cfn_yaml_tags.JSONFromYAMLEncoder().encode(self.obj) json_obj =", 
"cfn_yaml_tags.Or(['Condition1', 'Condition2']), 'RefTest': cfn_yaml_tags.Ref('ResourceName'), 'SelectTest': cfn_yaml_tags.Select([0, [1, 2, 3]]), 'SplitTest': cfn_yaml_tags.Split([',', 'foo/bar']), 'SubTest':", "'IfTest': cfn_yaml_tags.If(['Condition', 'ValueIfTrue', 'ValueIfFalse']), 'ImportValueTest': cfn_yaml_tags.ImportValue('ImportName'), 'JoinTest': cfn_yaml_tags.Join([' ', ['hello', 'world']]), 'NotTest': cfn_yaml_tags.Not(['Condition']),", "six from six.moves import reload_module import json import yaml import cfn_yaml_tags class CfnYamlTagTest(unittest.TestCase):", "' ' - - !Ref MyResource - !Sub - $foo - foo: !GetAZs", "cfn_yaml_tags.Ref('ResourceName.AttName')} dumped = cfn_yaml_tags.JSONFromYAMLEncoder().encode(ref_obj) json_obj = json.loads(dumped) self.assertIn('Fn::GetAtt', json_obj['RefTest']) self.assertNotIn('Ref', json_obj['RefTest']) self.assertEqual(json_obj['RefTest']['Fn::GetAtt'], ['ResourceName',", "'SelectTest': cfn_yaml_tags.Select([0, [1, 2, 3]]), 'SplitTest': cfn_yaml_tags.Split([',', 'foo/bar']), 'SubTest': cfn_yaml_tags.Sub(['$foo', {'foo': 'bar'}]), 'NestedTest':", "SelectTest: !Select [0, [1, 2, 3]] SplitTest: !Split [',', 'foo/bar'] SubTest: !Sub -", "reload_module(cfn_yaml_tags) self.doc = \"\"\" AndTest: !And - Condition1 - Condition2 AZsTest: !GetAZs us-east-1", "3]] SplitTest: !Split [',', 'foo/bar'] SubTest: !Sub - '$foo' - foo: bar NestedTest:", "Value2] FindInMapTest: !FindInMap [MapName, TopLevelKey, SecondLevelKey] GetAttListTest: !GetAtt [ResourceName, AttName] GetAttStringTest: !GetAtt ResourceName.AttName", "{ 'foo': cfn_yaml_tags.GetAZs('us-east-1'), }, ]) ] ]), ]), 'ValueIfTrue', 'ValueIfFalse', ]) } def", "GetAttListTest: !GetAtt [ResourceName, AttName] GetAttStringTest: !GetAtt ResourceName.AttName IfTest: !If - Condition - ValueIfTrue", "cfn_yaml_tags.Base64('abc'), 'ConditionTest': cfn_yaml_tags.Condition('MyCondition'), 'EqualsTest': cfn_yaml_tags.Equals(['Value1', 'Value2']), 'FindInMapTest': 
cfn_yaml_tags.FindInMap(['MapName', 'TopLevelKey', 'SecondLevelKey']), 'GetAttListTest': cfn_yaml_tags.GetAtt(['ResourceName', 'AttName']),", "!And - Condition1 - Condition2 AZsTest: !GetAZs us-east-1 Base64Test: !Base64 abc ConditionTest: !Condition", "self.assertIn('Fn::GetAtt', json_obj['RefTest']) self.assertNotIn('Ref', json_obj['RefTest']) self.assertEqual(json_obj['RefTest']['Fn::GetAtt'], ['ResourceName', 'AttName']) def test_safe_load_fail(self): with self.assertRaises(yaml.constructor.ConstructorError): yaml.safe_load(self.doc) def", "import cfn_yaml_tags class CfnYamlTagTest(unittest.TestCase): def setUp(self): for module in [yaml.representer, yaml.dumper, yaml.constructor, yaml.loader,", "def test_safe_load_fail(self): with self.assertRaises(yaml.constructor.ConstructorError): yaml.safe_load(self.doc) def test_safe_load_ok(self): cfn_yaml_tags.mark_safe() loaded_obj = yaml.safe_load(self.doc) self.assertEqual(loaded_obj, self.obj)", "= \"\"\" AndTest: !And - Condition1 - Condition2 AZsTest: !GetAZs us-east-1 Base64Test: !Base64", "OrTest: !Or - Condition1 - Condition2 RefTest: !Ref ResourceName SelectTest: !Select [0, [1,", "dumped = yaml.dump(self.obj) def test_json(self): json.JSONEncoder().encode({'Fn::ImportValue': 'ImportName'}) dumped = cfn_yaml_tags.JSONFromYAMLEncoder().encode(self.obj) json_obj = json.loads(dumped)", "- Condition2 AZsTest: !GetAZs us-east-1 Base64Test: !Base64 abc ConditionTest: !Condition MyCondition EqualsTest: !Equals", "= json.loads(dumped) self.assertIn('Fn::GetAtt', json_obj['RefTest']) self.assertNotIn('Ref', json_obj['RefTest']) self.assertEqual(json_obj['RefTest']['Fn::GetAtt'], ['ResourceName', 'AttName']) def test_safe_load_fail(self): with self.assertRaises(yaml.constructor.ConstructorError):", "3]]), 'SplitTest': cfn_yaml_tags.Split([',', 'foo/bar']), 'SubTest': cfn_yaml_tags.Sub(['$foo', {'foo': 'bar'}]), 'NestedTest': cfn_yaml_tags.If([ cfn_yaml_tags.And([ 
cfn_yaml_tags.Not([cfn_yaml_tags.Condition('MyCondition')]), cfn_yaml_tags.Join([", "= cfn_yaml_tags.JSONFromYAMLEncoder().encode(self.obj) json_obj = json.loads(dumped) self.assertEqual(json_obj['GetAttListTest']['Fn::GetAtt'], ['ResourceName', 'AttName']) self.assertEqual(json_obj['GetAttStringTest']['Fn::GetAtt'], ['ResourceName', 'AttName']) ref_obj =", "SecondLevelKey] GetAttListTest: !GetAtt [ResourceName, AttName] GetAttStringTest: !GetAtt ResourceName.AttName IfTest: !If - Condition -", "]), 'ValueIfTrue', 'ValueIfFalse', ]) } def test_load(self): loaded_obj = yaml.load(self.doc) self.assertEqual(loaded_obj, self.obj) def", "loaded_obj = yaml.safe_load(self.doc) self.assertEqual(loaded_obj, self.obj) def test_safe_dump_fail(self): with self.assertRaises(yaml.representer.RepresenterError): dumped = yaml.safe_dump(self.obj) def", "import unittest import six from six.moves import reload_module import json import yaml import", "[!Condition MyCondition] - !Join - ' ' - - !Ref MyResource - !Sub", "'SplitTest': cfn_yaml_tags.Split([',', 'foo/bar']), 'SubTest': cfn_yaml_tags.Sub(['$foo', {'foo': 'bar'}]), 'NestedTest': cfn_yaml_tags.If([ cfn_yaml_tags.And([ cfn_yaml_tags.Not([cfn_yaml_tags.Condition('MyCondition')]), cfn_yaml_tags.Join([ '", "import yaml import cfn_yaml_tags class CfnYamlTagTest(unittest.TestCase): def setUp(self): for module in [yaml.representer, yaml.dumper,", "' ', [ cfn_yaml_tags.Ref('MyResource'), cfn_yaml_tags.Sub([ '$foo', { 'foo': cfn_yaml_tags.GetAZs('us-east-1'), }, ]) ] ]),", "SplitTest: !Split [',', 'foo/bar'] SubTest: !Sub - '$foo' - foo: bar NestedTest: !If", "- hello - world NotTest: !Not [Condition] OrTest: !Or - Condition1 - Condition2", "test_json(self): json.JSONEncoder().encode({'Fn::ImportValue': 'ImportName'}) dumped = cfn_yaml_tags.JSONFromYAMLEncoder().encode(self.obj) json_obj = json.loads(dumped) self.assertEqual(json_obj['GetAttListTest']['Fn::GetAtt'], ['ResourceName', 'AttName']) 
self.assertEqual(json_obj['GetAttStringTest']['Fn::GetAtt'],", "= yaml.safe_dump(self.obj) def test_safe_dump_ok(self): cfn_yaml_tags.mark_safe() dumped = yaml.safe_dump(self.doc) if __name__ == '__main__': unittest.main()", "- '$foo' - foo: bar NestedTest: !If - !And - !Not [!Condition MyCondition]", "unittest import six from six.moves import reload_module import json import yaml import cfn_yaml_tags", "- foo: !GetAZs us-east-1 - ValueIfTrue - ValueIfFalse \"\"\" self.obj = { 'AndTest':", "ValueIfTrue - ValueIfFalse \"\"\" self.obj = { 'AndTest': cfn_yaml_tags.And(['Condition1', 'Condition2']), 'AZsTest': cfn_yaml_tags.GetAZs('us-east-1'), 'Base64Test':", "cfn_yaml_tags.Join([ ' ', [ cfn_yaml_tags.Ref('MyResource'), cfn_yaml_tags.Sub([ '$foo', { 'foo': cfn_yaml_tags.GetAZs('us-east-1'), }, ]) ]", "'ValueIfTrue', 'ValueIfFalse', ]) } def test_load(self): loaded_obj = yaml.load(self.doc) self.assertEqual(loaded_obj, self.obj) def test_dump(self):", "'OrTest': cfn_yaml_tags.Or(['Condition1', 'Condition2']), 'RefTest': cfn_yaml_tags.Ref('ResourceName'), 'SelectTest': cfn_yaml_tags.Select([0, [1, 2, 3]]), 'SplitTest': cfn_yaml_tags.Split([',', 'foo/bar']),", "}, ]) ] ]), ]), 'ValueIfTrue', 'ValueIfFalse', ]) } def test_load(self): loaded_obj =", "= yaml.safe_load(self.doc) self.assertEqual(loaded_obj, self.obj) def test_safe_dump_fail(self): with self.assertRaises(yaml.representer.RepresenterError): dumped = yaml.safe_dump(self.obj) def test_safe_dump_ok(self):", "'foo/bar']), 'SubTest': cfn_yaml_tags.Sub(['$foo', {'foo': 'bar'}]), 'NestedTest': cfn_yaml_tags.If([ cfn_yaml_tags.And([ cfn_yaml_tags.Not([cfn_yaml_tags.Condition('MyCondition')]), cfn_yaml_tags.Join([ ' ', [", "AttName] GetAttStringTest: !GetAtt ResourceName.AttName IfTest: !If - Condition - ValueIfTrue - ValueIfFalse ImportValueTest:", "'bar'}]), 'NestedTest': cfn_yaml_tags.If([ cfn_yaml_tags.And([ cfn_yaml_tags.Not([cfn_yaml_tags.Condition('MyCondition')]), cfn_yaml_tags.Join([ ' ', [ 
cfn_yaml_tags.Ref('MyResource'), cfn_yaml_tags.Sub([ '$foo', {", "'NotTest': cfn_yaml_tags.Not(['Condition']), 'OrTest': cfn_yaml_tags.Or(['Condition1', 'Condition2']), 'RefTest': cfn_yaml_tags.Ref('ResourceName'), 'SelectTest': cfn_yaml_tags.Select([0, [1, 2, 3]]), 'SplitTest':", "cfn_yaml_tags.GetAtt('ResourceName.AttName'), 'IfTest': cfn_yaml_tags.If(['Condition', 'ValueIfTrue', 'ValueIfFalse']), 'ImportValueTest': cfn_yaml_tags.ImportValue('ImportName'), 'JoinTest': cfn_yaml_tags.Join([' ', ['hello', 'world']]), 'NotTest':", "= json.loads(dumped) self.assertEqual(json_obj['GetAttListTest']['Fn::GetAtt'], ['ResourceName', 'AttName']) self.assertEqual(json_obj['GetAttStringTest']['Fn::GetAtt'], ['ResourceName', 'AttName']) ref_obj = {'RefTest': cfn_yaml_tags.Ref('ResourceName.AttName')} dumped", "]), ]), 'ValueIfTrue', 'ValueIfFalse', ]) } def test_load(self): loaded_obj = yaml.load(self.doc) self.assertEqual(loaded_obj, self.obj)", "]) } def test_load(self): loaded_obj = yaml.load(self.doc) self.assertEqual(loaded_obj, self.obj) def test_dump(self): dumped =", "['ResourceName', 'AttName']) ref_obj = {'RefTest': cfn_yaml_tags.Ref('ResourceName.AttName')} dumped = cfn_yaml_tags.JSONFromYAMLEncoder().encode(ref_obj) json_obj = json.loads(dumped) self.assertIn('Fn::GetAtt',", "'AttName']), 'GetAttStringTest': cfn_yaml_tags.GetAtt('ResourceName.AttName'), 'IfTest': cfn_yaml_tags.If(['Condition', 'ValueIfTrue', 'ValueIfFalse']), 'ImportValueTest': cfn_yaml_tags.ImportValue('ImportName'), 'JoinTest': cfn_yaml_tags.Join([' ', ['hello',", "- ValueIfTrue - ValueIfFalse ImportValueTest: !ImportValue ImportName JoinTest: !Join - ' ' -", "module in [yaml.representer, yaml.dumper, yaml.constructor, yaml.loader, yaml]: reload_module(module) reload_module(cfn_yaml_tags) self.doc = \"\"\" AndTest:", "cfn_yaml_tags.Ref('MyResource'), cfn_yaml_tags.Sub([ '$foo', { 'foo': cfn_yaml_tags.GetAZs('us-east-1'), }, ]) ] ]), ]), 'ValueIfTrue', 'ValueIfFalse',", "$foo - foo: !GetAZs 
us-east-1 - ValueIfTrue - ValueIfFalse \"\"\" self.obj = {", "self.assertEqual(json_obj['GetAttListTest']['Fn::GetAtt'], ['ResourceName', 'AttName']) self.assertEqual(json_obj['GetAttStringTest']['Fn::GetAtt'], ['ResourceName', 'AttName']) ref_obj = {'RefTest': cfn_yaml_tags.Ref('ResourceName.AttName')} dumped = cfn_yaml_tags.JSONFromYAMLEncoder().encode(ref_obj)", "!Select [0, [1, 2, 3]] SplitTest: !Split [',', 'foo/bar'] SubTest: !Sub - '$foo'", "'RefTest': cfn_yaml_tags.Ref('ResourceName'), 'SelectTest': cfn_yaml_tags.Select([0, [1, 2, 3]]), 'SplitTest': cfn_yaml_tags.Split([',', 'foo/bar']), 'SubTest': cfn_yaml_tags.Sub(['$foo', {'foo':", "ValueIfFalse ImportValueTest: !ImportValue ImportName JoinTest: !Join - ' ' - - hello -", "!If - !And - !Not [!Condition MyCondition] - !Join - ' ' -", "MyResource - !Sub - $foo - foo: !GetAZs us-east-1 - ValueIfTrue - ValueIfFalse", "yaml.dump(self.obj) def test_json(self): json.JSONEncoder().encode({'Fn::ImportValue': 'ImportName'}) dumped = cfn_yaml_tags.JSONFromYAMLEncoder().encode(self.obj) json_obj = json.loads(dumped) self.assertEqual(json_obj['GetAttListTest']['Fn::GetAtt'], ['ResourceName',", "self.assertEqual(loaded_obj, self.obj) def test_dump(self): dumped = yaml.dump(self.obj) def test_json(self): json.JSONEncoder().encode({'Fn::ImportValue': 'ImportName'}) dumped =", "['ResourceName', 'AttName']) self.assertEqual(json_obj['GetAttStringTest']['Fn::GetAtt'], ['ResourceName', 'AttName']) ref_obj = {'RefTest': cfn_yaml_tags.Ref('ResourceName.AttName')} dumped = cfn_yaml_tags.JSONFromYAMLEncoder().encode(ref_obj) json_obj", "- ValueIfFalse \"\"\" self.obj = { 'AndTest': cfn_yaml_tags.And(['Condition1', 'Condition2']), 'AZsTest': cfn_yaml_tags.GetAZs('us-east-1'), 'Base64Test': cfn_yaml_tags.Base64('abc'),", "cfn_yaml_tags.Equals(['Value1', 'Value2']), 'FindInMapTest': cfn_yaml_tags.FindInMap(['MapName', 'TopLevelKey', 'SecondLevelKey']), 'GetAttListTest': cfn_yaml_tags.GetAtt(['ResourceName', 'AttName']), 
'GetAttStringTest': cfn_yaml_tags.GetAtt('ResourceName.AttName'), 'IfTest': cfn_yaml_tags.If(['Condition',", "!Not [!Condition MyCondition] - !Join - ' ' - - !Ref MyResource -", "= cfn_yaml_tags.JSONFromYAMLEncoder().encode(ref_obj) json_obj = json.loads(dumped) self.assertIn('Fn::GetAtt', json_obj['RefTest']) self.assertNotIn('Ref', json_obj['RefTest']) self.assertEqual(json_obj['RefTest']['Fn::GetAtt'], ['ResourceName', 'AttName']) def", "= yaml.load(self.doc) self.assertEqual(loaded_obj, self.obj) def test_dump(self): dumped = yaml.dump(self.obj) def test_json(self): json.JSONEncoder().encode({'Fn::ImportValue': 'ImportName'})", "json.loads(dumped) self.assertIn('Fn::GetAtt', json_obj['RefTest']) self.assertNotIn('Ref', json_obj['RefTest']) self.assertEqual(json_obj['RefTest']['Fn::GetAtt'], ['ResourceName', 'AttName']) def test_safe_load_fail(self): with self.assertRaises(yaml.constructor.ConstructorError): yaml.safe_load(self.doc)", "from six.moves import reload_module import json import yaml import cfn_yaml_tags class CfnYamlTagTest(unittest.TestCase): def", "world NotTest: !Not [Condition] OrTest: !Or - Condition1 - Condition2 RefTest: !Ref ResourceName", "cfn_yaml_tags.And(['Condition1', 'Condition2']), 'AZsTest': cfn_yaml_tags.GetAZs('us-east-1'), 'Base64Test': cfn_yaml_tags.Base64('abc'), 'ConditionTest': cfn_yaml_tags.Condition('MyCondition'), 'EqualsTest': cfn_yaml_tags.Equals(['Value1', 'Value2']), 'FindInMapTest': cfn_yaml_tags.FindInMap(['MapName',", "test_safe_dump_fail(self): with self.assertRaises(yaml.representer.RepresenterError): dumped = yaml.safe_dump(self.obj) def test_safe_dump_ok(self): cfn_yaml_tags.mark_safe() dumped = yaml.safe_dump(self.doc) if", "self.obj) def test_safe_dump_fail(self): with self.assertRaises(yaml.representer.RepresenterError): dumped = yaml.safe_dump(self.obj) def test_safe_dump_ok(self): cfn_yaml_tags.mark_safe() dumped =", "ImportName JoinTest: !Join - ' ' - - hello - world NotTest: !Not", "'ValueIfTrue', 
'ValueIfFalse']), 'ImportValueTest': cfn_yaml_tags.ImportValue('ImportName'), 'JoinTest': cfn_yaml_tags.Join([' ', ['hello', 'world']]), 'NotTest': cfn_yaml_tags.Not(['Condition']), 'OrTest': cfn_yaml_tags.Or(['Condition1',", "ConditionTest: !Condition MyCondition EqualsTest: !Equals [Value1, Value2] FindInMapTest: !FindInMap [MapName, TopLevelKey, SecondLevelKey] GetAttListTest:", "'AndTest': cfn_yaml_tags.And(['Condition1', 'Condition2']), 'AZsTest': cfn_yaml_tags.GetAZs('us-east-1'), 'Base64Test': cfn_yaml_tags.Base64('abc'), 'ConditionTest': cfn_yaml_tags.Condition('MyCondition'), 'EqualsTest': cfn_yaml_tags.Equals(['Value1', 'Value2']), 'FindInMapTest':", "foo: bar NestedTest: !If - !And - !Not [!Condition MyCondition] - !Join -", "2, 3]] SplitTest: !Split [',', 'foo/bar'] SubTest: !Sub - '$foo' - foo: bar", "'NestedTest': cfn_yaml_tags.If([ cfn_yaml_tags.And([ cfn_yaml_tags.Not([cfn_yaml_tags.Condition('MyCondition')]), cfn_yaml_tags.Join([ ' ', [ cfn_yaml_tags.Ref('MyResource'), cfn_yaml_tags.Sub([ '$foo', { 'foo':", "'foo': cfn_yaml_tags.GetAZs('us-east-1'), }, ]) ] ]), ]), 'ValueIfTrue', 'ValueIfFalse', ]) } def test_load(self):", "in [yaml.representer, yaml.dumper, yaml.constructor, yaml.loader, yaml]: reload_module(module) reload_module(cfn_yaml_tags) self.doc = \"\"\" AndTest: !And", "!GetAZs us-east-1 - ValueIfTrue - ValueIfFalse \"\"\" self.obj = { 'AndTest': cfn_yaml_tags.And(['Condition1', 'Condition2']),", "2, 3]]), 'SplitTest': cfn_yaml_tags.Split([',', 'foo/bar']), 'SubTest': cfn_yaml_tags.Sub(['$foo', {'foo': 'bar'}]), 'NestedTest': cfn_yaml_tags.If([ cfn_yaml_tags.And([ cfn_yaml_tags.Not([cfn_yaml_tags.Condition('MyCondition')]),", "us-east-1 - ValueIfTrue - ValueIfFalse \"\"\" self.obj = { 'AndTest': cfn_yaml_tags.And(['Condition1', 'Condition2']), 'AZsTest':", "cfn_yaml_tags.Not([cfn_yaml_tags.Condition('MyCondition')]), cfn_yaml_tags.Join([ ' ', [ cfn_yaml_tags.Ref('MyResource'), cfn_yaml_tags.Sub([ '$foo', { 'foo': 
cfn_yaml_tags.GetAZs('us-east-1'), }, ])", "- Condition1 - Condition2 AZsTest: !GetAZs us-east-1 Base64Test: !Base64 abc ConditionTest: !Condition MyCondition", "- Condition1 - Condition2 RefTest: !Ref ResourceName SelectTest: !Select [0, [1, 2, 3]]", "NestedTest: !If - !And - !Not [!Condition MyCondition] - !Join - ' '", "'Condition2']), 'AZsTest': cfn_yaml_tags.GetAZs('us-east-1'), 'Base64Test': cfn_yaml_tags.Base64('abc'), 'ConditionTest': cfn_yaml_tags.Condition('MyCondition'), 'EqualsTest': cfn_yaml_tags.Equals(['Value1', 'Value2']), 'FindInMapTest': cfn_yaml_tags.FindInMap(['MapName', 'TopLevelKey',", "test_safe_load_fail(self): with self.assertRaises(yaml.constructor.ConstructorError): yaml.safe_load(self.doc) def test_safe_load_ok(self): cfn_yaml_tags.mark_safe() loaded_obj = yaml.safe_load(self.doc) self.assertEqual(loaded_obj, self.obj) def", "IfTest: !If - Condition - ValueIfTrue - ValueIfFalse ImportValueTest: !ImportValue ImportName JoinTest: !Join", "test_load(self): loaded_obj = yaml.load(self.doc) self.assertEqual(loaded_obj, self.obj) def test_dump(self): dumped = yaml.dump(self.obj) def test_json(self):", "!FindInMap [MapName, TopLevelKey, SecondLevelKey] GetAttListTest: !GetAtt [ResourceName, AttName] GetAttStringTest: !GetAtt ResourceName.AttName IfTest: !If", "- - !Ref MyResource - !Sub - $foo - foo: !GetAZs us-east-1 -", "- - hello - world NotTest: !Not [Condition] OrTest: !Or - Condition1 -", "'AttName']) def test_safe_load_fail(self): with self.assertRaises(yaml.constructor.ConstructorError): yaml.safe_load(self.doc) def test_safe_load_ok(self): cfn_yaml_tags.mark_safe() loaded_obj = yaml.safe_load(self.doc) self.assertEqual(loaded_obj,", "'$foo' - foo: bar NestedTest: !If - !And - !Not [!Condition MyCondition] -", "[1, 2, 3]] SplitTest: !Split [',', 'foo/bar'] SubTest: !Sub - '$foo' - foo:", "]) ] ]), ]), 'ValueIfTrue', 'ValueIfFalse', ]) } def test_load(self): loaded_obj = yaml.load(self.doc)", 
"cfn_yaml_tags.JSONFromYAMLEncoder().encode(ref_obj) json_obj = json.loads(dumped) self.assertIn('Fn::GetAtt', json_obj['RefTest']) self.assertNotIn('Ref', json_obj['RefTest']) self.assertEqual(json_obj['RefTest']['Fn::GetAtt'], ['ResourceName', 'AttName']) def test_safe_load_fail(self):", "'AZsTest': cfn_yaml_tags.GetAZs('us-east-1'), 'Base64Test': cfn_yaml_tags.Base64('abc'), 'ConditionTest': cfn_yaml_tags.Condition('MyCondition'), 'EqualsTest': cfn_yaml_tags.Equals(['Value1', 'Value2']), 'FindInMapTest': cfn_yaml_tags.FindInMap(['MapName', 'TopLevelKey', 'SecondLevelKey']),", "[yaml.representer, yaml.dumper, yaml.constructor, yaml.loader, yaml]: reload_module(module) reload_module(cfn_yaml_tags) self.doc = \"\"\" AndTest: !And -", "!Equals [Value1, Value2] FindInMapTest: !FindInMap [MapName, TopLevelKey, SecondLevelKey] GetAttListTest: !GetAtt [ResourceName, AttName] GetAttStringTest:", "cfn_yaml_tags.FindInMap(['MapName', 'TopLevelKey', 'SecondLevelKey']), 'GetAttListTest': cfn_yaml_tags.GetAtt(['ResourceName', 'AttName']), 'GetAttStringTest': cfn_yaml_tags.GetAtt('ResourceName.AttName'), 'IfTest': cfn_yaml_tags.If(['Condition', 'ValueIfTrue', 'ValueIfFalse']), 'ImportValueTest':", "'ConditionTest': cfn_yaml_tags.Condition('MyCondition'), 'EqualsTest': cfn_yaml_tags.Equals(['Value1', 'Value2']), 'FindInMapTest': cfn_yaml_tags.FindInMap(['MapName', 'TopLevelKey', 'SecondLevelKey']), 'GetAttListTest': cfn_yaml_tags.GetAtt(['ResourceName', 'AttName']), 'GetAttStringTest':", "ResourceName SelectTest: !Select [0, [1, 2, 3]] SplitTest: !Split [',', 'foo/bar'] SubTest: !Sub", "'Base64Test': cfn_yaml_tags.Base64('abc'), 'ConditionTest': cfn_yaml_tags.Condition('MyCondition'), 'EqualsTest': cfn_yaml_tags.Equals(['Value1', 'Value2']), 'FindInMapTest': cfn_yaml_tags.FindInMap(['MapName', 'TopLevelKey', 'SecondLevelKey']), 'GetAttListTest': cfn_yaml_tags.GetAtt(['ResourceName',", "!GetAtt [ResourceName, AttName] GetAttStringTest: !GetAtt ResourceName.AttName IfTest: !If - 
Condition - ValueIfTrue -", "'SecondLevelKey']), 'GetAttListTest': cfn_yaml_tags.GetAtt(['ResourceName', 'AttName']), 'GetAttStringTest': cfn_yaml_tags.GetAtt('ResourceName.AttName'), 'IfTest': cfn_yaml_tags.If(['Condition', 'ValueIfTrue', 'ValueIfFalse']), 'ImportValueTest': cfn_yaml_tags.ImportValue('ImportName'), 'JoinTest':", "cfn_yaml_tags.Split([',', 'foo/bar']), 'SubTest': cfn_yaml_tags.Sub(['$foo', {'foo': 'bar'}]), 'NestedTest': cfn_yaml_tags.If([ cfn_yaml_tags.And([ cfn_yaml_tags.Not([cfn_yaml_tags.Condition('MyCondition')]), cfn_yaml_tags.Join([ ' ',", "self.assertEqual(loaded_obj, self.obj) def test_safe_dump_fail(self): with self.assertRaises(yaml.representer.RepresenterError): dumped = yaml.safe_dump(self.obj) def test_safe_dump_ok(self): cfn_yaml_tags.mark_safe() dumped", "[ResourceName, AttName] GetAttStringTest: !GetAtt ResourceName.AttName IfTest: !If - Condition - ValueIfTrue - ValueIfFalse", "reload_module(module) reload_module(cfn_yaml_tags) self.doc = \"\"\" AndTest: !And - Condition1 - Condition2 AZsTest: !GetAZs", "!Base64 abc ConditionTest: !Condition MyCondition EqualsTest: !Equals [Value1, Value2] FindInMapTest: !FindInMap [MapName, TopLevelKey,", "ValueIfTrue - ValueIfFalse ImportValueTest: !ImportValue ImportName JoinTest: !Join - ' ' - -", "AndTest: !And - Condition1 - Condition2 AZsTest: !GetAZs us-east-1 Base64Test: !Base64 abc ConditionTest:", "Condition2 AZsTest: !GetAZs us-east-1 Base64Test: !Base64 abc ConditionTest: !Condition MyCondition EqualsTest: !Equals [Value1,", "yaml.constructor, yaml.loader, yaml]: reload_module(module) reload_module(cfn_yaml_tags) self.doc = \"\"\" AndTest: !And - Condition1 -", "ResourceName.AttName IfTest: !If - Condition - ValueIfTrue - ValueIfFalse ImportValueTest: !ImportValue ImportName JoinTest:", "- !Ref MyResource - !Sub - $foo - foo: !GetAZs us-east-1 - ValueIfTrue", "!And - !Not [!Condition MyCondition] - !Join - ' ' - - !Ref", "foo: !GetAZs us-east-1 - ValueIfTrue - ValueIfFalse \"\"\" 
self.obj = { 'AndTest': cfn_yaml_tags.And(['Condition1',", "= { 'AndTest': cfn_yaml_tags.And(['Condition1', 'Condition2']), 'AZsTest': cfn_yaml_tags.GetAZs('us-east-1'), 'Base64Test': cfn_yaml_tags.Base64('abc'), 'ConditionTest': cfn_yaml_tags.Condition('MyCondition'), 'EqualsTest': cfn_yaml_tags.Equals(['Value1',", "[Value1, Value2] FindInMapTest: !FindInMap [MapName, TopLevelKey, SecondLevelKey] GetAttListTest: !GetAtt [ResourceName, AttName] GetAttStringTest: !GetAtt", "- ' ' - - !Ref MyResource - !Sub - $foo - foo:", "'EqualsTest': cfn_yaml_tags.Equals(['Value1', 'Value2']), 'FindInMapTest': cfn_yaml_tags.FindInMap(['MapName', 'TopLevelKey', 'SecondLevelKey']), 'GetAttListTest': cfn_yaml_tags.GetAtt(['ResourceName', 'AttName']), 'GetAttStringTest': cfn_yaml_tags.GetAtt('ResourceName.AttName'), 'IfTest':", "\"\"\" AndTest: !And - Condition1 - Condition2 AZsTest: !GetAZs us-east-1 Base64Test: !Base64 abc", "yaml]: reload_module(module) reload_module(cfn_yaml_tags) self.doc = \"\"\" AndTest: !And - Condition1 - Condition2 AZsTest:", "'FindInMapTest': cfn_yaml_tags.FindInMap(['MapName', 'TopLevelKey', 'SecondLevelKey']), 'GetAttListTest': cfn_yaml_tags.GetAtt(['ResourceName', 'AttName']), 'GetAttStringTest': cfn_yaml_tags.GetAtt('ResourceName.AttName'), 'IfTest': cfn_yaml_tags.If(['Condition', 'ValueIfTrue', 'ValueIfFalse']),", "Condition1 - Condition2 AZsTest: !GetAZs us-east-1 Base64Test: !Base64 abc ConditionTest: !Condition MyCondition EqualsTest:", "[1, 2, 3]]), 'SplitTest': cfn_yaml_tags.Split([',', 'foo/bar']), 'SubTest': cfn_yaml_tags.Sub(['$foo', {'foo': 'bar'}]), 'NestedTest': cfn_yaml_tags.If([ cfn_yaml_tags.And([", "'ValueIfFalse', ]) } def test_load(self): loaded_obj = yaml.load(self.doc) self.assertEqual(loaded_obj, self.obj) def test_dump(self): dumped", "for module in [yaml.representer, yaml.dumper, yaml.constructor, yaml.loader, yaml]: reload_module(module) reload_module(cfn_yaml_tags) self.doc = \"\"\"", "} def test_load(self): loaded_obj = 
yaml.load(self.doc) self.assertEqual(loaded_obj, self.obj) def test_dump(self): dumped = yaml.dump(self.obj)", "!Sub - '$foo' - foo: bar NestedTest: !If - !And - !Not [!Condition", "- ValueIfFalse ImportValueTest: !ImportValue ImportName JoinTest: !Join - ' ' - - hello", "Condition1 - Condition2 RefTest: !Ref ResourceName SelectTest: !Select [0, [1, 2, 3]] SplitTest:", "'foo/bar'] SubTest: !Sub - '$foo' - foo: bar NestedTest: !If - !And -", "- !Sub - $foo - foo: !GetAZs us-east-1 - ValueIfTrue - ValueIfFalse \"\"\"", "cfn_yaml_tags.GetAZs('us-east-1'), 'Base64Test': cfn_yaml_tags.Base64('abc'), 'ConditionTest': cfn_yaml_tags.Condition('MyCondition'), 'EqualsTest': cfn_yaml_tags.Equals(['Value1', 'Value2']), 'FindInMapTest': cfn_yaml_tags.FindInMap(['MapName', 'TopLevelKey', 'SecondLevelKey']), 'GetAttListTest':", "cfn_yaml_tags.GetAZs('us-east-1'), }, ]) ] ]), ]), 'ValueIfTrue', 'ValueIfFalse', ]) } def test_load(self): loaded_obj", "us-east-1 Base64Test: !Base64 abc ConditionTest: !Condition MyCondition EqualsTest: !Equals [Value1, Value2] FindInMapTest: !FindInMap", "{'foo': 'bar'}]), 'NestedTest': cfn_yaml_tags.If([ cfn_yaml_tags.And([ cfn_yaml_tags.Not([cfn_yaml_tags.Condition('MyCondition')]), cfn_yaml_tags.Join([ ' ', [ cfn_yaml_tags.Ref('MyResource'), cfn_yaml_tags.Sub([ '$foo',", "def test_safe_dump_fail(self): with self.assertRaises(yaml.representer.RepresenterError): dumped = yaml.safe_dump(self.obj) def test_safe_dump_ok(self): cfn_yaml_tags.mark_safe() dumped = yaml.safe_dump(self.doc)", "CfnYamlTagTest(unittest.TestCase): def setUp(self): for module in [yaml.representer, yaml.dumper, yaml.constructor, yaml.loader, yaml]: reload_module(module) reload_module(cfn_yaml_tags)", "yaml.loader, yaml]: reload_module(module) reload_module(cfn_yaml_tags) self.doc = \"\"\" AndTest: !And - Condition1 - Condition2", "- !Join - ' ' - - !Ref MyResource - !Sub - $foo", "'ValueIfFalse']), 'ImportValueTest': cfn_yaml_tags.ImportValue('ImportName'), 'JoinTest': 
cfn_yaml_tags.Join([' ', ['hello', 'world']]), 'NotTest': cfn_yaml_tags.Not(['Condition']), 'OrTest': cfn_yaml_tags.Or(['Condition1', 'Condition2']),", "def test_load(self): loaded_obj = yaml.load(self.doc) self.assertEqual(loaded_obj, self.obj) def test_dump(self): dumped = yaml.dump(self.obj) def", "cfn_yaml_tags.Sub([ '$foo', { 'foo': cfn_yaml_tags.GetAZs('us-east-1'), }, ]) ] ]), ]), 'ValueIfTrue', 'ValueIfFalse', ])", "dumped = cfn_yaml_tags.JSONFromYAMLEncoder().encode(self.obj) json_obj = json.loads(dumped) self.assertEqual(json_obj['GetAttListTest']['Fn::GetAtt'], ['ResourceName', 'AttName']) self.assertEqual(json_obj['GetAttStringTest']['Fn::GetAtt'], ['ResourceName', 'AttName']) ref_obj", "def test_json(self): json.JSONEncoder().encode({'Fn::ImportValue': 'ImportName'}) dumped = cfn_yaml_tags.JSONFromYAMLEncoder().encode(self.obj) json_obj = json.loads(dumped) self.assertEqual(json_obj['GetAttListTest']['Fn::GetAtt'], ['ResourceName', 'AttName'])", "def setUp(self): for module in [yaml.representer, yaml.dumper, yaml.constructor, yaml.loader, yaml]: reload_module(module) reload_module(cfn_yaml_tags) self.doc", "!Ref ResourceName SelectTest: !Select [0, [1, 2, 3]] SplitTest: !Split [',', 'foo/bar'] SubTest:", "'GetAttListTest': cfn_yaml_tags.GetAtt(['ResourceName', 'AttName']), 'GetAttStringTest': cfn_yaml_tags.GetAtt('ResourceName.AttName'), 'IfTest': cfn_yaml_tags.If(['Condition', 'ValueIfTrue', 'ValueIfFalse']), 'ImportValueTest': cfn_yaml_tags.ImportValue('ImportName'), 'JoinTest': cfn_yaml_tags.Join(['", "'GetAttStringTest': cfn_yaml_tags.GetAtt('ResourceName.AttName'), 'IfTest': cfn_yaml_tags.If(['Condition', 'ValueIfTrue', 'ValueIfFalse']), 'ImportValueTest': cfn_yaml_tags.ImportValue('ImportName'), 'JoinTest': cfn_yaml_tags.Join([' ', ['hello', 'world']]),", "'world']]), 'NotTest': cfn_yaml_tags.Not(['Condition']), 'OrTest': cfn_yaml_tags.Or(['Condition1', 'Condition2']), 'RefTest': cfn_yaml_tags.Ref('ResourceName'), 'SelectTest': 
cfn_yaml_tags.Select([0, [1, 2, 3]]),", "json_obj = json.loads(dumped) self.assertIn('Fn::GetAtt', json_obj['RefTest']) self.assertNotIn('Ref', json_obj['RefTest']) self.assertEqual(json_obj['RefTest']['Fn::GetAtt'], ['ResourceName', 'AttName']) def test_safe_load_fail(self): with", "FindInMapTest: !FindInMap [MapName, TopLevelKey, SecondLevelKey] GetAttListTest: !GetAtt [ResourceName, AttName] GetAttStringTest: !GetAtt ResourceName.AttName IfTest:", "- Condition2 RefTest: !Ref ResourceName SelectTest: !Select [0, [1, 2, 3]] SplitTest: !Split", "json import yaml import cfn_yaml_tags class CfnYamlTagTest(unittest.TestCase): def setUp(self): for module in [yaml.representer,", "Condition - ValueIfTrue - ValueIfFalse ImportValueTest: !ImportValue ImportName JoinTest: !Join - ' '", "EqualsTest: !Equals [Value1, Value2] FindInMapTest: !FindInMap [MapName, TopLevelKey, SecondLevelKey] GetAttListTest: !GetAtt [ResourceName, AttName]", "- !And - !Not [!Condition MyCondition] - !Join - ' ' - -", "'JoinTest': cfn_yaml_tags.Join([' ', ['hello', 'world']]), 'NotTest': cfn_yaml_tags.Not(['Condition']), 'OrTest': cfn_yaml_tags.Or(['Condition1', 'Condition2']), 'RefTest': cfn_yaml_tags.Ref('ResourceName'), 'SelectTest':", "json.JSONEncoder().encode({'Fn::ImportValue': 'ImportName'}) dumped = cfn_yaml_tags.JSONFromYAMLEncoder().encode(self.obj) json_obj = json.loads(dumped) self.assertEqual(json_obj['GetAttListTest']['Fn::GetAtt'], ['ResourceName', 'AttName']) self.assertEqual(json_obj['GetAttStringTest']['Fn::GetAtt'], ['ResourceName',", "NotTest: !Not [Condition] OrTest: !Or - Condition1 - Condition2 RefTest: !Ref ResourceName SelectTest:", "cfn_yaml_tags.GetAtt(['ResourceName', 'AttName']), 'GetAttStringTest': cfn_yaml_tags.GetAtt('ResourceName.AttName'), 'IfTest': cfn_yaml_tags.If(['Condition', 'ValueIfTrue', 'ValueIfFalse']), 'ImportValueTest': cfn_yaml_tags.ImportValue('ImportName'), 'JoinTest': cfn_yaml_tags.Join([' ',", "!Condition MyCondition EqualsTest: !Equals [Value1, 
Value2] FindInMapTest: !FindInMap [MapName, TopLevelKey, SecondLevelKey] GetAttListTest: !GetAtt", "- ValueIfTrue - ValueIfFalse \"\"\" self.obj = { 'AndTest': cfn_yaml_tags.And(['Condition1', 'Condition2']), 'AZsTest': cfn_yaml_tags.GetAZs('us-east-1'),", "test_safe_load_ok(self): cfn_yaml_tags.mark_safe() loaded_obj = yaml.safe_load(self.doc) self.assertEqual(loaded_obj, self.obj) def test_safe_dump_fail(self): with self.assertRaises(yaml.representer.RepresenterError): dumped =", "{ 'AndTest': cfn_yaml_tags.And(['Condition1', 'Condition2']), 'AZsTest': cfn_yaml_tags.GetAZs('us-east-1'), 'Base64Test': cfn_yaml_tags.Base64('abc'), 'ConditionTest': cfn_yaml_tags.Condition('MyCondition'), 'EqualsTest': cfn_yaml_tags.Equals(['Value1', 'Value2']),", "- ' ' - - hello - world NotTest: !Not [Condition] OrTest: !Or", "<reponame>iRobotCorporation/cfn-yaml-tags import unittest import six from six.moves import reload_module import json import yaml", "AZsTest: !GetAZs us-east-1 Base64Test: !Base64 abc ConditionTest: !Condition MyCondition EqualsTest: !Equals [Value1, Value2]", "= {'RefTest': cfn_yaml_tags.Ref('ResourceName.AttName')} dumped = cfn_yaml_tags.JSONFromYAMLEncoder().encode(ref_obj) json_obj = json.loads(dumped) self.assertIn('Fn::GetAtt', json_obj['RefTest']) self.assertNotIn('Ref', json_obj['RefTest'])", "', ['hello', 'world']]), 'NotTest': cfn_yaml_tags.Not(['Condition']), 'OrTest': cfn_yaml_tags.Or(['Condition1', 'Condition2']), 'RefTest': cfn_yaml_tags.Ref('ResourceName'), 'SelectTest': cfn_yaml_tags.Select([0, [1,", "[0, [1, 2, 3]] SplitTest: !Split [',', 'foo/bar'] SubTest: !Sub - '$foo' -", "bar NestedTest: !If - !And - !Not [!Condition MyCondition] - !Join - '", "self.obj = { 'AndTest': cfn_yaml_tags.And(['Condition1', 'Condition2']), 'AZsTest': cfn_yaml_tags.GetAZs('us-east-1'), 'Base64Test': cfn_yaml_tags.Base64('abc'), 'ConditionTest': cfn_yaml_tags.Condition('MyCondition'), 'EqualsTest':", "Base64Test: !Base64 abc ConditionTest: !Condition MyCondition 
EqualsTest: !Equals [Value1, Value2] FindInMapTest: !FindInMap [MapName,", "MyCondition EqualsTest: !Equals [Value1, Value2] FindInMapTest: !FindInMap [MapName, TopLevelKey, SecondLevelKey] GetAttListTest: !GetAtt [ResourceName,", "'SubTest': cfn_yaml_tags.Sub(['$foo', {'foo': 'bar'}]), 'NestedTest': cfn_yaml_tags.If([ cfn_yaml_tags.And([ cfn_yaml_tags.Not([cfn_yaml_tags.Condition('MyCondition')]), cfn_yaml_tags.Join([ ' ', [ cfn_yaml_tags.Ref('MyResource'),", "' ' - - hello - world NotTest: !Not [Condition] OrTest: !Or -", "!Or - Condition1 - Condition2 RefTest: !Ref ResourceName SelectTest: !Select [0, [1, 2,", "def test_dump(self): dumped = yaml.dump(self.obj) def test_json(self): json.JSONEncoder().encode({'Fn::ImportValue': 'ImportName'}) dumped = cfn_yaml_tags.JSONFromYAMLEncoder().encode(self.obj) json_obj", "SubTest: !Sub - '$foo' - foo: bar NestedTest: !If - !And - !Not", "cfn_yaml_tags.Condition('MyCondition'), 'EqualsTest': cfn_yaml_tags.Equals(['Value1', 'Value2']), 'FindInMapTest': cfn_yaml_tags.FindInMap(['MapName', 'TopLevelKey', 'SecondLevelKey']), 'GetAttListTest': cfn_yaml_tags.GetAtt(['ResourceName', 'AttName']), 'GetAttStringTest': cfn_yaml_tags.GetAtt('ResourceName.AttName'),", "'Condition2']), 'RefTest': cfn_yaml_tags.Ref('ResourceName'), 'SelectTest': cfn_yaml_tags.Select([0, [1, 2, 3]]), 'SplitTest': cfn_yaml_tags.Split([',', 'foo/bar']), 'SubTest': cfn_yaml_tags.Sub(['$foo',", "GetAttStringTest: !GetAtt ResourceName.AttName IfTest: !If - Condition - ValueIfTrue - ValueIfFalse ImportValueTest: !ImportValue", "cfn_yaml_tags.And([ cfn_yaml_tags.Not([cfn_yaml_tags.Condition('MyCondition')]), cfn_yaml_tags.Join([ ' ', [ cfn_yaml_tags.Ref('MyResource'), cfn_yaml_tags.Sub([ '$foo', { 'foo': cfn_yaml_tags.GetAZs('us-east-1'), },", "Condition2 RefTest: !Ref ResourceName SelectTest: !Select [0, [1, 2, 3]] SplitTest: !Split [',',", "cfn_yaml_tags.If(['Condition', 'ValueIfTrue', 'ValueIfFalse']), 'ImportValueTest': 
cfn_yaml_tags.ImportValue('ImportName'), 'JoinTest': cfn_yaml_tags.Join([' ', ['hello', 'world']]), 'NotTest': cfn_yaml_tags.Not(['Condition']), 'OrTest':", "ValueIfFalse \"\"\" self.obj = { 'AndTest': cfn_yaml_tags.And(['Condition1', 'Condition2']), 'AZsTest': cfn_yaml_tags.GetAZs('us-east-1'), 'Base64Test': cfn_yaml_tags.Base64('abc'), 'ConditionTest':", "- !Not [!Condition MyCondition] - !Join - ' ' - - !Ref MyResource", "cfn_yaml_tags.If([ cfn_yaml_tags.And([ cfn_yaml_tags.Not([cfn_yaml_tags.Condition('MyCondition')]), cfn_yaml_tags.Join([ ' ', [ cfn_yaml_tags.Ref('MyResource'), cfn_yaml_tags.Sub([ '$foo', { 'foo': cfn_yaml_tags.GetAZs('us-east-1'),", "[ cfn_yaml_tags.Ref('MyResource'), cfn_yaml_tags.Sub([ '$foo', { 'foo': cfn_yaml_tags.GetAZs('us-east-1'), }, ]) ] ]), ]), 'ValueIfTrue',", "yaml.safe_load(self.doc) self.assertEqual(loaded_obj, self.obj) def test_safe_dump_fail(self): with self.assertRaises(yaml.representer.RepresenterError): dumped = yaml.safe_dump(self.obj) def test_safe_dump_ok(self): cfn_yaml_tags.mark_safe()", "[Condition] OrTest: !Or - Condition1 - Condition2 RefTest: !Ref ResourceName SelectTest: !Select [0,", "', [ cfn_yaml_tags.Ref('MyResource'), cfn_yaml_tags.Sub([ '$foo', { 'foo': cfn_yaml_tags.GetAZs('us-east-1'), }, ]) ] ]), ]),", "cfn_yaml_tags.mark_safe() loaded_obj = yaml.safe_load(self.doc) self.assertEqual(loaded_obj, self.obj) def test_safe_dump_fail(self): with self.assertRaises(yaml.representer.RepresenterError): dumped = yaml.safe_dump(self.obj)", "'$foo', { 'foo': cfn_yaml_tags.GetAZs('us-east-1'), }, ]) ] ]), ]), 'ValueIfTrue', 'ValueIfFalse', ]) }", "self.assertRaises(yaml.representer.RepresenterError): dumped = yaml.safe_dump(self.obj) def test_safe_dump_ok(self): cfn_yaml_tags.mark_safe() dumped = yaml.safe_dump(self.doc) if __name__ ==", "\"\"\" self.obj = { 'AndTest': cfn_yaml_tags.And(['Condition1', 'Condition2']), 'AZsTest': cfn_yaml_tags.GetAZs('us-east-1'), 'Base64Test': cfn_yaml_tags.Base64('abc'), 
'ConditionTest': cfn_yaml_tags.Condition('MyCondition'),", "yaml.load(self.doc) self.assertEqual(loaded_obj, self.obj) def test_dump(self): dumped = yaml.dump(self.obj) def test_json(self): json.JSONEncoder().encode({'Fn::ImportValue': 'ImportName'}) dumped", "self.assertEqual(json_obj['RefTest']['Fn::GetAtt'], ['ResourceName', 'AttName']) def test_safe_load_fail(self): with self.assertRaises(yaml.constructor.ConstructorError): yaml.safe_load(self.doc) def test_safe_load_ok(self): cfn_yaml_tags.mark_safe() loaded_obj =", "'Value2']), 'FindInMapTest': cfn_yaml_tags.FindInMap(['MapName', 'TopLevelKey', 'SecondLevelKey']), 'GetAttListTest': cfn_yaml_tags.GetAtt(['ResourceName', 'AttName']), 'GetAttStringTest': cfn_yaml_tags.GetAtt('ResourceName.AttName'), 'IfTest': cfn_yaml_tags.If(['Condition', 'ValueIfTrue',", "RefTest: !Ref ResourceName SelectTest: !Select [0, [1, 2, 3]] SplitTest: !Split [',', 'foo/bar']", "hello - world NotTest: !Not [Condition] OrTest: !Or - Condition1 - Condition2 RefTest:", "!Not [Condition] OrTest: !Or - Condition1 - Condition2 RefTest: !Ref ResourceName SelectTest: !Select", "def test_safe_load_ok(self): cfn_yaml_tags.mark_safe() loaded_obj = yaml.safe_load(self.doc) self.assertEqual(loaded_obj, self.obj) def test_safe_dump_fail(self): with self.assertRaises(yaml.representer.RepresenterError): dumped", "yaml import cfn_yaml_tags class CfnYamlTagTest(unittest.TestCase): def setUp(self): for module in [yaml.representer, yaml.dumper, yaml.constructor,", "abc ConditionTest: !Condition MyCondition EqualsTest: !Equals [Value1, Value2] FindInMapTest: !FindInMap [MapName, TopLevelKey, SecondLevelKey]", "cfn_yaml_tags class CfnYamlTagTest(unittest.TestCase): def setUp(self): for module in [yaml.representer, yaml.dumper, yaml.constructor, yaml.loader, yaml]:", "[MapName, TopLevelKey, SecondLevelKey] GetAttListTest: !GetAtt [ResourceName, AttName] GetAttStringTest: !GetAtt ResourceName.AttName IfTest: !If -", "cfn_yaml_tags.Sub(['$foo', {'foo': 
'bar'}]), 'NestedTest': cfn_yaml_tags.If([ cfn_yaml_tags.And([ cfn_yaml_tags.Not([cfn_yaml_tags.Condition('MyCondition')]), cfn_yaml_tags.Join([ ' ', [ cfn_yaml_tags.Ref('MyResource'), cfn_yaml_tags.Sub([", "' - - !Ref MyResource - !Sub - $foo - foo: !GetAZs us-east-1", "- world NotTest: !Not [Condition] OrTest: !Or - Condition1 - Condition2 RefTest: !Ref", "!Join - ' ' - - hello - world NotTest: !Not [Condition] OrTest:", "!Sub - $foo - foo: !GetAZs us-east-1 - ValueIfTrue - ValueIfFalse \"\"\" self.obj", "'AttName']) self.assertEqual(json_obj['GetAttStringTest']['Fn::GetAtt'], ['ResourceName', 'AttName']) ref_obj = {'RefTest': cfn_yaml_tags.Ref('ResourceName.AttName')} dumped = cfn_yaml_tags.JSONFromYAMLEncoder().encode(ref_obj) json_obj =", "self.obj) def test_dump(self): dumped = yaml.dump(self.obj) def test_json(self): json.JSONEncoder().encode({'Fn::ImportValue': 'ImportName'}) dumped = cfn_yaml_tags.JSONFromYAMLEncoder().encode(self.obj)", "'ImportValueTest': cfn_yaml_tags.ImportValue('ImportName'), 'JoinTest': cfn_yaml_tags.Join([' ', ['hello', 'world']]), 'NotTest': cfn_yaml_tags.Not(['Condition']), 'OrTest': cfn_yaml_tags.Or(['Condition1', 'Condition2']), 'RefTest':", "cfn_yaml_tags.Select([0, [1, 2, 3]]), 'SplitTest': cfn_yaml_tags.Split([',', 'foo/bar']), 'SubTest': cfn_yaml_tags.Sub(['$foo', {'foo': 'bar'}]), 'NestedTest': cfn_yaml_tags.If([", "!If - Condition - ValueIfTrue - ValueIfFalse ImportValueTest: !ImportValue ImportName JoinTest: !Join -", "cfn_yaml_tags.Join([' ', ['hello', 'world']]), 'NotTest': cfn_yaml_tags.Not(['Condition']), 'OrTest': cfn_yaml_tags.Or(['Condition1', 'Condition2']), 'RefTest': cfn_yaml_tags.Ref('ResourceName'), 'SelectTest': cfn_yaml_tags.Select([0,", "JoinTest: !Join - ' ' - - hello - world NotTest: !Not [Condition]", "import json import yaml import cfn_yaml_tags class CfnYamlTagTest(unittest.TestCase): def setUp(self): for module in", "['ResourceName', 'AttName']) def test_safe_load_fail(self): with 
self.assertRaises(yaml.constructor.ConstructorError): yaml.safe_load(self.doc) def test_safe_load_ok(self): cfn_yaml_tags.mark_safe() loaded_obj = yaml.safe_load(self.doc)", "class CfnYamlTagTest(unittest.TestCase): def setUp(self): for module in [yaml.representer, yaml.dumper, yaml.constructor, yaml.loader, yaml]: reload_module(module)", "import reload_module import json import yaml import cfn_yaml_tags class CfnYamlTagTest(unittest.TestCase): def setUp(self): for", "import six from six.moves import reload_module import json import yaml import cfn_yaml_tags class", "json.loads(dumped) self.assertEqual(json_obj['GetAttListTest']['Fn::GetAtt'], ['ResourceName', 'AttName']) self.assertEqual(json_obj['GetAttStringTest']['Fn::GetAtt'], ['ResourceName', 'AttName']) ref_obj = {'RefTest': cfn_yaml_tags.Ref('ResourceName.AttName')} dumped =", "!Ref MyResource - !Sub - $foo - foo: !GetAZs us-east-1 - ValueIfTrue -", "['hello', 'world']]), 'NotTest': cfn_yaml_tags.Not(['Condition']), 'OrTest': cfn_yaml_tags.Or(['Condition1', 'Condition2']), 'RefTest': cfn_yaml_tags.Ref('ResourceName'), 'SelectTest': cfn_yaml_tags.Select([0, [1, 2,", "setUp(self): for module in [yaml.representer, yaml.dumper, yaml.constructor, yaml.loader, yaml]: reload_module(module) reload_module(cfn_yaml_tags) self.doc =", "ImportValueTest: !ImportValue ImportName JoinTest: !Join - ' ' - - hello - world", "' - - hello - world NotTest: !Not [Condition] OrTest: !Or - Condition1", "!Split [',', 'foo/bar'] SubTest: !Sub - '$foo' - foo: bar NestedTest: !If -", "[',', 'foo/bar'] SubTest: !Sub - '$foo' - foo: bar NestedTest: !If - !And", "with self.assertRaises(yaml.representer.RepresenterError): dumped = yaml.safe_dump(self.obj) def test_safe_dump_ok(self): cfn_yaml_tags.mark_safe() dumped = yaml.safe_dump(self.doc) if __name__", "self.assertEqual(json_obj['GetAttStringTest']['Fn::GetAtt'], ['ResourceName', 'AttName']) ref_obj = {'RefTest': cfn_yaml_tags.Ref('ResourceName.AttName')} dumped = 
cfn_yaml_tags.JSONFromYAMLEncoder().encode(ref_obj) json_obj = json.loads(dumped)", "self.doc = \"\"\" AndTest: !And - Condition1 - Condition2 AZsTest: !GetAZs us-east-1 Base64Test:", "cfn_yaml_tags.JSONFromYAMLEncoder().encode(self.obj) json_obj = json.loads(dumped) self.assertEqual(json_obj['GetAttListTest']['Fn::GetAtt'], ['ResourceName', 'AttName']) self.assertEqual(json_obj['GetAttStringTest']['Fn::GetAtt'], ['ResourceName', 'AttName']) ref_obj = {'RefTest':", "cfn_yaml_tags.Not(['Condition']), 'OrTest': cfn_yaml_tags.Or(['Condition1', 'Condition2']), 'RefTest': cfn_yaml_tags.Ref('ResourceName'), 'SelectTest': cfn_yaml_tags.Select([0, [1, 2, 3]]), 'SplitTest': cfn_yaml_tags.Split([',',", "!ImportValue ImportName JoinTest: !Join - ' ' - - hello - world NotTest:", "!GetAtt ResourceName.AttName IfTest: !If - Condition - ValueIfTrue - ValueIfFalse ImportValueTest: !ImportValue ImportName" ]
[ "if isinstance(term, Compound): args = [] for arg in term.value[1:]: args.append(ground(arg, mgu)) return", "v2 = evaluate_expression(e2) return v1 != v2 class ArithmeticLessThan_2(BuiltIn): \"\"\"'<'(@evaluable, @evaluable) '<'(E1, E2)", "if n.value >= len(term.value): return False return self.unify(arg, term.value[n.value]) class Univ_2(BuiltIn): \"\"\"'=..'(+nonvar, ?list)", "It has the side effect of exiting from the processor and returning to", "is true iff Instances unifies with the list of values to which a", "clause body to a # goal and on access permission to a user-defined", "core._FLAGS[flag.name] = f._replace(value=value.name) return True class CurrentPrologFlag_2(BuiltIn): '''current_prolog_flag(?flag, ?term) current_prolog_flag(Flag, Value) is true", "E1 is greater than or equal to the corresponding arithmetic value of E2.\"\"\"", "preserve the binding for t2 just in # case t2 were a renamed", "[Atomic(c) for c in atom.name] return self.unify(charlist, List.from_list(chars)) else: chars = [c.name for", "Arithmetic comparison (ISO 8.7) ### class ArithmeticEqual_2(BuiltIn): \"\"\"'=:='(@evaluable, @evaluable) '=:='(E1, E2) is true", "goal term of Goal * else G is T.''' if term.name == '^'", "isnumber, islist, ispartiallist, iscallable from ..core import BuiltIn ### ### Term unification (ISO", "x < 0 and isinstance(y, float): from ..core import PrologEvaluationError raise PrologEvaluationError('undefined') if", "isinstance(after, Atomic): self.data = [(d, p) for (d, p) in self.data if len(n)", "return x == y class TermNotIdentical_2(BuiltIn): \"\"\"'\\=='(@term, @term) Test the ordering of two", "isvariable(element): try: chr(element.value) except UnicodeDecodeError: self.throw_representation_error(element) if isvariable(number) or islist(codelist): from ..parser import", "the successive characters of the name of atom Atom.''' def execute(self, atom, charlist):", "8.2) ### class Unify_2(BuiltIn): \"\"\"'='(?term, ?term) If X and Y are NSTO (Not", "of a 
goal for each different instantiation of the free variables in that", "def pick_data(self, atom1, atom2, atom12): c = self.data.pop(0) return (self.unify(atom1, Atomic(c[0])) and self.unify(atom2,", "before.value if isinstance(before, Atomic) else 0 end = len(n) - (after.value if isinstance(after,", "Atomic) class Compound_1(BuiltIn): '''compound(@term) compound(X) is true iff X is a member of", "'''atom_concat(?atom, ?atom, +atom)\\natom_concat(+atom, +atom, -atom) atom_concat(Atom_1, Atom_2, Atom_12) is true iff characters of", "halt neither succeeds nor fails. It has the side effect of exiting from", "atom.name start = before.value if isinstance(before, Atomic) else 0 end = len(n) -", "self.pick_clause(head, body) def pick_clause(self, head, body): if not self.clauses_to_remove: return False self.kb.retract(self.clauses_to_remove.pop(0)) c", "+callable_term, ?list) setof/3 assembles as a list the solutions of a goal for", "False return self.pick_bag(template, goal, instances) def pick_bag(self, template, goal, instances): wt = self.s[0]", "def execute(self, atom, length): if isvariable(atom): self.throw_instantiation_error() if not isatom(atom): self.throw_type_error('atom', atom) if", "list the solutions of Goal for each different instantiation of the free variables", "No undefined errors if isvariable(term): from ..core import PrologInstantiationError raise PrologInstantiationError() if term.arity", "not isvariable(flag) and not isatom(flag): self.throw_type_error('atom', flag) if isatom(flag) and not core._FLAGS[flag.name]: self.throw_domain_error('prolog_flag',", "term.name == '^' and term.arity == 2: s.update(variable_set(term.value[1])) s.update(existential_variable_set(term.value[2])) return s return s", "t_list = List.from_list(t_list) self.s = s_next return self.unify(t_list, instances) def _create_solution_list(self, s): return", "term) def search_evaluable_functor(name): import math import operator d = {'+/2' : operator.add, '*/2'", "else: head = 
clause body = Atomic.TRUE if isvariable(head): self.throw_instantiation_error() if isnumber(head): self.throw_type_error('callable',", "not isatom(atom): self.throw_type_error('atom', atom) if (not isvariable(length) and not (isnumber(length) and isinstance(length.value, int))):", "self.pick_flag(flag, value) def reexecute(self, flag, value): self.reset_substitution() if not self.flags: return False return", "is less than or equal to the corresponding arithmetic value of E2.\"\"\" def", "if T unifies with ^(V, G) then EV is the union of the", "@evaluable) '<'(E1, E2) is true iff evaluating E1 and E2 as expressions the", "if search_builtin(head): pi = Compound('/', Atomic(head.name), Atomic(head.arity)) self.throw_permission_error('modify', 'static_procedure', pi) self.kb.assert_clause(clause, append=True) return", "1: t = elements.head return self.unify(term, t) elif len(elements) > 1: name =", "PrologTypeError raise PrologTypeError('integer', Atomic(s)) return n >> s def leftshift(n, s): '''Redefined w.r.t.", "class Asserta_1(BuiltIn): '''asserta(@clause) asserta(Clause) is true. 
It is used to add Clause to", "return renamed_copy(head) class CurrentPredicate_1(BuiltIn): '''current_predicate(?predicate_indicator) current_predicate(PI) is true iff PI is a predicate", "### ### Term comparison (ISO 8.4) ### class TermLessThanOrEqual_2(BuiltIn): \"\"\"'@=<'(@term, @term) Test the", "functions def free_variable_set(t, v): '''The free variable set FV of a term T", "== 0: return Atomic(head.name) from ..core import renamed_copy return renamed_copy(head) class CurrentPredicate_1(BuiltIn): '''current_predicate(?predicate_indicator)", "indicator for one of the user-defined procedures in the database.''' def execute(self, pi):", "and ispartiallist(elements): self.throw_instantiation_error() if not islist(elements) and not ispartiallist(elements): self.throw_type_error('list', elements) if isvariable(term)", "return False ### ### Arithmetic evaluation (ISO 8.6) ### Simple arithmetic functors (ISO", "List)): return (self.unify(Atomic(term.name), name) and self.unify(Atomic(term.arity), arity)) if isinstance(term, Variable): if isinstance(name, Atomic)", "if isinstance(x, int) else s def float_integer_part(x): '''Redefined w.r.t. 
Python because in ISO", "not identical terms.\"\"\" def execute(self, x, y): # The Python __ne__ method does", "= [(d, p) for (d, p) in self.data if n.index(d, p) == before.value]", "iff: * Term is an atomic term and List is the list whose", ": TermLessThan_2, '@>/2' : TermGreaterThan_2, '@>=/2' : TermGreaterThanOrEqual_2, # Term creation and decomposition", "t2.name in mgu: # Still preserve the binding for t2 just in #", "from a clause renaming) temp = mgu[t2.name] mgu.reduce() mgu.update({t2.name : temp}) else: mgu.reduce()", "false.\"\"\" def execute(self, term): if isvariable(term): self.throw_instantiation_error() if isnumber(term): self.throw_type_error('callable', term) from ..", "from ..core import PrologTypeError raise PrologTypeError('number', term) def search_evaluable_functor(name): import math import operator", "Also, we need to manage ZeroDivisionError errors on our own.''' if x <", "and * there is a clause in the database which corresponds to a", "on the conversion of the clause body to a # goal and on", "core for clause in procedure.clauses(): h, b = convert_clause_to_term(clause.head(), clause.body()) if (core.unify(h, head)", "exit(0) class Halt_1(BuiltIn): '''halt(+integer) halt(X) neither succeeds nor fails. 
It has the side", "not isvariable(x) and not isnumber(x) and not isinstance(x.value, int): self.throw_type_error('integer', x) exit(x.value) #", "*t) return self.unify(term, c) return False class Arg_3(BuiltIn): '''arg(+integer, +compound_term, ?term) arg(N, Term,", "SetPrologFlag_2(BuiltIn): '''set_prolog_flag(+flag, @nonvar) A goal set_prolog_flag(Flag, Value) enables the value associated with a", "of the atom Atom_2 to the characters of the name of the atom", "of the name of AtomR.''' def execute(self, atom, before, length, after, subatom): if", "y.name == '_'): return True return x <= y class TermIdentical_2(BuiltIn): \"\"\"'=='(@term, @term)", "else s.as_list() class Setof_3(Bagof_3): '''setof(?term, +callable_term, ?list) setof/3 assembles as a list the", "of two terms. '@<'(X, Y) is true iff X preceeds Y.\"\"\" def execute(self,", "iff X is a member of the set F.''' def execute(self, x): return", "mgu: t2.apply(mgu) # Do not propagate renamed term variables bindings # outside the", "else clause if isvariable(head): self.throw_instantiation_error() if isnumber(head): self.throw_type_error('callable', head) # errors on the", "''.join(chars) class CharCode_2(BuiltIn): '''char_code(+character, ?character_code)\\nchar_code(-character, +character_code) char_code(Char, Code) is true iff the character", "hooks (ISO 8.17) 'set_prolog_flag/2' : SetPrologFlag_2, 'current_prolog_flag/2' : CurrentPrologFlag_2, 'halt/0' : Halt_0, 'halt/1'", "the single # goal, even when using parentheses! '\\+/1' : Not_1, 'not/1' :", "i = modf(x) return i def float_fractional_part(x): '''Redefined w.r.t. Python because in ISO", "not isvariable(char) and len(char.name) != 1: self.throw_type_error('character', char) if not isvariable(code) and not", "pi = Compound('/', Atomic(head.name), Atomic(head.arity)) self.throw_permission_error('access', 'private_procedure', pi) from .. 
import core for", "term, name, arity): if isvariable(term) and isvariable(name): self.throw_instantiation_error() if isvariable(term) and isvariable(arity): self.throw_instantiation_error()", "member of the set A.''' def execute(self, x): return isatom(x) class Integer_1(BuiltIn): '''integer(@term)", "Atomic(''.join(chars))) elif isvariable(codelist) or ispartiallist(codelist): codes = [Atomic(ord(char)) for char in atom.name] return", "is T.''' if term.name == '^' and term.arity == 2: return iterated_goal_term(term.value[2]) return", "not None: if mgu: t2.apply(mgu) # Do not propagate renamed term variables bindings", "All solutions (ISO 8.10) ### class Findall_3(BuiltIn): '''findall(?term, +callable_term, ?list) findall(Template, Goal, Instances)", "list of the arguments of Term.\"\"\" def execute(self, term, elements): if isvariable(term) and", "pi.arity == 2: name, arity = pi.value[1:] if isvariable(name) or isvariable(arity): self.throw_instantiation_error() if", "import deref t = deref(t) v = deref(v) if isinstance(t, Atomic) and isinstance(v,", "output.''' def execute(self, number, charlist): if isvariable(number) and ispartiallist(charlist): self.throw_instantiation_error() if isvariable(number): for", "execute(self, x, y): from .. import core return core.unify(x, y) is None ###", "as float) on our own.''' if not isinstance(n, int): from ..core import PrologTypeError", "not isatom(atom): self.throw_type_error('atom', atom) if not isvariable(subatom) and not isatom(subatom): self.throw_type_error('atom', subatom) if", "ordering of two terms. '@>'(X, Y) is true iff Y preceeds X.\"\"\" def", "and returning to whatever system invoked Prolog.''' def execute(self): exit(0) class Halt_1(BuiltIn): '''halt(+integer)", "creation and destruction (ISO 8.9) ### class Asserta_1(BuiltIn): '''asserta(@clause) asserta(Clause) is true. 
It", "true iff X is a member of the set A.''' def execute(self, x):", "True def reexecute(self): return True ### ### Atomic term processing (ISO 8.16) ###", "*fvs) if fvs else Atomic('witness') g = iterated_goal_term(goal) findall = Findall_3(self.kb) findall.execute(Compound('+', self.witness,", "?integer, ?integer, ?integer, ?atom) sub_atom(Atom, Before, Length, After, Sub_atom) is true iff atom", "from ..parser import PrologParser, InvalidTermException s = ''.join([chr(code.value) for code in codelist.as_list()]) try:", "user-defined procedures in the database.''' def execute(self, pi): if not isvariable(pi) and not", "s.as_list() solutions = list(set(solutions)) solutions.sort() return solutions ### ### Logic and control (ISO", "errors on our own.''' if x < 0 and isinstance(y, float): from ..core", "of the name of atom Atom.''' def execute(self, atom, charlist): if not isvariable(atom)", "isvariable(expression): self.throw_instantiation_error() c = evaluate_expression(expression) return self.unify(result, Atomic(c)) def evaluate_expression(term): # TODO No", "\"\"\"'\\\\+'(@callable_term) # # '\\\\+'(Term) is true iff call(Term) is false.\"\"\" def execute(self, term):", "isvariable(atom1) and not isatom(atom1): self.throw_type_error('atom', atom1) if not isvariable(atom2) and not isatom(atom2): self.throw_type_error('atom',", "true iff X is a member of the set I.''' def execute(self, x):", "is a set of variables defined as the set difference of the variable", "from math import modf f, i = modf(x) return f def power(x, y):", "the empty set.''' s = set() if isinstance(term, Atomic) or isvariable(term): return s", "s = set() if isinstance(term, Atomic) or isvariable(term): return s if term.name ==", "if not self.indicators: return False # the order in which predicate indicators are", "from ..parser import isvariable, isatom, isnumber, islist, ispartiallist, iscallable from ..core import BuiltIn", "\"\"\"'\\=='(@term, @term) Test the ordering of two terms. 
'\\=='(X, Y) is true iff", "x < 0 is defined only when y is an integer, and always", "X=Template\" after systematic replacement of all variables in X by new variables.''' def", "if T is a variable then Sv is {T} * else if T", "Atomic_1, 'compound/1' : Compound_1, 'nonvar/1' : Nonvar_1, 'number/1' : Number_1, # Term comparison", "class ArithmeticLessThan_2(BuiltIn): \"\"\"'<'(@evaluable, @evaluable) '<'(E1, E2) is true iff evaluating E1 and E2", "if head.arity == 0: return Atomic(head.name) from ..core import renamed_copy return renamed_copy(head) class", "body = clause.value[2] else: head = clause body = Atomic.TRUE if isvariable(head): self.throw_instantiation_error()", "it.''' def execute(self, flag, value): from .. import core # for flags if", "Pred and all its clauses, leaving the database in the same state as", "is a list of the arguments of Term.\"\"\" def execute(self, term, elements): if", "as expressions the corresponding arithmetic value of E1 is greater than the corresponding", "a clause Clause which unifies with Head :- Body. It is used to", "isvariable(arity): self.throw_instantiation_error() if isvariable(term) and not isinstance(arity.value, int): self.throw_type_error('integer', arity) # TODO Missing", "Prolog x ** y with x < 0 is defined only when y", "after) if isnumber(before) and before.value < 0: self.throw_domain_error('not_less_than_zero', before) if isnumber(length) and length.value", "an atomic term and List is the list whose only element is Term,", "def divide(x, y): '''Redefined w.r.t. Python because in ISO Prolog div(x, y) with", "Can't directly use BuiltIn.unify because the bindings # between the renamed copy of", "return True class Assertz_1(BuiltIn): '''assertz(@clause) assertz(Clause) is true. 
It is used to add", "as the union of the variable set of V and the existential variable", "It is used to add Clause to the database after all existing clauses", "values.append(v._copy_term()) result = caller.solve_next() values = List.EMPTY_LIST if not values else List.from_list(values) return", "permission to a user-defined # procedure is handled directly by the database from", "### class Asserta_1(BuiltIn): '''asserta(@clause) asserta(Clause) is true. It is used to add Clause", "return self.unify(term, t) else: return False else: return False class CopyTerm_2(BuiltIn): '''copy_term(?term, ?term)", "less than or equal to the corresponding arithmetic value of E2.\"\"\" def execute(self,", "'''clause(+head, ?callable_term) clause(Head, Body) is true iff: * the predicate of Head is", "from ..core import PrologTypeError raise PrologTypeError('evaluable', Atomic(pi)) return functor(*args) from ..core import PrologTypeError", "squareroot(x): '''Redefined w.r.t. Python because we need to manage ValueError errors (e.g. for", "not result class Repeat_0(BuiltIn): '''repeat repeat is true. repeat is re-executable. ''' def", "& y def bitor(x, y): '''Redefined w.r.t. Python because we need to manage", "def execute(self, x, y): return x < y class TermGreaterThan_2(BuiltIn): \"\"\"'@>(@term, @term) Test", "== v2 class ArithmeticNotEqual_2(BuiltIn): \"\"\"'=\\='(@evaluable, @evaluable) '=\\='(E1, E2) is true iff evaluating E1", "return int(s) if isinstance(x, int) else s def float_integer_part(x): '''Redefined w.r.t. 
Python because", "return self.pick_data(atom1, atom2, atom12) def reexecute(self, atom1, atom2, atom12): self.reset_substitution() if not self.data:", "(self.unify(Atomic(term.name), name) and self.unify(Atomic(term.arity), arity)) if isinstance(term, Variable): if isinstance(name, Atomic) and arity.value", "return self.unify(t_list, instances) def _create_solution_list(self, s): return [] if s == List.EMPTY_LIST else", "the corresponding arithmetic value of E2.\"\"\" def execute(self, e1, e2): if isvariable(e1) or", "true. It is used to remove from the database the procedure specified by", "x): return not isvariable(x) class Number_1(BuiltIn): '''number(@term) number(X) is true if X is", "return v1 == v2 class ArithmeticNotEqual_2(BuiltIn): \"\"\"'=\\='(@evaluable, @evaluable) '=\\='(E1, E2) is true iff", "y) is valid only when x and y are integers. Also, we need", "isvariable(term) and elements == List.EMPTY_LIST: self.throw_domain_error('non_empty_list', elements) # TODO Missing max_arity related error", "* Term is an atomic term and List is the list whose only", "def execute(self, atom, charlist): if not isvariable(atom) and not isatom(atom): self.throw_type_error('atom', atom) if", "body) self.clauses = [] procedure = self.kb.search(head) if not procedure: return False if", "is handled directly by the database t = tuple(Variable('_') for i in range(arity.value))", "isinstance(x, int): from ..core import PrologTypeError raise PrologTypeError('integer', Atomic(n)) if not isinstance(y, int):", "isvariant(a1, a2): return False return True return False PREDICATES = { # Term", "or CT.''' def execute(self, x): return isinstance(x, Atomic) class Compound_1(BuiltIn): '''compound(@term) compound(X) is", "w.r.t. 
Python because in ISO Prolog mod(x, y) is valid only when x", "expression): if isvariable(expression): self.throw_instantiation_error() c = evaluate_expression(expression) return self.unify(result, Atomic(c)) def evaluate_expression(term): #", "return v1 > v2 class ArithmeticGreaterThanOrEqual_2(BuiltIn): \"\"\"'>='(@evaluable, @evaluable) '>='(E1, E2) is true iff", "Not_1, 'repeat/0' : Repeat_0, # Atomic term processing (ISO 8.16) 'atom_length/2' : AtomLength_2,", "Compound(name, *elements.as_list()[1:]) return self.unify(term, t) else: return False else: return False class CopyTerm_2(BuiltIn):", "### Bitwise functors (ISO 9.4) ### class Is_2(BuiltIn): \"\"\"is(?term, @evaluable) 'is'(Result, Expression) is", "existential variable set of T.''' vst = variable_set(t) vsv = variable_set(v) est =", "int): self.throw_type_error('integer', arity) if not isatom(name): self.throw_type_error('atom', name) # TODO Missing max_arity related", "not y: from ..core import PrologEvaluationError raise PrologEvaluationError('zero_divisor') if not isinstance(x, int): from", "if clause.predicate_indicator() == ':-/2': head = clause.value[1] body = clause.value[2] else: head =", "'findall/3' : Findall_3, 'bagof/3' : Bagof_3, 'setof/3' : Setof_3, # Logic and control", "'\\\\+'(Term) is true iff call(Term) is false.\"\"\" def execute(self, term): if isvariable(term): self.throw_instantiation_error()", ".. 
import core # for flags if flag.name not in core._FLAGS: self.throw_domain_error('prolog_flag', flag)", "v2 = evaluate_expression(e2) return v1 <= v2 class ArithmeticGreaterThan_2(BuiltIn): \"\"\"'>'(@evaluable, @evaluable) '>'(E1, E2)", "elements.head.name t = Compound(name, *elements.as_list()[1:]) return self.unify(term, t) else: return False else: return", "def pick_data(self, atom, before, length, after, subatom): s, p = self.data.pop(0) b =", "only when x is a float.''' if not isinstance(x, float): from ..core import", "G is T.''' if term.name == '^' and term.arity == 2: return iterated_goal_term(term.value[2])", "def execute(self, number, codelist): if isvariable(number) and ispartiallist(codelist): self.throw_instantiation_error() if isvariable(number): for element", "return {term} else: term = deref(term) if isinstance(term, Atomic): return set() s =", "Atomic(c[0])) and self.unify(atom2, Atomic(c[1])) and self.unify(atom12, Atomic(c[2]))) class SubAtom_5(BuiltIn): '''sub_atom(+atom, ?integer, ?integer, ?integer,", "int): from ..core import PrologTypeError raise PrologTypeError('integer', Atomic(s)) return x & y def", "'>='(E1, E2) is true iff evaluating E1 and E2 as expressions the corresponding", "Atomic): self.throw_type_error('atomic', name) if isvariable(term) and not isatom(name) and arity.value > 0: self.throw_type_error('atom',", "database t = tuple(Variable('_') for i in range(arity.value)) c = Compound(name.name, *t) from", "set I or F and is false if X is a member of", "is true iff X and Y are not identical terms.\"\"\" def execute(self, x,", "chars] return self.unify(charlist, List.from_list(lst)) class NumberCodes_2(BuiltIn): '''number_codes(+number, ?character_code_list)\\nnumber_codes(-number, ?character_code_list) number_codes(Number, List) is true", "found is undefined.''' def execute(self, template, goal, instances): fvs = free_variable_set(goal, template) self.witness", "= evaluate_expression(e2) return v1 < v2 class 
ArithmeticLessThanOrEqual_2(BuiltIn): \"\"\"'=<'(@evaluable, @evaluable) '=<'(E1, E2) is", "'''halt(+integer) halt(X) neither succeeds nor fails. It has the side effect of exiting", "isvariable(element): self.throw_instantiation_error() if isatom(element) and len(element.name) != 1: self.throw_type_error('character', element) if isvariable(atom): from", "* else G is T.''' if term.name == '^' and term.arity == 2:", "if isinstance(term, (Compound, List)): return (self.unify(Atomic(term.name), name) and self.unify(Atomic(term.arity), arity)) if isinstance(term, Variable):", "= List.from_list([Atomic(term.name)] + list(term.value[1:])) return self.unify(elements, l) if isinstance(term, Variable): # elements is", "which each list is found is undefined.''' def _create_solution_list(self, s): solutions = []", "if (not isvariable(after) and not (isnumber(after) and isinstance(after.value, int))): self.throw_type_error('integer', after) if isnumber(before)", "chars = [chr(code.value) for code in codelist.as_list()] return self.unify(atom, Atomic(''.join(chars))) elif isvariable(codelist) or", "return False return x >= y ### ### Term creation and decomposition (ISO", "!= y class TermLessThan_2(BuiltIn): \"\"\"'@<'(@term, @term) Test the ordering of two terms. '@<'(X,", "iff Instances unifies with the list of values to which a variable X", "not isatom(subatom): self.throw_type_error('atom', subatom) if (not isvariable(before) and not (isnumber(before) and isinstance(before.value, int))):", "of Goal * else G is T.''' if term.name == '^' and term.arity", "@evaluable) '=<'(E1, E2) is true iff evaluating E1 and E2 as expressions the", "n.index(d, p) - len(d) == after.value] if isinstance(subatom, Atomic): self.data = [(d, p)", "0: self.throw_domain_error('not_less_than_zero', arity) if isvariable(term) and not isinstance(name, Atomic): self.throw_type_error('atomic', name) if isvariable(term)", "<< s def bitand(x, y): '''Redefined w.r.t. 
Python because we need to manage", "PrologTypeError('integer', Atomic(s)) return x & y def bitor(x, y): '''Redefined w.r.t. Python because", "for char in charlist.as_list()]) try: # the parser needs an End Token n", "self.throw_type_error('atom', atom1) if not isvariable(atom2) and not isatom(atom2): self.throw_type_error('atom', atom2) if not isvariable(atom12)", "write_canonical/1 lst = [Atomic(ord(c)) for c in chars] return self.unify(codelist, List.from_list(lst)) ### ###", "execute(self, template, goal, instances): if isvariable(goal): self.throw_instantiation_error() if isnumber(goal): self.throw_type_error('callable', goal) if (not", "Retract_1, 'abolish/1' : Abolish_1, # All solutions (ISO 8.10) 'findall/3' : Findall_3, 'bagof/3'", "'=<'(E1, E2) is true iff evaluating E1 and E2 as expressions the corresponding", "None} if not self.flags: return False return self.pick_flag(flag, value) def reexecute(self, flag, value):", "Atomic): self.data = [(d, p) for (d, p) in self.data if n.index(d, p)", "iff call(Term) is false.\"\"\" # \"\"\"'\\\\+'(@callable_term) # # '\\\\+'(Term) is true iff call(Term)", "var(X) is true iff X is a member of the set V.''' def", "'/' and pi.arity == 2: name, arity = pi.value[1:] if isvariable(name) or isvariable(arity):", "number(X) is true if X is a member of the set I or", "> v2 class ArithmeticGreaterThanOrEqual_2(BuiltIn): \"\"\"'>='(@evaluable, @evaluable) '>='(E1, E2) is true iff evaluating E1", "names are the successive characters of the name of atom Atom.''' def execute(self,", "a member of the set I or F and is false if X", "c = chr(code.value) return self.unify(char, Atomic(c)) elif isvariable(code): c = ord(char.name) return self.unify(code,", "three pieces, AtomL, Sub_atom, and AtomR, such that Before is the number of", "isatom(atom): self.throw_type_error('atom', atom) if isvariable(atom): if isvariable(charlist): self.throw_instantiation_error() if not islist(charlist) and not", "self.throw_type_error('atom', atom) if 
isvariable(atom): if isvariable(charlist): self.throw_instantiation_error() if not islist(charlist) and not ispartiallist(charlist):", "there is a clause in the database which corresponds to a term H", "pi) else: self.throw_type_error('predicate_indicator', pi) self.kb.abolish(pi) return True ### ### All solutions (ISO 8.10)", "end: for i in range(start, end + 1): self.data.append((n[start:i], start)) start += 1", "c = self.clauses_to_unify.pop(0) return self.unify(Compound('clause', head, body), c) class Abolish_1(BuiltIn): '''abolish(@predicate_indicator) abolish(Pred) is", "deref args = (evaluate_expression(deref(a)) for a in term.value[1:]) pi = term.predicate_indicator() functor =", "ispartiallist(charlist): self.throw_type_error('list', charlist) for element in charlist.as_list(): if isvariable(element): self.throw_instantiation_error() if isatom(element) and", "isvariable(flag) or isvariable(value): self.throw_instantiation_error() if not isvariable(flag) and not isatom(flag): self.throw_type_error('atom', flag) from", "(Compound, List)): return (self.unify(Atomic(term.name), name) and self.unify(Atomic(term.arity), arity)) if isinstance(term, Variable): if isinstance(name,", "class Atom_1(BuiltIn): '''atom(@term) atom(X) is true iff X is a member of the", "variable X not occurring in Template or Goal would be instantiated by successive", "A.''' def execute(self, x): return isatom(x) class Integer_1(BuiltIn): '''integer(@term) integer(X) is true iff", "true if X is a member of the set A or I or", "corresponding arithmetic values are not equal.\"\"\" def execute(self, e1, e2): if isvariable(e1) or", "self.throw_instantiation_error() if islist(elements) and not isatom(elements.head) and len(elements) > 1: self.throw_type_error('atom', elements.head) if", "bindings # between the renamed copy of t1 and t2 retain validity #", "== 2: return iterated_goal_term(term.value[2]) return term def isvariant(t, v): '''Two terms are variants", "islist(elements) and 
isinstance(elements.head, Compound) and len(elements) > 1: self.throw_type_error('atomic', elements.head) if isvariable(term) and", "indicator = Compound('/', Atomic(n), Atomic(int(a))) from .. import core if core.unify(pi, indicator) is", "self.throw_instantiation_error() if not islist(charlist) and not ispartiallist(charlist): self.throw_type_error('list', charlist) for element in charlist.as_list():", "raise PrologEvaluationError('zero_divisor') if not isinstance(x, int): from ..core import PrologTypeError raise PrologTypeError('integer', Atomic(x))", "e not in wt_list] from .. import core for wwtt, t in zip(wt_list,", "return n << s def bitand(x, y): '''Redefined w.r.t. Python because we need", "return x < y class TermGreaterThan_2(BuiltIn): \"\"\"'@>(@term, @term) Test the ordering of two", "1): self.data.append((n[start:i], start)) start += 1 if isinstance(before, Atomic): self.data = [(d, p)", "atom12.name self.data = [(s[:i], s[i:], s) for i in range(len(s) + 1)] elif", "if not isvariable(number) and not isnumber(number): self.throw_type_error('number', number) if isvariable(number) and not islist(charlist)", "we need to manage ZeroDivisionError errors on our own.''' if x < 0", "of the set A or I or F and is false if X", ": leftshift, '/\\\\/2' : bitand, '\\\\//2' : bitor, '\\\\/1' : bitnot} return d.get(name)", "set EV of a term T is a set of variables defined recursively", "if not s.endswith(atom2.name): return False else: i = s.index(atom2.name) self.data = [(s[:i], s[i:],", "the union of the variable sets for each of the arguments of T.'''", "i = len(atom1.name) self.data = [(s[:i], s[i:], s)] else: n1 = atom1.name n2", "subatom) def pick_data(self, atom, before, length, after, subatom): s, p = self.data.pop(0) b", "isnumber(length) and length.value < 0: self.throw_domain_error('not_less_than_zero', length) if isnumber(after) and after.value < 0:", "isvariable(term) and ispartiallist(elements): self.throw_instantiation_error() if not islist(elements) 
and not ispartiallist(elements): self.throw_type_error('list', elements) if", "or isvariable(e2): self.throw_instantiation_error() v1 = evaluate_expression(e1) v2 = evaluate_expression(e2) return v1 <= v2", "results from replacing each variable X in the former by Xs.''' from ..core", "else: self.throw_type_error('predicate_indicator', pi) self.kb.abolish(pi) return True ### ### All solutions (ISO 8.10) ###", "the one-char atoms corresponding to a character sequence of Number which could be", "math import copysign s = copysign(1, x) return int(s) if isinstance(x, int) else", "..core import deref t = deref(t) v = deref(v) if isinstance(t, Atomic) and", "self.flags.pop() return self.unify(flag, Atomic(f.name)) and self.unify(value, Atomic(f.value)) class Halt_0(BuiltIn): '''halt halt neither succeeds", "set difference of the variable set of T and BV where BV is", "ArithmeticGreaterThanOrEqual_2(BuiltIn): \"\"\"'>='(@evaluable, @evaluable) '>='(E1, E2) is true iff evaluating E1 and E2 as", "len(d) == after.value] if isinstance(subatom, Atomic): self.data = [(d, p) for (d, p)", "vst = variable_set(t) vsv = variable_set(v) est = existential_variable_set(t) return vst.difference(vsv.union(est)) # TODO", "def execute(self, n, term, arg): if isvariable(n) or isvariable(term): self.throw_instantiation_error() if not isinstance(n.value,", "def execute(self, term): if isvariable(term): self.throw_instantiation_error() if isnumber(term): self.throw_type_error('callable', term) from .. import", "+atom)\\natom_concat(+atom, +atom, -atom) atom_concat(Atom_1, Atom_2, Atom_12) is true iff characters of the name", "0) self.data = [] while start <= end: for i in range(start, end", "corresponding arithmetic value of E2.\"\"\" def execute(self, e1, e2): if isvariable(e1) or isvariable(e2):", ".. import core return core.unify(x, y) is None ### ### Type testing (ISO", "Term, or * Term is a compound term and List is the list", "a float. 
Also, we need to manage ZeroDivisionError errors on our own.''' if", "// y is valid only when x and y are integers. Also, we", "term = deref(term) if isinstance(term, Atomic): return set() s = set() if isinstance(term,", "Compound('/', Atomic(head.name), Atomic(head.arity)) self.throw_permission_error('access', 'private_procedure', pi) from .. import core for clause in", "# FIXME this should use write_canonical/1 lst = [Atomic(c) for c in chars]", "+character_code_list) atom_codes(Atom, List) is true iff List is a list whose elements correspond", "term of Goal * else G is T.''' if term.name == '^' and", "?term) arg(N, Term, Arg) is true iff the Nth argument of Term is", "of the name of the atom Atom_2 to the characters of the name", "Goal for each different instantiation of the free variables in it. The elements", ": Not_1, 'repeat/0' : Repeat_0, # Atomic term processing (ISO 8.16) 'atom_length/2' :", "= List(term) return self.unify(elements, l) if isinstance(term, Compound): l = List.from_list([Atomic(term.name)] + list(term.value[1:]))", "context of the copy_term/2 built-in mgu = core.unify(t2, t) if mgu is not", "not term.value: value = mgu.get(term.name) if value: return value else: return ground(term.binding(), mgu)", "# procedure is handled directly by the database from ..builtin import search_builtin if", "of two terms. 
'\\=='(X, Y) is true iff X and Y are not", "'_' and y.name == '_'): return True return x != y class TermLessThan_2(BuiltIn):", "head = clause.value[1] body = clause.value[2] else: head = clause body = Atomic.TRUE", "member of the set I.''' def execute(self, x): return x.arity == 0 and", "arity = pi.value[1:] if isvariable(name) or isvariable(arity): self.throw_instantiation_error() if not isinstance(arity.value, int): self.throw_type_error('integer',", "if not isinstance(n.value, int): self.throw_type_error('integer', n) if not isinstance(term, Compound): self.throw_type_error('compound', term) if", "After, Sub_atom) is true iff atom Atom can be broken into three pieces,", "crashes if you attempt to unify two STO terms by =/2 # instead", "def execute(self): return True def reexecute(self): return True ### ### Atomic term processing", "return self.unify(values, instances) def ground(term, mgu): if isinstance(term, Variable): if not term.value: value", "if not self.s: return False return self.pick_bag(template, goal, instances) def pick_bag(self, template, goal,", "t.name != v.name or t.arity != v.arity: return False bijection = {} for", "in codelist.as_list()]) try: # the parser needs an End Token n = PrologParser(s", "and not isatom(atom): self.throw_type_error('atom', atom) if not isvariable(subatom) and not isatom(subatom): self.throw_type_error('atom', subatom)", "'''assertz(@clause) assertz(Clause) is true. It is used to add Clause to the database", "{ # Term unification (ISO 8.2) '=/2' : Unify_2, '\\=/2' : NotUnifiable_2, #", "even when using parentheses! 
'\\+/1' : Not_1, 'not/1' : Not_1, 'repeat/0' : Repeat_0,", "not isvariable(subatom) and not isatom(subatom): self.throw_type_error('atom', subatom) if (not isvariable(before) and not (isnumber(before)", "B which unifies with Head :- Body.''' def execute(self, head, body): if isvariable(head):", "and after.value < 0: self.throw_domain_error('not_less_than_zero', after) n = atom.name start = before.value if", "culprit = Compound('+', flag, value) self.throw_domain_error('flag_value', culprit) core._FLAGS[flag.name] = f._replace(value=value.name) return True class", "execute(self, e1, e2): if isvariable(e1) or isvariable(e2): self.throw_instantiation_error() v1 = evaluate_expression(e1) v2 =", "re-executions of \"call(Goal), X=Template\" after systematic replacement of all variables in X by", "name of the atom Atom.''' def execute(self, atom, length): if isvariable(atom): self.throw_instantiation_error() if", "return term.value if isinstance(term, Compound): from ..core import deref args = (evaluate_expression(deref(a)) for", "int): from ..core import PrologTypeError raise PrologTypeError('integer', Atomic(s)) return x | y def", "isvariable(e1) or isvariable(e2): self.throw_instantiation_error() v1 = evaluate_expression(e1) v2 = evaluate_expression(e2) return v1 ==", "self.pick_data(atom, before, length, after, subatom) def reexecute(self, atom, before, length, after, subatom): self.reset_substitution()", "### Clause creation and destruction (ISO 8.9) ### class Asserta_1(BuiltIn): '''asserta(@clause) asserta(Clause) is", "Python because we need to manage ValueError errors (e.g. 
for x < 0)", "values = List.EMPTY_LIST if not values else List.from_list(values) return self.unify(values, instances) def ground(term,", "self.flags = {f for f in core._FLAGS.values() if core.unify(flag, Atomic(f.name)) is not None}", "is handled directly by the database from ..builtin import search_builtin if search_builtin(head): pi", "operator.abs, 'sign/1' : sign, 'float_integer_part/1' : float_integer_part, 'float_fractional_part/1' : float_fractional_part, '**/2' : power,", "in ISO Prolog x ** y with x < 0 is defined only", "= a2 else: if not isvariant(a1, a2): return False return True return False", "Logic and control (ISO 8.15) # FIXME \\+ does not work because of", "predicate indicator Pred and all its clauses, leaving the database in the same", "int) else s def float_integer_part(x): '''Redefined w.r.t. Python because in ISO Prolog float_integer_part(x)", "?character_list)\\nnumber_chars(-number, +character_list) number_chars(Number, List) is true iff List is a list whose elements", "and len(elements) > 1: self.throw_type_error('atomic', elements.head) if isvariable(term) and elements == List.EMPTY_LIST: self.throw_domain_error('non_empty_list',", "bijection s of the variables of the former to the variables of the", "manage ValueError errors (e.g. 
for x < 0) on our own.''' if x", "core # for flags if not isvariable(flag) and not isatom(flag): self.throw_type_error('atom', flag) if", "float, 'abs/1' : operator.abs, 'sign/1' : sign, 'float_integer_part/1' : float_integer_part, 'float_fractional_part/1' : float_fractional_part,", "v2 = evaluate_expression(e2) return v1 > v2 class ArithmeticGreaterThanOrEqual_2(BuiltIn): \"\"\"'>='(@evaluable, @evaluable) '>='(E1, E2)", "for (d, p) in self.data if len(n) - n.index(d, p) - len(d) ==", "Clause retrival and information (ISO 8.8) ### class Clause_2(BuiltIn): '''clause(+head, ?callable_term) clause(Head, Body)", "between the renamed copy of t1 and t2 retain validity # only in", "return self.unify(term, name) and self.unify(arity, Atomic(0)) if isinstance(term, (Compound, List)): return (self.unify(Atomic(term.name), name)", "for e in term.as_list(): s.update(variable_set(e)) return s def existential_variable_set(term): '''The existential variables set", "isinstance(n.value, int): self.throw_type_error('integer', n) if not isinstance(term, Compound): self.throw_type_error('compound', term) if n.value <", "self.clauses_to_remove = [] procedure = self.kb.search(head) if not procedure: return False from ..", "not isinstance(s, int): from ..core import PrologTypeError raise PrologTypeError('integer', Atomic(s)) return n >>", "term results from replacing each variable X in the former by Xs.''' from", "set I.''' def execute(self, x): return x.arity == 0 and isinstance(x.value, int) class", "@evaluable) '=\\='(E1, E2) is true iff evaluating E1 and E2 as expressions the", "terms by =/2 # instead of using the proper unify_with_occur_check/2 predicate. return self.unify(x,", "X is a member of the set A or I or F and", "v): '''Two terms are variants if there is a bijection s of the", "FIXME this should use write_canonical/1 lst = [Atomic(c) for c in chars] return", "succeeds nor fails. 
It has the side effect of exiting from the processor", "n2 = atom2.name self.data = [(n1, n2, n1 + n2)] return self.pick_data(atom1, atom2,", "in codelist.as_list(): if isvariable(element): self.throw_instantiation_error() if not isvariable(number) and not isnumber(number): self.throw_type_error('number', number)", "isvariable(x) class Number_1(BuiltIn): '''number(@term) number(X) is true if X is a member of", "b)) self.clauses_to_remove.append(clause) return self.pick_clause(head, body) def reexecute(self, clause): self.reset_substitution() if clause.predicate_indicator() == ':-/2':", "x is a float.''' if not isinstance(x, float): from ..core import PrologTypeError raise", "'''Redefined w.r.t. Python because we need to manage TypeError errors (e.g. x or", "### Other arithmetic functors (ISO 9.3) ### Bitwise functors (ISO 9.4) ### class", "'atom_concat/3' : AtomConcat_3, 'sub_atom/5' : SubAtom_5, 'atom_chars/2' : AtomChars_2, 'atom_codes/2' : AtomCodes_2, 'char_code/2'", "before.value] if isinstance(length, Atomic): self.data = [(d, p) for (d, p) in self.data", "code in codelist.as_list()] return self.unify(atom, Atomic(''.join(chars))) elif isvariable(codelist) or ispartiallist(codelist): codes = [Atomic(ord(char))", "variables bindings # outside the context of the copy_term/2 built-in if t2.name in", "self.s[0] wt_list = [e for e in self.s if isvariant(wt.value[1], e.value[1])] t_list =", "X and Y are not identical terms.\"\"\" def execute(self, x, y): # The", "if n.value < 0: self.throw_domain_error('not_less_than_zero', n) if n.value >= len(term.value): return False return", "value) def reexecute(self, flag, value): self.reset_substitution() if not self.flags: return False return self.pick_flag(flag,", "V is a set of variables defined as the set difference of the", "body) def pick_clause(self, head, body): if not self.clauses_to_remove: return False self.kb.retract(self.clauses_to_remove.pop(0)) c =", "isvariable(pi) and not (pi.name == '/' and pi.arity == 2): 
self.throw_type_error('predicate_indicator', pi) self.indicators", "not self.clauses_to_remove: return False self.kb.retract(self.clauses_to_remove.pop(0)) c = self.clauses_to_unify.pop(0) return self.unify(Compound('clause', head, body), c)", "E2) is true iff evaluating E1 and E2 as expressions the corresponding arithmetic", "int) else 0.0 from math import copysign s = copysign(1, x) return int(s)", "logarithm(x): '''Redefined w.r.t. Python because we need to manage ValueError errors (e.g. for", "if isnumber(length) and length.value < 0: self.throw_domain_error('not_less_than_zero', length) if isnumber(after) and after.value <", "terms. '@<'(X, Y) is true iff X preceeds Y.\"\"\" def execute(self, x, y):", "for i in self.kb: n, a = i.split('/') indicator = Compound('/', Atomic(n), Atomic(int(a)))", "directly use BuiltIn.unify because the bindings # between the renamed copy of t1", "s)] else: n1 = atom1.name n2 = atom2.name self.data = [(n1, n2, n1", "..core import PrologTypeError raise PrologTypeError('float', Atomic(x)) from math import modf f, i =", ":- B which unifies with Head :- Body.''' def execute(self, head, body): if", "char) if not isvariable(code) and not isinstance(code.value, int): self.throw_type_error('integer', code) if not isvariable(code):", "of the variable set of V and the existential variables set of the", "return x.arity == 0 and isinstance(x.value, float) class Atomic_1(BuiltIn): '''atomic(@term) atomic(X) is true", "if islist(elements) and not isatom(elements.head) and len(elements) > 1: self.throw_type_error('atom', elements.head) if islist(elements)", "if not isvariable(atom12) and not isatom(atom12): self.throw_type_error('atom', atom12) if isvariable(atom1) and isvariable(atom2): s", "elements.arity == 2: if len(elements) == 1: t = elements.head return self.unify(term, t)", "return i def float_fractional_part(x): '''Redefined w.r.t. 
Python because in ISO Prolog float_fractional_part(x) is", "execute(self, x, y): # The Python __eq__ method does not hold Prolog #", "the atom Atom_2 to the characters of the name of the atom Atom_1.'''", "Bitwise functors (ISO 9.4) ### class Is_2(BuiltIn): \"\"\"is(?term, @evaluable) 'is'(Result, Expression) is true", "atom, before, length, after, subatom): self.reset_substitution() if not self.data: return False return self.pick_data(atom,", "\"\"\"'='(?term, ?term) If X and Y are NSTO (Not Subject To Occur-check) then", "least one dynamic procedure with a clause Clause which unifies with Head :-", "isvariable(length) and not (isnumber(length) and isinstance(length.value, int))): self.throw_type_error('integer', length) if isnumber(length) and length.value", "before, length, after, subatom): self.reset_substitution() if not self.data: return False return self.pick_data(atom, before,", "is the value currently associated with it.''' def execute(self, flag, value): from ..", "subatom.value] if not self.data: return False return self.pick_data(atom, before, length, after, subatom) def", "you attempt to unify two STO terms by =/2 # instead of using", "def reexecute(self, template, goal, instances): self.reset_substitution() if not self.s: return False return self.pick_bag(template,", "isvariable(flag) and not isatom(flag): self.throw_type_error('atom', flag) if isatom(flag) and not core._FLAGS[flag.name]: self.throw_domain_error('prolog_flag', flag)", "NotUnifiable_2(BuiltIn): \"\"\"'\\\\='(@term, @term) If X and Y are NSTO (Not Subject To Occur-check)", "'atomic/1' : Atomic_1, 'compound/1' : Compound_1, 'nonvar/1' : Nonvar_1, 'number/1' : Number_1, #", "and Value is the value currently associated with it.''' def execute(self, flag, value):", "list for e in term.as_list(): s.update(variable_set(e)) return s def existential_variable_set(term): '''The existential variables", "len(char.name) != 1: self.throw_type_error('character', char) if not isvariable(code) and not 
isinstance(code.value, int): self.throw_type_error('integer',", "or islist(codelist): from ..parser import PrologParser, InvalidTermException s = ''.join([chr(code.value) for code in", "Y are identical terms.\"\"\" def execute(self, x, y): # The Python __eq__ method", "length, after, subatom) def pick_data(self, atom, before, length, after, subatom): s, p =", "s.update(variable_set(arg)) else: # a list for e in term.as_list(): s.update(variable_set(e)) return s def", "related errors if arity.value < 0: self.throw_domain_error('not_less_than_zero', arity) # error on access permission", "isvariant(t, v): '''Two terms are variants if there is a bijection s of", "not iscallable(head): self.throw_type_error('callable', head) if not (isvariable(body) or iscallable(body)): self.throw_type_error('callable', body) self.clauses =", "E1 and E2 as expressions the corresponding arithmetic value of E1 is less", "Compound): from ..core import deref args = (evaluate_expression(deref(a)) for a in term.value[1:]) pi", "self.witness = Compound('witness', *fvs) if fvs else Atomic('witness') g = iterated_goal_term(goal) findall =", "[Atomic(ord(c)) for c in chars] return self.unify(codelist, List.from_list(lst)) ### ### Implementation defined hooks", "decomposition (ISO 8.5) ### class Functor_3(BuiltIn): '''functor(-nonvar, +atomic, +integer) functor(+nonvar, ?atomic, ?integer) functor(Term,", "len(elements) > 1: self.throw_type_error('atomic', elements.head) if isvariable(term) and elements == List.EMPTY_LIST: self.throw_domain_error('non_empty_list', elements)", "self.throw_instantiation_error() if isatom(element) and len(element.name) != 1: self.throw_type_error('character', element) if isvariable(atom): from ..core", "ispartiallist(instances))): self.throw_type_error('list', instances) from .. import core caller = core.Caller() caller._kb = self.kb", "of AtomL, Length is the number of characters of the name of Sub_atom,", "is true. 
It is used to remove from the database the procedure specified", "Sub_atom, and After is the number of characters of the name of AtomR.'''", "self.unify(elements, l) if isinstance(term, Compound): l = List.from_list([Atomic(term.name)] + list(term.value[1:])) return self.unify(elements, l)", "and not ispartiallist(charlist): self.throw_type_error('list', charlist) if islist(charlist): for element in charlist.as_list(): if isatom(element)", "in codelist.as_list(): if isvariable(element): self.throw_instantiation_error() if not isvariable(element): try: chr(element.value) except UnicodeDecodeError: self.throw_representation_error(element)", "repeat is true. repeat is re-executable. ''' def execute(self): return True def reexecute(self):", "Term_1.''' def execute(self, t1, t2): from .. import core #t = core.renamed_copy(t1) t", "variables set EV of a term T is a set of variables defined", "if isinstance(after, Atomic) else 0) self.data = [] while start <= end: for", "Term_2 unifies with a term T which is a renamed copy of Term_1.'''", "return True def reexecute(self): return True ### ### Atomic term processing (ISO 8.16)", "validity # only in the context of the copy_term/2 built-in mgu = core.unify(t2,", "head, body), c) def convert_clause_to_term(head, body): return (convert_to_term(head), convert_to_term(body)) def convert_to_term(head): if head.arity", "is true iff X is a member of the set CT.''' def execute(self,", "if isinstance(after, Atomic): self.data = [(d, p) for (d, p) in self.data if", "Atomic, Variable, Compound, List from ..parser import isvariable, isatom, isnumber, islist, ispartiallist, iscallable", "arguments of T.''' from ..core import deref if isinstance(term, Variable): if term.isfree(): return", "the arguments of Term.\"\"\" def execute(self, term, elements): if isvariable(term) and ispartiallist(elements): self.throw_instantiation_error()", "and destruction (ISO 8.9) 'asserta/1' : Asserta_1, 'assertz/1' : Assertz_1, 'retract/1' : Retract_1,", 
"not (isnumber(length) and isinstance(length.value, int))): self.throw_type_error('integer', length) if (not isvariable(after) and not (isnumber(after)", "not isatom(elements.head) and len(elements) > 1: self.throw_type_error('atom', elements.head) if islist(elements) and isinstance(elements.head, Compound)", "functor: from ..core import PrologTypeError raise PrologTypeError('evaluable', Atomic(pi)) return functor(*args) from ..core import", "is found is undefined.''' def execute(self, template, goal, instances): fvs = free_variable_set(goal, template)", "..parser import isvariable, isatom, isnumber, islist, ispartiallist, iscallable from ..core import BuiltIn ###", "NSTO (Not Subject To Occur-check) then '='(X, Y) is true iff X and", "if isvariable(goal): self.throw_instantiation_error() if isnumber(goal): self.throw_type_error('callable', goal) if (not isvariable(instances) and (not islist(instances)", "template, goal, instances): if isvariable(goal): self.throw_instantiation_error() if isnumber(goal): self.throw_type_error('callable', goal) if (not isvariable(instances)", "of E1 is less than the corresponding arithmetic value of E2.\"\"\" def execute(self,", ": Findall_3, 'bagof/3' : Bagof_3, 'setof/3' : Setof_3, # Logic and control (ISO", "creation and decomposition (ISO 8.5) 'functor/3' : Functor_3, 'arg/3' : Arg_3, '=../2' :", "in which each list is found is undefined.''' def _create_solution_list(self, s): solutions =", "= [] self.clauses_to_remove = [] procedure = self.kb.search(head) if not procedure: return False", "if isvariable(atom1) and isvariable(atom12): self.throw_instantiation_error() if isvariable(atom2) and isvariable(atom12): self.throw_instantiation_error() if not isvariable(atom1)", "s = copysign(1, x) return int(s) if isinstance(x, int) else s def float_integer_part(x):", "body) def reexecute(self, clause): self.reset_substitution() if clause.predicate_indicator() == ':-/2': head = clause.value[1] body", 
"self.throw_representation_error(element) if isvariable(atom): chars = [chr(code.value) for code in codelist.as_list()] return self.unify(atom, Atomic(''.join(chars)))", "a clause renaming) temp = mgu[t2.name] mgu.reduce() mgu.update({t2.name : temp}) else: mgu.reduce() self.substitution.update(mgu)", "because we need to manage TypeError errors (e.g. n as float) on our", "isinstance(s, int): from ..core import PrologTypeError raise PrologTypeError('integer', Atomic(s)) return n >> s", "subatom): if isvariable(atom): self.throw_instantiation_error() if not isvariable(atom) and not isatom(atom): self.throw_type_error('atom', atom) if", "of Head is public, and * there is a clause in the database", "after all existing clauses of the procedure whose predicate is equal to the", "8.7) ### class ArithmeticEqual_2(BuiltIn): \"\"\"'=:='(@evaluable, @evaluable) '=:='(E1, E2) is true iff evaluating E1", "and X are identical terms.\"\"\" def execute(self, x, y): # The Python __eq__", "class AtomLength_2(BuiltIn): '''atom_length(+atom, ?integer) atom_length(Atom, Length) is true iff integer Length equals the", "less than the corresponding arithmetic value of E2.\"\"\" def execute(self, e1, e2): if", "isinstance(length, Atomic): self.data = [(d, p) for (d, p) in self.data if len(d)", "Value) enables the value associated with a Prolog flag to be altered.''' def", "= core.unify(t2, t) if mgu is not None: if mgu: t2.apply(mgu) # Do", "= f._replace(value=value.name) return True class CurrentPrologFlag_2(BuiltIn): '''current_prolog_flag(?flag, ?term) current_prolog_flag(Flag, Value) is true iff", "else: return False else: return False class CopyTerm_2(BuiltIn): '''copy_term(?term, ?term) copy_term(Term_1, Term_2) is", "isinstance(y, float): from ..core import PrologEvaluationError raise PrologEvaluationError('undefined') if not x and y", "if not isatom(name): self.throw_type_error('atom', name) # TODO Missing max_arity related errors if arity.value", "Goal * else G is T.''' if 
term.name == '^' and term.arity ==", "TypeError errors (e.g. n as float) on our own.''' if not isinstance(n, int):", "if isinstance(term, Variable): if term.isfree(): return {term} else: term = deref(term) if isinstance(term,", "= evaluate_expression(e2) return v1 == v2 class ArithmeticNotEqual_2(BuiltIn): \"\"\"'=\\='(@evaluable, @evaluable) '=\\='(E1, E2) is", "bitnot} return d.get(name) def divide(x, y): '''Redefined w.r.t. Python because in ISO Prolog", "set of V and the existential variable set of T.''' vst = variable_set(t)", "or Y and X are identical terms.\"\"\" def execute(self, x, y): # The", "equals the number of characters of the name of the atom Atom.''' def", "self.reset_substitution() if not self.data: return False return self.pick_data(atom, before, length, after, subatom) def", "value of E1 is less than the corresponding arithmetic value of E2.\"\"\" def", "is true if X is a member of the set A or I", "rightshift(n, s): '''Redefined w.r.t. Python because we need to manage TypeError errors (e.g.", "== v if isvariable(t) and isvariable(v): return True if isinstance(t, Compound) and isinstance(v,", "true iff the character code for the one-char atom Char is Code.''' def", "order in which each list is found is undefined.''' def _create_solution_list(self, s): solutions", "'>'(E1, E2) is true iff evaluating E1 and E2 as expressions the corresponding", "PrologEvaluationError raise PrologEvaluationError('undefined') return float(x ** y) def logarithm(x): '''Redefined w.r.t. Python because", "of the copy_term/2 built-in if t2.name in mgu: # Still preserve the binding", "float): from ..core import PrologTypeError raise PrologTypeError('float', Atomic(x)) from math import modf f,", "y def bitnot(x): '''Redefined w.r.t. 
Python because we need to manage TypeError errors", "remove from the database the procedure specified by the predicate indicator Pred and", "..core import PrologTypeError raise PrologTypeError('integer', Atomic(n)) if not isinstance(s, int): from ..core import", ": Abolish_1, # All solutions (ISO 8.10) 'findall/3' : Findall_3, 'bagof/3' : Bagof_3,", "a predicate indicator for one of the user-defined procedures in the database.''' def", "isinstance(x, (Compound, List)) class Nonvar_1(BuiltIn): '''nonvar(@term) nonvar(X) is true iff X is not", "self.unify(codelist, List.from_list(lst)) ### ### Implementation defined hooks (ISO 8.17) ### class SetPrologFlag_2(BuiltIn): '''set_prolog_flag(+flag,", "search_evaluable_functor(pi) if not functor: from ..core import PrologTypeError raise PrologTypeError('evaluable', Atomic(pi)) return functor(*args)", "float) on our own.''' if not isinstance(x, int): from ..core import PrologTypeError raise", "this should use write_canonical/1 lst = [Atomic(ord(c)) for c in chars] return self.unify(codelist,", "Atom_2, Atom_12) is true iff characters of the name of the atom Atom_12", "occurring in Template or Goal would be instantiated by successive re-executions of \"call(Goal),", "= [(n1, n2, n1 + n2)] return self.pick_data(atom1, atom2, atom12) def reexecute(self, atom1,", "before) if isnumber(length) and length.value < 0: self.throw_domain_error('not_less_than_zero', length) if isnumber(after) and after.value", "defined as the union of the variable set of V and the existential", "class ArithmeticLessThanOrEqual_2(BuiltIn): \"\"\"'=<'(@evaluable, @evaluable) '=<'(E1, E2) is true iff evaluating E1 and E2", "are identical terms.\"\"\" def execute(self, x, y): # The Python __eq__ method does", "class TermLessThanOrEqual_2(BuiltIn): \"\"\"'@=<'(@term, @term) Test the ordering of two terms. 
'@=<'(X, Y) is", "where BV is a set of variables defined as the union of the", "pick_data(self, atom, before, length, after, subatom): s, p = self.data.pop(0) b = atom.name.index(s,", "value of evaluating Expression as an expression is Result.\"\"\" def execute(self, result, expression):", "Result.\"\"\" def execute(self, result, expression): if isvariable(expression): self.throw_instantiation_error() c = evaluate_expression(expression) return self.unify(result,", "= core._FLAGS[flag.name] if len(f.allowed) == 1: self.throw_permission_error('modify', 'flag', flag) if value.name not in", "import deepcopy v = ground(deepcopy(template), caller.currsubst()) #values.append(core.renamed_copy(v)) values.append(v._copy_term()) result = caller.solve_next() values =", "< 0: self.throw_domain_error('not_less_than_zero', after) n = atom.name start = before.value if isinstance(before, Atomic)", "variables in that goal. Each list is a sorted list, but the order", "return False class Arg_3(BuiltIn): '''arg(+integer, +compound_term, ?term) arg(N, Term, Arg) is true iff", "and the existential variables set of the term G * else EV is", "execute(self, flag, value): if isvariable(flag) or isvariable(value): self.throw_instantiation_error() if not isvariable(flag) and not", "from .. import core if core.unify(pi, indicator) is not None: self.indicators.append(indicator) return self.pick_indicator(pi)", "self.unify(Atomic(term.arity), arity)) if isinstance(term, Variable): if isinstance(name, Atomic) and arity.value == 0: return", "just in # case t2 were a renamed variable (e.g. 
coming # from", "database in the same state as if the procedure identified by Pred had", "or isvariable(arity): self.throw_instantiation_error() if not isinstance(arity.value, int): self.throw_type_error('integer', arity) if not isatom(name): self.throw_type_error('atom',", "and len(char.name) != 1: self.throw_type_error('character', char) if not isvariable(code) and not isinstance(code.value, int):", "### Arithmetic evaluation (ISO 8.6) ### Simple arithmetic functors (ISO 9.1) ### Other", "and isinstance(after.value, int))): self.throw_type_error('integer', after) if isnumber(before) and before.value < 0: self.throw_domain_error('not_less_than_zero', before)", "X is a member of the set V.''' def execute(self, x): return isvariable(x)", "False return True return False PREDICATES = { # Term unification (ISO 8.2)", "from ..core import PrologTypeError raise PrologTypeError('integer', Atomic(x)) if not isinstance(y, int): from ..core", "isinstance(x.value, float) class Atomic_1(BuiltIn): '''atomic(@term) atomic(X) is true if X is a member", "Atomic): self.data = [(d, p) for (d, p) in self.data if len(d) ==", "two terms. '@>'(X, Y) is true iff Y preceeds X.\"\"\" def execute(self, x,", "start = before.value if isinstance(before, Atomic) else 0 end = len(n) - (after.value", "modf(x) return i def float_fractional_part(x): '''Redefined w.r.t. Python because in ISO Prolog float_fractional_part(x)", "of exiting from the processor and returning to whatever system invoked Prolog, passing", "our own.''' if not y: from ..core import PrologEvaluationError raise PrologEvaluationError('zero_divisor') if not", "iff X and Y are not unifiable.\"\"\" def execute(self, x, y): from ..", "intdivide(x, y): '''Redefined w.r.t. 
Python because in ISO Prolog x // y is", "decomposition (ISO 8.5) 'functor/3' : Functor_3, 'arg/3' : Arg_3, '=../2' : Univ_2, 'copy_term/2'", "not functor: from ..core import PrologTypeError raise PrologTypeError('evaluable', Atomic(pi)) return functor(*args) from ..core", "bitor, '\\\\/1' : bitnot} return d.get(name) def divide(x, y): '''Redefined w.r.t. Python because", ": Functor_3, 'arg/3' : Arg_3, '=../2' : Univ_2, 'copy_term/2' : CopyTerm_2, # Arithmetic", "if (not isvariable(before) and not (isnumber(before) and isinstance(before.value, int))): self.throw_type_error('integer', before) if (not", "execute(self, x): return not isvariable(x) class Number_1(BuiltIn): '''number(@term) number(X) is true if X", "int): from ..core import PrologTypeError raise PrologTypeError('integer', Atomic(y)) return x % y def", "0: self.throw_domain_error('not_less_than_zero', length) if isnumber(after) and after.value < 0: self.throw_domain_error('not_less_than_zero', after) n =", "name of Sub_atom, and After is the number of characters of the name", "from ..core import deref args = (evaluate_expression(deref(a)) for a in term.value[1:]) pi =", "isinstance(t, Compound) and isinstance(v, Compound): if t.name != v.name or t.arity != v.arity:", "= atom.name.index(s, p) l = len(s) a = len(atom.name) - (b + l)", "and not a1.name.startswith('_'): a = bijection.get(a1) if a is not None and a2", "atom12) def pick_data(self, atom1, atom2, atom12): c = self.data.pop(0) return (self.unify(atom1, Atomic(c[0])) and", "the free variables in that goal. Each list is a sorted list, but", "terms. 
'\\=='(X, Y) is true iff X and Y are not identical terms.\"\"\"", "variables of the former to the variables of the latter such that the", "codelist.as_list(): if isvariable(element): self.throw_instantiation_error() if not isvariable(number) and not isnumber(number): self.throw_type_error('number', number) if", "self.throw_type_error('callable', goal) if (not isvariable(instances) and (not islist(instances) and not ispartiallist(instances))): self.throw_type_error('list', instances)", "x and y integers is equivalent to intdiv(x, y). Also, we need to", "is a float.''' if not isinstance(x, float): from ..core import PrologTypeError raise PrologTypeError('float',", ": Univ_2, 'copy_term/2' : CopyTerm_2, # Arithmetic evaluation (ISO 8.6) 'is/2' : Is_2,", "variables.''' def execute(self, template, goal, instances): if isvariable(goal): self.throw_instantiation_error() if isnumber(goal): self.throw_type_error('callable', goal)", "= self._create_solution_list(s) if not self.s: return False return self.pick_bag(template, goal, instances) def reexecute(self,", "def execute(self, term, elements): if isvariable(term) and ispartiallist(elements): self.throw_instantiation_error() if not islist(elements) and", "'''findall(?term, +callable_term, ?list) findall(Template, Goal, Instances) is true iff Instances unifies with the", "c in charlist.as_list()] return self.unify(atom, Atomic(''.join(chars))) elif isvariable(charlist) or islist(charlist) or ispartiallist(charlist): chars", "'@>=/2' : TermGreaterThanOrEqual_2, # Term creation and decomposition (ISO 8.5) 'functor/3' : Functor_3,", "of variables defined recursively as follows: * if T is an atomic term,", "if isinstance(term, Variable): if isinstance(name, Atomic) and arity.value == 0: return self.unify(term, name)", "a variable then Sv is {T} * else if T is a compound", "islist, ispartiallist, iscallable from ..core import BuiltIn ### ### Term unification (ISO 8.2)", "\"\"\"'@<'(@term, @term) Test the ordering of two terms. 
'@<'(X, Y) is true iff", "is used to remove those unifying clauses from the database.''' def execute(self, clause):", "Arity, or * Term is an atomic term equal to Name and Arity", "the database.''' def execute(self, pi): if not isvariable(pi) and not (pi.name == '/'", "a compound term and List is the list whose head is the functor", "a list whose elements are the one-char atoms corresponding to a character sequence", "unification (ISO 8.2) '=/2' : Unify_2, '\\=/2' : NotUnifiable_2, # Type testing (ISO", "self.witness) ww.apply(subst) t.apply(subst) self.substitution.update(subst) t_list = List.from_list(t_list) self.s = s_next return self.unify(t_list, instances)", "self.throw_type_error('number', number) if isvariable(number) and not islist(charlist) and not ispartiallist(charlist): self.throw_type_error('list', charlist) if", "PrologTypeError raise PrologTypeError('integer', Atomic(x)) if not isinstance(y, int): from ..core import PrologTypeError raise", "self.throw_instantiation_error() if not isvariable(x) and not isnumber(x) and not isinstance(x.value, int): self.throw_type_error('integer', x)", "= self.s[0] wt_list = [e for e in self.s if isvariant(wt.value[1], e.value[1])] t_list", "and AtomR, such that Before is the number of characters of the name", "the name of the atom Atom_1.''' def execute(self, atom1, atom2, atom12): if isvariable(atom1)", "if not isvariable(code): try: chr(code.value) except UnicodeDecodeError: self.throw_representation_error(code) if isvariable(char): c = chr(code.value)", "# All solutions (ISO 8.10) 'findall/3' : Findall_3, 'bagof/3' : Bagof_3, 'setof/3' :", "search_builtin if search_builtin(head): pi = Compound('/', Atomic(head.name), Atomic(head.arity)) self.throw_permission_error('modify', 'static_procedure', pi) self.clauses_to_unify =", "is greater than or equal to the corresponding arithmetic value of E2.\"\"\" def", "= set() if isinstance(term, Compound): for arg in term.value[1:]: s.update(variable_set(arg)) else: # a", "** 
y with x < 0 is defined only when y is an", "TermLessThan_2, '@>/2' : TermGreaterThan_2, '@>=/2' : TermGreaterThanOrEqual_2, # Term creation and decomposition (ISO", "in which predicate indicators are found by # current_predicate/1 is implementation dependent i", "is None ### ### Type testing (ISO 8.3) ### class Var_1(BuiltIn): '''var(@term) var(X)", "enables the value associated with a Prolog flag to be altered.''' def execute(self,", "False return self.pick_data(atom, before, length, after, subatom) def reexecute(self, atom, before, length, after,", "self.unify(atom, Atomic(''.join(chars))) elif isvariable(charlist) or islist(charlist) or ispartiallist(charlist): chars = [Atomic(c) for c", "return (convert_to_term(head), convert_to_term(body)) def convert_to_term(head): if head.arity == 0: return Atomic(head.name) from ..core", "terms.\"\"\" def execute(self, x, y): # The Python __eq__ method does not hold", "convert_clause_to_term(clause.head(), clause.body()) if (core.unify(h, head) is not None and core.unify(b, body) is not", "and arity Arity, or * Term is an atomic term equal to Name", "Compound): if t.name != v.name or t.arity != v.arity: return False bijection =", "are equal.\"\"\" def execute(self, e1, e2): if isvariable(e1) or isvariable(e2): self.throw_instantiation_error() v1 =", "isvariable(pi): self.throw_instantiation_error() if pi.name == '/' and pi.arity == 2: name, arity =", "Compound) and len(elements) > 1: self.throw_type_error('atomic', elements.head) if isvariable(term) and elements == List.EMPTY_LIST:", "class TermIdentical_2(BuiltIn): \"\"\"'=='(@term, @term) Test the ordering of two terms. 
'=='(X, Y) is", "[(d, p) for (d, p) in self.data if len(d) == length.value] if isinstance(after,", "recursively as follows: * if T is a variable or an atomic term,", "is the union of the variable set of V and the existential variables", "ArithmeticLessThanOrEqual_2, '>/2' : ArithmeticGreaterThan_2, '>=/2' : ArithmeticGreaterThanOrEqual_2, # Clause retrival and information (ISO", "not self.indicators: return False # the order in which predicate indicators are found", "None and a2 != a: return False else: bijection[a1] = a2 else: if", "'\\=='(X, Y) is true iff X and Y are not identical terms.\"\"\" def", "with the list of values to which a variable X not occurring in", "propagate renamed term variables bindings # outside the context of the copy_term/2 built-in", "in term.value[1:]: args.append(ground(arg, mgu)) return Compound(term.name, *args) return term class Bagof_3(BuiltIn): '''bagof(?term, +callable_term,", "elif isvariable(charlist) or islist(charlist) or ispartiallist(charlist): chars = [Atomic(c) for c in atom.name]", "isvariable(element): try: chr(element.value) except UnicodeDecodeError: self.throw_representation_error(element) if isvariable(atom): chars = [chr(code.value) for code", "X in the former by Xs.''' from ..core import deref t = deref(t)", "pi = term.predicate_indicator() functor = search_evaluable_functor(pi) if not functor: from ..core import PrologTypeError", "p) - len(d) == after.value] if isinstance(subatom, Atomic): self.data = [(d, p) for", "goal, instances): wt = self.s[0] wt_list = [e for e in self.s if", "and not isinstance(arity.value, int): self.throw_type_error('integer', arity) # TODO Missing max_arity related error if", "t.arity != v.arity: return False bijection = {} for a1, a2 in zip(t.value[1:],", "TermIdentical_2, '\\==/2' : TermNotIdentical_2, '@</2' : TermLessThan_2, '@>/2' : TermGreaterThan_2, '@>=/2' : TermGreaterThanOrEqual_2,", "is the list whose head is the functor name of Term and whose", "i in range(start, end + 1): 
self.data.append((n[start:i], start)) start += 1 if isinstance(before,", "list whose elements are the one-char atoms corresponding to a character sequence of", "number) if isvariable(number) and not islist(codelist) and not ispartiallist(codelist): self.throw_type_error('list', codelist) if islist(codelist):", "x, y): # The Python __ne__ method does not hold Prolog # semantics", "body): if not self.clauses_to_remove: return False self.kb.retract(self.clauses_to_remove.pop(0)) c = self.clauses_to_unify.pop(0) return self.unify(Compound('clause', head,", "length, after, subatom): if isvariable(atom): self.throw_instantiation_error() if not isvariable(atom) and not isatom(atom): self.throw_type_error('atom',", "name of atom Atom.''' def execute(self, atom, charlist): if not isvariable(atom) and not", "vst.difference(vsv.union(est)) # TODO This should be distributed onto the Term hierarchy classes def", "'''atom(@term) atom(X) is true iff X is a member of the set A.'''", "in ISO Prolog sign(x) must return the same type of number as its", "# Atomic term processing (ISO 8.16) 'atom_length/2' : AtomLength_2, 'atom_concat/3' : AtomConcat_3, 'sub_atom/5'", "isvariable(code): c = ord(char.name) return self.unify(code, Atomic(c)) else: return ord(char.name) == code.value class", "for log(0)) on our own.''' if not x: from ..core import PrologEvaluationError raise", "w.r.t. Python because we need to manage TypeError errors (e.g. 
n as float)", "core.renamed_copy(t1) t = t1._copy_term() # Can't directly use BuiltIn.unify because the bindings #", "Var_1, 'atom/1' : Atom_1, 'integer/1' : Integer_1, 'float/1' : Float_1, 'atomic/1' : Atomic_1,", "self.unify(length, size) class AtomConcat_3(BuiltIn): '''atom_concat(?atom, ?atom, +atom)\\natom_concat(+atom, +atom, -atom) atom_concat(Atom_1, Atom_2, Atom_12) is", "the context of the copy_term/2 built-in if t2.name in mgu: # Still preserve", "'_'): return True return x != y class TermLessThan_2(BuiltIn): \"\"\"'@<'(@term, @term) Test the", "result = caller.solve_next() values = List.EMPTY_LIST if not values else List.from_list(values) return self.unify(values,", "true iff X and Y are not unifiable.\"\"\" def execute(self, x, y): from", "isvariable(t) and isvariable(v): return True if isinstance(t, Compound) and isinstance(v, Compound): if t.name", "isnumber(number): self.throw_type_error('number', number) if isvariable(number) and not islist(charlist) and not ispartiallist(charlist): self.throw_type_error('list', charlist)", "Y preceeds X or Y and X are identical terms.\"\"\" def execute(self, x,", "= [e for e in self.s if isvariant(wt.value[1], e.value[1])] t_list = [e.value[2] for", "the copy_term/2 built-in mgu = core.unify(t2, t) if mgu is not None: if", "import PrologParser, InvalidTermException s = ''.join([chr(code.value) for code in codelist.as_list()]) try: # the", "@term) Test the ordering of two terms. 
'=='(X, Y) is true iff X", "and core.unify(b, body) is not None): self.clauses_to_unify.append(Compound('clause', h, b)) self.clauses_to_remove.append(clause) return self.pick_clause(head, body)", "V and the existential variable set of T.''' vst = variable_set(t) vsv =", "v1 = evaluate_expression(e1) v2 = evaluate_expression(e2) return v1 < v2 class ArithmeticLessThanOrEqual_2(BuiltIn): \"\"\"'=<'(@evaluable,", "if search_builtin(head): pi = Compound('/', Atomic(head.name), Atomic(head.arity)) self.throw_permission_error('modify', 'static_procedure', pi) self.kb.assert_clause(clause, append=False) return", "leaving the database in the same state as if the procedure identified by", "of values to which a variable X not occurring in Template or Goal", "the processor and returning to whatever system invoked Prolog.''' def execute(self): exit(0) class", "Atomic(a)) and self.unify(subatom, Atomic(s))) class AtomChars_2(BuiltIn): '''atom_chars(+atom, ?character_list)\\natom_chars(-atom, +character_list) atom_chars(Atom, List) is true", "from .. import core for wwtt, t in zip(wt_list, t_list): ww = wwtt.value[1]", "reexecute(self, pi): self.reset_substitution() return self.pick_indicator(pi) def pick_indicator(self, pi): if not self.indicators: return False", "assertz(Clause) is true. It is used to add Clause to the database after", "class Halt_0(BuiltIn): '''halt halt neither succeeds nor fails. 
It has the side effect", "deref if isinstance(term, Variable): if term.isfree(): return {term} else: term = deref(term) if", "iterated goal term G of a term T is a term defined recursively", "procedure is handled directly by the database from ..builtin import search_builtin if search_builtin(head):", "are not equal.\"\"\" def execute(self, e1, e2): if isvariable(e1) or isvariable(e2): self.throw_instantiation_error() v1", "= len(atom1.name) self.data = [(s[:i], s[i:], s)] else: n1 = atom1.name n2 =", "copy import deepcopy #subst = core.unify(ww, deepcopy(self.witness)) subst = core.unify(ww, self.witness) ww.apply(subst) t.apply(subst)", "is true if X is a member of the set I or F", "= [(d, p) for (d, p) in self.data if len(n) - n.index(d, p)", "free variable set FV of a term T with respect to a term", "return 0 if isinstance(x, int) else 0.0 from math import copysign s =", "?integer) functor(Term, Name, Arity) is true iff: * Term is a compound term", "in that goal. Each list is a sorted list, but the order in", "Y are NSTO (Not Subject To Occur-check) then '\\\\='(X, Y) is true iff", "isvariable(atom1) and isvariable(atom12): self.throw_instantiation_error() if isvariable(atom2) and isvariable(atom12): self.throw_instantiation_error() if not isvariable(atom1) and", "T unifies with ^(V, G) then EV is the union of the variable", ": intdivide, 'mod/2' : module, 'rem/2' : module, 'floor/1' : math.floor, 'round/1' :", "'''The existential variables set EV of a term T is a set of", "Atomic): return set() s = set() if isinstance(term, Compound): for arg in term.value[1:]:", "atom Atom can be broken into three pieces, AtomL, Sub_atom, and AtomR, such", "= Atomic.TRUE if isvariable(head): self.throw_instantiation_error() if isnumber(head): self.throw_type_error('callable', head) # error on access", "concatenating the characters of the name of the atom Atom_2 to the characters", "we need to manage TypeError errors (e.g. 
n as float) on our own.'''", "self.unify(atom2, Atomic(c[1])) and self.unify(atom12, Atomic(c[2]))) class SubAtom_5(BuiltIn): '''sub_atom(+atom, ?integer, ?integer, ?integer, ?atom) sub_atom(Atom,", "# between the renamed copy of t1 and t2 retain validity # only", "PrologTypeError raise PrologTypeError('integer', Atomic(n)) if not isinstance(s, int): from ..core import PrologTypeError raise", "defined hooks (ISO 8.17) ### class SetPrologFlag_2(BuiltIn): '''set_prolog_flag(+flag, @nonvar) A goal set_prolog_flag(Flag, Value)", "is the union of the variable sets for each of the arguments of", "name, arity): if isvariable(term) and isvariable(name): self.throw_instantiation_error() if isvariable(term) and isvariable(arity): self.throw_instantiation_error() if", "if fvs else Atomic('witness') g = iterated_goal_term(goal) findall = Findall_3(self.kb) findall.execute(Compound('+', self.witness, template),", "isvariable(x): self.throw_instantiation_error() if not isvariable(x) and not isnumber(x) and not isinstance(x.value, int): self.throw_type_error('integer',", "isvariable(codelist) or ispartiallist(codelist): codes = [Atomic(ord(char)) for char in atom.name] return self.unify(codelist, List.from_list(codes))", "isvariable(char): c = chr(code.value) return self.unify(char, Atomic(c)) elif isvariable(code): c = ord(char.name) return", "return self.pick_data(atom1, atom2, atom12) def pick_data(self, atom1, atom2, atom12): c = self.data.pop(0) return", "def variable_set(term): '''The variable set Sv of a term T is a set", "variable set Sv of a term T is a set of variables defined", "variables in it. 
The elements of each list are in order of solution,", "value of E1 is greater than or equal to the corresponding arithmetic value", "e2): if isvariable(e1) or isvariable(e2): self.throw_instantiation_error() v1 = evaluate_expression(e1) v2 = evaluate_expression(e2) return", "instances) def ground(term, mgu): if isinstance(term, Variable): if not term.value: value = mgu.get(term.name)", "X is a member of the set V, A, or CT.''' def execute(self,", "UnicodeDecodeError: self.throw_representation_error(element) if isvariable(atom): chars = [chr(code.value) for code in codelist.as_list()] return self.unify(atom,", "isvariable(arity): self.throw_instantiation_error() if not isinstance(arity.value, int): self.throw_type_error('integer', arity) if not isatom(name): self.throw_type_error('atom', name)", "if not isvariable(atom1) and not isatom(atom1): self.throw_type_error('atom', atom1) if not isvariable(atom2) and not", "float) class Atomic_1(BuiltIn): '''atomic(@term) atomic(X) is true if X is a member of", "functor(+nonvar, ?atomic, ?integer) functor(Term, Name, Arity) is true iff: * Term is a", "a2 != a: return False else: bijection[a1] = a2 else: if not isvariant(a1,", "TermLessThanOrEqual_2, '==/2' : TermIdentical_2, '\\==/2' : TermNotIdentical_2, '@</2' : TermLessThan_2, '@>/2' : TermGreaterThan_2,", "'/\\\\/2' : bitand, '\\\\//2' : bitor, '\\\\/1' : bitnot} return d.get(name) def divide(x,", "body) def pick_clause(self, head, body): if not self.clauses: return False c = self.clauses.pop(0)", "int): return x // y return x / y def intdivide(x, y): '''Redefined", "X not occurring in Template or Goal would be instantiated by successive re-executions", "isvariable(subatom) and not isatom(subatom): self.throw_type_error('atom', subatom) if (not isvariable(before) and not (isnumber(before) and", "a term defined recursively as follows: * if T unifies with ^(_, Goal)", "if isvariable(term) and islist(elements) and isvariable(elements.head): self.throw_instantiation_error() 
if islist(elements) and not isatom(elements.head) and", "self.pick_bag(template, goal, instances) def pick_bag(self, template, goal, instances): wt = self.s[0] wt_list =", "isvariable(term) and arity.value < 0: self.throw_domain_error('not_less_than_zero', arity) if isvariable(term) and not isinstance(name, Atomic):", "= tuple(Variable('_') for i in range(arity.value)) c = Compound(name.name, *t) from ..builtin import", "Atomic) else 0 end = len(n) - (after.value if isinstance(after, Atomic) else 0)", "retain validity # only in the context of the copy_term/2 built-in mgu =", "Atomic('witness') g = iterated_goal_term(goal) findall = Findall_3(self.kb) findall.execute(Compound('+', self.witness, template), g, Variable('S')) s", "== '_' and y.name == '_'): return True return x <= y class", "proper unify_with_occur_check/2 predicate. return self.unify(x, y) class NotUnifiable_2(BuiltIn): \"\"\"'\\\\='(@term, @term) If X and", "islist(charlist): from ..parser import PrologParser, InvalidTermException s = ''.join([char.name for char in charlist.as_list()])", "'.').read_term() return self.unify(number, n) except InvalidTermException as e: self.throw_syntax_error(Atomic(s)) else: chars = list(str(number.value))", "else: mgu.reduce() self.substitution.update(mgu) return True return False ### ### Arithmetic evaluation (ISO 8.6)", "def search_evaluable_functor(name): import math import operator d = {'+/2' : operator.add, '*/2' :", "currently associated with it.''' def execute(self, flag, value): from .. 
import core #", "if x < 0: from ..core import PrologEvaluationError raise PrologEvaluationError('undefined') from math import", "return False return self.pick_bag(template, goal, instances) def pick_bag(self, template, goal, instances): wt =", "* the predicate of Head is public, and * there is a clause", "and pi.arity == 2): self.throw_type_error('predicate_indicator', pi) self.indicators = [] for i in self.kb:", "not ispartiallist(charlist): self.throw_type_error('list', charlist) for element in charlist.as_list(): if isvariable(element): self.throw_instantiation_error() if isatom(element)", ": operator.sub, '-/1' : operator.neg, '//2' : divide, '///2' : intdivide, 'mod/2' :", "2: return iterated_goal_term(term.value[2]) return term def isvariant(t, v): '''Two terms are variants if", "what is probably a parser # bug: the operator's \"scope\" is much wider", "isinstance(code.value, int): self.throw_type_error('integer', code) if not isvariable(code): try: chr(code.value) except UnicodeDecodeError: self.throw_representation_error(code) if", "atom1, atom2, atom12): c = self.data.pop(0) return (self.unify(atom1, Atomic(c[0])) and self.unify(atom2, Atomic(c[1])) and", "ISO Prolog div(x, y) with x and y integers is equivalent to intdiv(x,", "'mod/2' : module, 'rem/2' : module, 'floor/1' : math.floor, 'round/1' : round, 'ceiling/1'", "self.throw_permission_error('modify', 'static_procedure', pi) self.kb.assert_clause(clause, append=False) return True class Assertz_1(BuiltIn): '''assertz(@clause) assertz(Clause) is true.", "else 0.0 from math import copysign s = copysign(1, x) return int(s) if", "w.r.t. Python because we need to manage ValueError errors (e.g. for x <", "before, length, after, subatom) def pick_data(self, atom, before, length, after, subatom): s, p", "# instead of using the proper unify_with_occur_check/2 predicate. 
return self.unify(x, y) class NotUnifiable_2(BuiltIn):", "* Term is a compound term with a functor whose identifier is Name", "Python because we need to manage TypeError errors (e.g. x or y as", "from ..builtin import search_builtin if search_builtin(head): pi = Compound('/', Atomic(head.name), Atomic(head.arity)) self.throw_permission_error('modify', 'static_procedure',", "is implementation dependent i = self.indicators.pop() return self.unify(pi, i) ### ### Clause creation", "import PrologTypeError raise PrologTypeError('number', term) def search_evaluable_functor(name): import math import operator d =", "in the same state as if the procedure identified by Pred had never", "should be distributed onto the Term hierarchy classes def variable_set(term): '''The variable set", "retract(Clause) is true iff the database contains at least one dynamic procedure with", "= caller.solve(goal) while result: from copy import deepcopy v = ground(deepcopy(template), caller.currsubst()) #values.append(core.renamed_copy(v))", "isvariable(atom12): self.throw_instantiation_error() if isvariable(atom2) and isvariable(atom12): self.throw_instantiation_error() if not isvariable(atom1) and not isatom(atom1):", "if isvariable(pi): self.throw_instantiation_error() if pi.name == '/' and pi.arity == 2: name, arity", "x <= y class TermIdentical_2(BuiltIn): \"\"\"'=='(@term, @term) Test the ordering of two terms.", "instances): self.reset_substitution() if not self.s: return False return self.pick_bag(template, goal, instances) def pick_bag(self,", "isvariable(number) and not islist(charlist) and not ispartiallist(charlist): self.throw_type_error('list', charlist) if islist(charlist): for element", "c in charlist.as_list()] return atom.name == ''.join(chars) class AtomCodes_2(BuiltIn): '''atom_codes(+atom, ?character_code_list)\\natom_codes(-atom, +character_code_list) atom_codes(Atom,", "import deepcopy #subst = core.unify(ww, deepcopy(self.witness)) subst = core.unify(ww, self.witness) 
ww.apply(subst) t.apply(subst) self.substitution.update(subst)", "dynamic procedure with a clause Clause which unifies with Head :- Body. It", "mgu): if isinstance(term, Variable): if not term.value: value = mgu.get(term.name) if value: return", "!= 1: self.throw_type_error('character', char) if not isvariable(code) and not isinstance(code.value, int): self.throw_type_error('integer', code)", "the procedure whose predicate is equal to the functor of the head of", "from .. import core caller = core.Caller() caller._kb = self.kb values = []", "execute(self, x): return x.arity == 0 and isinstance(x.value, float) class Atomic_1(BuiltIn): '''atomic(@term) atomic(X)", "p) in self.data if d == subatom.value] if not self.data: return False return", "a list if elements.name == '.' and elements.arity == 2: if len(elements) ==", "the name of the atom Atom.''' def execute(self, atom, length): if isvariable(atom): self.throw_instantiation_error()", "is true iff Y preceeds X.\"\"\" def execute(self, x, y): return x >", "core.unify(pi, indicator) is not None: self.indicators.append(indicator) return self.pick_indicator(pi) def reexecute(self, pi): self.reset_substitution() return", "class ArithmeticNotEqual_2(BuiltIn): \"\"\"'=\\='(@evaluable, @evaluable) '=\\='(E1, E2) is true iff evaluating E1 and E2", "and ispartiallist(codelist): self.throw_instantiation_error() if isvariable(number): for element in codelist.as_list(): if isvariable(element): self.throw_instantiation_error() if", "to a term H :- B which unifies with Head :- Body.''' def", "our own.''' if not y: from ..core import PrologEvaluationError raise PrologEvaluationError('zero_divisor') if isinstance(x,", "codelist) if islist(codelist): for element in codelist.as_list(): if not isvariable(element): try: chr(element.value) except", "Atomic(c)) elif isvariable(code): c = ord(char.name) return self.unify(code, Atomic(c)) else: return ord(char.name) ==", "(ISO 9.3) ### Bitwise functors (ISO 9.4) ### class Is_2(BuiltIn): 
\"\"\"is(?term, @evaluable) 'is'(Result,", "evaluate_expression(e1) v2 = evaluate_expression(e2) return v1 < v2 class ArithmeticLessThanOrEqual_2(BuiltIn): \"\"\"'=<'(@evaluable, @evaluable) '=<'(E1,", "is the functor name of Term and whose tail is a list of", "of the variables of the former to the variables of the latter such", "UnicodeDecodeError: self.throw_representation_error(element) if isvariable(number) or islist(codelist): from ..parser import PrologParser, InvalidTermException s =", ": module, 'floor/1' : math.floor, 'round/1' : round, 'ceiling/1' : math.ceil, 'truncate/1' :", "float): from ..core import PrologEvaluationError raise PrologEvaluationError('undefined') if not x and y <", "= iterated_goal_term(goal) findall = Findall_3(self.kb) findall.execute(Compound('+', self.witness, template), g, Variable('S')) s = findall.substitution['S']", "'''functor(-nonvar, +atomic, +integer) functor(+nonvar, ?atomic, ?integer) functor(Term, Name, Arity) is true iff: *", "length.value] if isinstance(after, Atomic): self.data = [(d, p) for (d, p) in self.data", "- n.index(d, p) - len(d) == after.value] if isinstance(subatom, Atomic): self.data = [(d,", "self.data = [(d, p) for (d, p) in self.data if d == subatom.value]", "isinstance(term, Variable): # elements is a list if elements.name == '.' and elements.arity", "divide(x, y): '''Redefined w.r.t. 
Python because in ISO Prolog div(x, y) with x", "def reexecute(self, flag, value): self.reset_substitution() if not self.flags: return False return self.pick_flag(flag, value)", "Atomic term processing (ISO 8.16) ### class AtomLength_2(BuiltIn): '''atom_length(+atom, ?integer) atom_length(Atom, Length) is", "n, term, arg): if isvariable(n) or isvariable(term): self.throw_instantiation_error() if not isinstance(n.value, int): self.throw_type_error('integer',", "self.throw_type_error('callable', head) # error on access permission to a user-defined # procedure is", "def execute(self, atom1, atom2, atom12): if isvariable(atom1) and isvariable(atom12): self.throw_instantiation_error() if isvariable(atom2) and", "database after all existing clauses of the procedure whose predicate is equal to", "# for flags if not isvariable(flag) and not isatom(flag): self.throw_type_error('atom', flag) if isatom(flag)", "Term.\"\"\" def execute(self, term, elements): if isvariable(term) and ispartiallist(elements): self.throw_instantiation_error() if not islist(elements)", "self.unify(value, Atomic(f.value)) class Halt_0(BuiltIn): '''halt halt neither succeeds nor fails. 
It has the", "0: self.throw_domain_error('not_less_than_zero', after) n = atom.name start = before.value if isinstance(before, Atomic) else", "and not ispartiallist(codelist): self.throw_type_error('list', codelist) if islist(codelist): for element in codelist.as_list(): if not", "and isinstance(x.value, int) class Float_1(BuiltIn): '''float(@term) float(X) is true iff X is a", "in ISO Prolog mod(x, y) is valid only when x and y are", "if isvariable(atom): from ..core import deref chars = [deref(c).name for c in charlist.as_list()]", "..builtin import search_builtin if search_builtin(c): self.throw_permission_error('modify', 'static_procedure', pi) else: self.throw_type_error('predicate_indicator', pi) self.kb.abolish(pi) return", "self.throw_type_error('atom', atom2) if not isvariable(atom12) and not isatom(atom12): self.throw_type_error('atom', atom12) if isvariable(atom1) and", "n = atom.name start = before.value if isinstance(before, Atomic) else 0 end =", "isinstance(y, int): from ..core import PrologTypeError raise PrologTypeError('integer', Atomic(s)) return x | y", "E1 and E2 as expressions the corresponding arithmetic values are equal.\"\"\" def execute(self,", "clause): if clause.predicate_indicator() == ':-/2': head = clause.value[1] body = clause.value[2] else: head", "of Sub_atom, and After is the number of characters of the name of", "isvariable(atom12) and not isatom(atom12): self.throw_type_error('atom', atom12) if isvariable(atom1) and isvariable(atom2): s = atom12.name", "max_arity related error if isvariable(term) and arity.value < 0: self.throw_domain_error('not_less_than_zero', arity) if isvariable(term)", "self.reset_substitution() if not self.data: return False return self.pick_data(atom1, atom2, atom12) def pick_data(self, atom1,", "for a1, a2 in zip(t.value[1:], v.value[1:]): if isvariable(a1) and isvariable(a2) and not a1.name.startswith('_'):", "used to add Clause to the database after all existing clauses of the", "If X and Y are NSTO 
(Not Subject To Occur-check) then '='(X, Y)", "return self.unify(x, y) class NotUnifiable_2(BuiltIn): \"\"\"'\\\\='(@term, @term) If X and Y are NSTO", "is a list if elements.name == '.' and elements.arity == 2: if len(elements)", "return self.pick_data(atom, before, length, after, subatom) def reexecute(self, atom, before, length, after, subatom):", "v1 = evaluate_expression(e1) v2 = evaluate_expression(e2) return v1 != v2 class ArithmeticLessThan_2(BuiltIn): \"\"\"'<'(@evaluable,", "false if X is a member of the set V or CT.''' def", "execute(self, char, code): if isvariable(char) and isvariable(code): self.throw_instantiation_error() if not isvariable(char) and len(char.name)", "atom12.name if not s.endswith(atom2.name): return False else: i = s.index(atom2.name) self.data = [(s[:i],", "variable set of T and BV where BV is a set of variables", "[] if s == List.EMPTY_LIST else s.as_list() class Setof_3(Bagof_3): '''setof(?term, +callable_term, ?list) setof/3", "x & y def bitor(x, y): '''Redefined w.r.t. 
Python because we need to", "if isinstance(t, Atomic) and isinstance(v, Atomic): return t == v if isvariable(t) and", "arg(N, Term, Arg) is true iff the Nth argument of Term is Arg.'''", "and isvariable(atom2): s = atom12.name self.data = [(s[:i], s[i:], s) for i in", "isvariable(x) class Atom_1(BuiltIn): '''atom(@term) atom(X) is true iff X is a member of", "isatom(flag) and not core._FLAGS[flag.name]: self.throw_domain_error('prolog_flag', flag) self.flags = {f for f in core._FLAGS.values()", "len(term.value): return False return self.unify(arg, term.value[n.value]) class Univ_2(BuiltIn): \"\"\"'=..'(+nonvar, ?list) '=..'(-nonvar, +list) '=..'(Term,", "is less than the corresponding arithmetic value of E2.\"\"\" def execute(self, e1, e2):", "core #t = core.renamed_copy(t1) t = t1._copy_term() # Can't directly use BuiltIn.unify because", "size = Atomic(len(atom.name)) return self.unify(length, size) class AtomConcat_3(BuiltIn): '''atom_concat(?atom, ?atom, +atom)\\natom_concat(+atom, +atom, -atom)", "not None and core.unify(b, body) is not None): self.clauses_to_unify.append(Compound('clause', h, b)) self.clauses_to_remove.append(clause) return", "isinstance(s, int): from ..core import PrologTypeError raise PrologTypeError('integer', Atomic(s)) return n << s", "term G of a term T is a term defined recursively as follows:", "clause renaming) temp = mgu[t2.name] mgu.reduce() mgu.update({t2.name : temp}) else: mgu.reduce() self.substitution.update(mgu) return", "instances) def reexecute(self, template, goal, instances): self.reset_substitution() if not self.s: return False return", "..core import PrologEvaluationError raise PrologEvaluationError('undefined') from math import log return log(x) def squareroot(x):", "member of the set CT.''' def execute(self, x): return isinstance(x, (Compound, List)) class", "is true iff List is a list whose elements correspond to the successive", "G * else EV is the empty set.''' s = set() if isinstance(term,", "# from a clause 
renaming) temp = mgu[t2.name] mgu.reduce() mgu.update({t2.name : temp}) else:", "v2 class ArithmeticLessThan_2(BuiltIn): \"\"\"'<'(@evaluable, @evaluable) '<'(E1, E2) is true iff evaluating E1 and", "evaluating Expression as an expression is Result.\"\"\" def execute(self, result, expression): if isvariable(expression):", "for arg in term.value[1:]: args.append(ground(arg, mgu)) return Compound(term.name, *args) return term class Bagof_3(BuiltIn):", "9.4) ### class Is_2(BuiltIn): \"\"\"is(?term, @evaluable) 'is'(Result, Expression) is true iff the value", "= len(atom.name) - (b + l) return (self.unify(before, Atomic(b)) and self.unify(length, Atomic(l)) and", "for (d, p) in self.data if n.index(d, p) == before.value] if isinstance(length, Atomic):", "bijection[a1] = a2 else: if not isvariant(a1, a2): return False return True return", "if the procedure identified by Pred had never existed.''' def execute(self, pi): if", "not isinstance(x, int): from ..core import PrologTypeError raise PrologTypeError('integer', Atomic(x)) if not isinstance(y,", "ispartiallist(charlist): self.throw_type_error('list', charlist) if islist(charlist): for element in charlist.as_list(): if isatom(element) and len(element.name)", "sign, 'float_integer_part/1' : float_integer_part, 'float_fractional_part/1' : float_fractional_part, '**/2' : power, 'sin/1' : math.sin,", "of the set CT.''' def execute(self, x): return isinstance(x, (Compound, List)) class Nonvar_1(BuiltIn):", "and y are integers. 
Also, we need to manage ZeroDivisionError errors on our", "for c in chars] return self.unify(charlist, List.from_list(lst)) class NumberCodes_2(BuiltIn): '''number_codes(+number, ?character_code_list)\\nnumber_codes(-number, ?character_code_list) number_codes(Number,", "need to manage ZeroDivisionError errors on our own.''' if x < 0 and", ": math.exp, 'log/1' : logarithm, 'sqrt/1' : squareroot, '>>/2' : rightshift, '<</2' :", "False else: return False class CopyTerm_2(BuiltIn): '''copy_term(?term, ?term) copy_term(Term_1, Term_2) is true iff", "class Assertz_1(BuiltIn): '''assertz(@clause) assertz(Clause) is true. It is used to add Clause to", "whose head is the functor name of Term and whose tail is a", "term def isvariant(t, v): '''Two terms are variants if there is a bijection", ": ArithmeticEqual_2, '=\\=/2' : ArithmeticNotEqual_2, '</2' : ArithmeticLessThan_2, '=</2' : ArithmeticLessThanOrEqual_2, '>/2' :", "PI is a predicate indicator for one of the user-defined procedures in the", "math.trunc, 'float/1' : float, 'abs/1' : operator.abs, 'sign/1' : sign, 'float_integer_part/1' : float_integer_part,", "ordering of two terms. '\\=='(X, Y) is true iff X and Y are", "compound term and List is the list whose head is the functor name", "'setof/3' : Setof_3, # Logic and control (ISO 8.15) # FIXME \\+ does", "= t1._copy_term() # Can't directly use BuiltIn.unify because the bindings # between the", "raise PrologEvaluationError('undefined') from math import log return log(x) def squareroot(x): '''Redefined w.r.t. 
Python", "true iff: * the predicate of Head is public, and * there is", "defined recursively as follows: * if T is an atomic term, then Sv", "is true iff the character code for the one-char atom Char is Code.'''", "Variable): if term.isfree(): return {term} else: term = deref(term) if isinstance(term, Atomic): return", "body) is not None): self.clauses.append(Compound('clause', h, b)) return self.pick_clause(head, body) def reexecute(self, head,", "arg in term.value[1:]: s.update(variable_set(arg)) else: # a list for e in term.as_list(): s.update(variable_set(e))", "copy of Term_1.''' def execute(self, t1, t2): from .. import core #t =", "'''var(@term) var(X) is true iff X is a member of the set V.'''", "characters of the name of the atom Atom.''' def execute(self, atom, length): if", "i in self.kb: n, a = i.split('/') indicator = Compound('/', Atomic(n), Atomic(int(a))) from", "for (d, p) in self.data if len(d) == length.value] if isinstance(after, Atomic): self.data", "and not (isnumber(before) and isinstance(before.value, int))): self.throw_type_error('integer', before) if (not isvariable(length) and not", "an expression is Result.\"\"\" def execute(self, result, expression): if isvariable(expression): self.throw_instantiation_error() c =", "pick_clause(self, head, body): if not self.clauses_to_remove: return False self.kb.retract(self.clauses_to_remove.pop(0)) c = self.clauses_to_unify.pop(0) return", "import copysign s = copysign(1, x) return int(s) if isinstance(x, int) else s", "\"\"\"'@>(@term, @term) Test the ordering of two terms. '@>'(X, Y) is true iff", "not procedure: return False from .. 
import core for clause in procedure.clauses(): h,", "isvariable(head): self.throw_instantiation_error() if isnumber(head): self.throw_type_error('callable', head) # errors on the conversion of the", "None: self.indicators.append(indicator) return self.pick_indicator(pi) def reexecute(self, pi): self.reset_substitution() return self.pick_indicator(pi) def pick_indicator(self, pi):", "clause.predicate_indicator() == ':-/2': head = clause.value[1] body = clause.value[2] else: head = clause", "if isvariable(term) and elements == List.EMPTY_LIST: self.throw_domain_error('non_empty_list', elements) # TODO Missing max_arity related", "Halt_0(BuiltIn): '''halt halt neither succeeds nor fails. It has the side effect of", "Atomic(s)) return x & y def bitor(x, y): '''Redefined w.r.t. Python because we", "a # goal and on access permission to a user-defined # procedure are", "import math import operator d = {'+/2' : operator.add, '*/2' : operator.mul, '-/2'", "return self.unify(flag, Atomic(f.name)) and self.unify(value, Atomic(f.value)) class Halt_0(BuiltIn): '''halt halt neither succeeds nor", "Clause_2(BuiltIn): '''clause(+head, ?callable_term) clause(Head, Body) is true iff: * the predicate of Head", "its clauses, leaving the database in the same state as if the procedure", "self.throw_domain_error('not_less_than_zero', after) n = atom.name start = before.value if isinstance(before, Atomic) else 0", "< 0: self.throw_domain_error('not_less_than_zero', before) if isnumber(length) and length.value < 0: self.throw_domain_error('not_less_than_zero', length) if", "isvariable(e2): self.throw_instantiation_error() v1 = evaluate_expression(e1) v2 = evaluate_expression(e2) return v1 == v2 class", "arity.value > 0: self.throw_type_error('atom', name) if isinstance(term, Atomic): return self.unify(term, name) and self.unify(arity,", "= evaluate_expression(e1) v2 = evaluate_expression(e2) return v1 != v2 class ArithmeticLessThan_2(BuiltIn): \"\"\"'<'(@evaluable, @evaluable)", "v2 = 
evaluate_expression(e2) return v1 < v2 class ArithmeticLessThanOrEqual_2(BuiltIn): \"\"\"'=<'(@evaluable, @evaluable) '=<'(E1, E2)", "arity.value < 0: self.throw_domain_error('not_less_than_zero', arity) # error on access permission to a user-defined", "if isinstance(term, Variable): # elements is a list if elements.name == '.' and", "functors (ISO 9.3) ### Bitwise functors (ISO 9.4) ### class Is_2(BuiltIn): \"\"\"is(?term, @evaluable)", "the functor of the head of Clause.''' def execute(self, clause): head = clause.value[1]", "= Atomic.TRUE return self.pick_clause(head, body) def pick_clause(self, head, body): if not self.clauses_to_remove: return", "PrologTypeError raise PrologTypeError('number', term) def search_evaluable_functor(name): import math import operator d = {'+/2'", "def reexecute(self, clause): self.reset_substitution() if clause.predicate_indicator() == ':-/2': head = clause.value[1] body =", "= variable_set(t) vsv = variable_set(v) est = existential_variable_set(t) return vst.difference(vsv.union(est)) # TODO This", "not unifiable.\"\"\" def execute(self, x, y): from .. import core return core.unify(x, y)", "elements.head return self.unify(term, t) elif len(elements) > 1: name = elements.head.name t =", "import core caller = core.Caller() caller._kb = self.kb values = [] result =", "c in chars] return self.unify(codelist, List.from_list(lst)) ### ### Implementation defined hooks (ISO 8.17)", "def rightshift(n, s): '''Redefined w.r.t. Python because we need to manage TypeError errors", "e: self.throw_syntax_error(Atomic(s)) else: chars = list(str(number.value)) # FIXME this should use write_canonical/1 lst", "unifying clauses from the database.''' def execute(self, clause): if clause.predicate_indicator() == ':-/2': head", "?integer, ?atom) sub_atom(Atom, Before, Length, After, Sub_atom) is true iff atom Atom can", "else s def float_integer_part(x): '''Redefined w.r.t. 
Python because in ISO Prolog float_integer_part(x) is", "self.throw_instantiation_error() if isnumber(goal): self.throw_type_error('callable', goal) if (not isvariable(instances) and (not islist(instances) and not", "self.data if len(d) == length.value] if isinstance(after, Atomic): self.data = [(d, p) for", "if isvariant(wt.value[1], e.value[1])] t_list = [e.value[2] for e in wt_list] s_next = [e", "after, subatom) def reexecute(self, atom, before, length, after, subatom): self.reset_substitution() if not self.data:", "self.throw_permission_error('modify', 'static_procedure', pi) else: self.throw_type_error('predicate_indicator', pi) self.kb.abolish(pi) return True ### ### All solutions", "flags if not isvariable(flag) and not isatom(flag): self.throw_type_error('atom', flag) if isatom(flag) and not", "a member of the set V, A, or CT.''' def execute(self, x): return", "v2 class ArithmeticGreaterThan_2(BuiltIn): \"\"\"'>'(@evaluable, @evaluable) '>'(E1, E2) is true iff evaluating E1 and", "Atom.''' def execute(self, atom, length): if isvariable(atom): self.throw_instantiation_error() if not isatom(atom): self.throw_type_error('atom', atom)", "of the variable set of T and BV where BV is a set", "former by Xs.''' from ..core import deref t = deref(t) v = deref(v)", "self.throw_type_error('atomic', elements.head) if isvariable(term) and elements == List.EMPTY_LIST: self.throw_domain_error('non_empty_list', elements) # TODO Missing", "charlist) if islist(charlist): for element in charlist.as_list(): if isatom(element) and len(element.name) != 1:", "Prolog div(x, y) with x and y integers is equivalent to intdiv(x, y).", "All solutions (ISO 8.10) 'findall/3' : Findall_3, 'bagof/3' : Bagof_3, 'setof/3' : Setof_3,", "equal to the corresponding arithmetic value of E2.\"\"\" def execute(self, e1, e2): if", "End Token n = PrologParser(s + '.').read_term() return self.unify(number, n) except InvalidTermException as", "terms. 
'=='(X, Y) is true iff X and Y are identical terms.\"\"\" def", "X is a member of the set A.''' def execute(self, x): return isatom(x)", "not ispartiallist(codelist): self.throw_type_error('list', codelist) if islist(codelist): for element in codelist.as_list(): if not isvariable(element):", "y.name == '_'): return False return x == y class TermNotIdentical_2(BuiltIn): \"\"\"'\\=='(@term, @term)", "import PrologTypeError raise PrologTypeError('integer', Atomic(s)) return x & y def bitor(x, y): '''Redefined", "the database before all existing clauses of the procedure whose predicate is equal", "value): f = self.flags.pop() return self.unify(flag, Atomic(f.name)) and self.unify(value, Atomic(f.value)) class Halt_0(BuiltIn): '''halt", "* there is a clause in the database which corresponds to a term", "y: from ..core import PrologEvaluationError raise PrologEvaluationError('zero_divisor') if not isinstance(x, int): from ..core", "= deref(term) if isinstance(term, Atomic): return set() s = set() if isinstance(term, Compound):", "= [c.name for c in charlist.as_list()] return atom.name == ''.join(chars) class AtomCodes_2(BuiltIn): '''atom_codes(+atom,", "= Atomic(len(atom.name)) return self.unify(length, size) class AtomConcat_3(BuiltIn): '''atom_concat(?atom, ?atom, +atom)\\natom_concat(+atom, +atom, -atom) atom_concat(Atom_1,", "is true iff the Nth argument of Term is Arg.''' def execute(self, n,", "of the free variables in it. 
The elements of each list are in", "isvariable(atom2) and isvariable(atom12): self.throw_instantiation_error() if not isvariable(atom1) and not isatom(atom1): self.throw_type_error('atom', atom1) if", "8.6) ### Simple arithmetic functors (ISO 9.1) ### Other arithmetic functors (ISO 9.3)", "= [Atomic(c) for c in atom.name] return self.unify(charlist, List.from_list(chars)) else: chars = [c.name", "in charlist.as_list(): if isvariable(element): self.throw_instantiation_error() if isatom(element) and len(element.name) != 1: self.throw_type_error('character', element)", "?list) findall(Template, Goal, Instances) is true iff Instances unifies with the list of", "to the corresponding arithmetic value of E2.\"\"\" def execute(self, e1, e2): if isvariable(e1)", "@evaluable) '>='(E1, E2) is true iff evaluating E1 and E2 as expressions the", "core._FLAGS.values() if core.unify(flag, Atomic(f.name)) is not None} if not self.flags: return False return", "?callable_term) clause(Head, Body) is true iff: * the predicate of Head is public,", "= {f for f in core._FLAGS.values() if core.unify(flag, Atomic(f.name)) is not None} if", "from copy import deepcopy v = ground(deepcopy(template), caller.currsubst()) #values.append(core.renamed_copy(v)) values.append(v._copy_term()) result = caller.solve_next()", "evaluate_expression(e1) v2 = evaluate_expression(e2) return v1 == v2 class ArithmeticNotEqual_2(BuiltIn): \"\"\"'=\\='(@evaluable, @evaluable) '=\\='(E1,", "Compound('/', Atomic(head.name), Atomic(head.arity)) self.throw_permission_error('modify', 'static_procedure', pi) self.kb.assert_clause(clause, append=True) return True class Retract_1(BuiltIn): '''retract(+clause)", "True return False ### ### Arithmetic evaluation (ISO 8.6) ### Simple arithmetic functors", "element) if isvariable(atom): from ..core import deref chars = [deref(c).name for c in", "(ISO 8.5) ### class Functor_3(BuiltIn): '''functor(-nonvar, +atomic, +integer) functor(+nonvar, ?atomic, ?integer) functor(Term, 
Name,", "characters of the name of the atom Atom_2 to the characters of the", "a clause in the database which corresponds to a term H :- B", "math import operator d = {'+/2' : operator.add, '*/2' : operator.mul, '-/2' :", "goal, instances): if isvariable(goal): self.throw_instantiation_error() if isnumber(goal): self.throw_type_error('callable', goal) if (not isvariable(instances) and", "ispartiallist, iscallable from ..core import BuiltIn ### ### Term unification (ISO 8.2) ###", "always returns a float. Also, we need to manage ZeroDivisionError errors on our", "it. The elements of each list are in order of solution, but the", "write_canonical/1 lst = [Atomic(c) for c in chars] return self.unify(charlist, List.from_list(lst)) class NumberCodes_2(BuiltIn):", "* else if T is a variable then Sv is {T} * else", "TODO Missing max_arity related error if isinstance(term, Atomic): l = List(term) return self.unify(elements,", "the existential variable set of T.''' vst = variable_set(t) vsv = variable_set(v) est", "arity Arity, or * Term is an atomic term equal to Name and", "self.clauses.pop(0) return self.unify(Compound('clause', head, body), c) def convert_clause_to_term(head, body): return (convert_to_term(head), convert_to_term(body)) def", "STO terms by =/2 # instead of using the proper unify_with_occur_check/2 predicate. 
return", "not(Term) is true iff call(Term) is false.\"\"\" # \"\"\"'\\\\+'(@callable_term) # # '\\\\+'(Term) is", "# error on access permission to a user-defined # procedure is handled directly", "term.arity == 2: s.update(variable_set(term.value[1])) s.update(existential_variable_set(term.value[2])) return s return s def iterated_goal_term(term): '''The iterated", "(d, p) in self.data if len(d) == length.value] if isinstance(after, Atomic): self.data =", "name, arity = pi.value[1:] if isvariable(name) or isvariable(arity): self.throw_instantiation_error() if not isinstance(arity.value, int):", "in self.data if d == subatom.value] if not self.data: return False return self.pick_data(atom,", "parser needs an End Token n = PrologParser(s + '.').read_term() return self.unify(number, n)", "is a member of the set CT.''' def execute(self, x): return isinstance(x, (Compound,", "isvariable(atom): self.throw_instantiation_error() if not isvariable(atom) and not isatom(atom): self.throw_type_error('atom', atom) if not isvariable(subatom)", "Clause which unifies with Head :- Body. 
It is used to remove those", "is greater than the corresponding arithmetic value of E2.\"\"\" def execute(self, e1, e2):", "values to which a variable X not occurring in Template or Goal would", "character sequence of Number which could be output.''' def execute(self, number, charlist): if", "Atomic term processing (ISO 8.16) 'atom_length/2' : AtomLength_2, 'atom_concat/3' : AtomConcat_3, 'sub_atom/5' :", "'@=</2' : TermLessThanOrEqual_2, '==/2' : TermIdentical_2, '\\==/2' : TermNotIdentical_2, '@</2' : TermLessThan_2, '@>/2'", "if isvariable(atom): if ispartiallist(codelist): self.throw_instantiation_error() if not islist(codelist) and not ispartiallist(codelist): self.throw_type_error('list', codelist)", "'sign/1' : sign, 'float_integer_part/1' : float_integer_part, 'float_fractional_part/1' : float_fractional_part, '**/2' : power, 'sin/1'", "### ### All solutions (ISO 8.10) ### class Findall_3(BuiltIn): '''findall(?term, +callable_term, ?list) findall(Template,", "clause(Head, Body) is true iff: * the predicate of Head is public, and", "### ### Logic and control (ISO 8.15) ### class Not_1(BuiltIn): \"\"\"not(@callable_term) not(Term) is", "def execute(self, x, y): # TODO prologlib crashes if you attempt to unify", "p) == before.value] if isinstance(length, Atomic): self.data = [(d, p) for (d, p)", "== 2: if len(elements) == 1: t = elements.head return self.unify(term, t) elif", "isvariable(goal): self.throw_instantiation_error() if isnumber(goal): self.throw_type_error('callable', goal) if (not isvariable(instances) and (not islist(instances) and", "a term T is a set of variables defined recursively as follows: *", "wt = self.s[0] wt_list = [e for e in self.s if isvariant(wt.value[1], e.value[1])]", "the binding for t2 just in # case t2 were a renamed variable", "as if the procedure identified by Pred had never existed.''' def execute(self, pi):", "ValueError errors (e.g. 
for log(0)) on our own.''' if not x: from ..core", "'functor/3' : Functor_3, 'arg/3' : Arg_3, '=../2' : Univ_2, 'copy_term/2' : CopyTerm_2, #", "true iff List is a list whose elements are the character codes corresponding", "atom12.name if not s.startswith(atom1.name): return False else: i = len(atom1.name) self.data = [(s[:i],", "CurrentPredicate_1(BuiltIn): '''current_predicate(?predicate_indicator) current_predicate(PI) is true iff PI is a predicate indicator for one", "class CharCode_2(BuiltIn): '''char_code(+character, ?character_code)\\nchar_code(-character, +character_code) char_code(Char, Code) is true iff the character code", "self.throw_type_error('atom', name) # TODO Missing max_arity related errors if arity.value < 0: self.throw_domain_error('not_less_than_zero',", "instantiation of the free variables in that goal. Each list is a sorted", "and term._isnumber(): return term.value if isinstance(term, Compound): from ..core import deref args =", "= self.data.pop(0) b = atom.name.index(s, p) l = len(s) a = len(atom.name) -", "be broken into three pieces, AtomL, Sub_atom, and AtomR, such that Before is", "is true iff Term_2 unifies with a term T which is a renamed", ": operator.mul, '-/2' : operator.sub, '-/1' : operator.neg, '//2' : divide, '///2' :", "?character_list)\\natom_chars(-atom, +character_list) atom_chars(Atom, List) is true iff List is a list whose elements", "self.throw_type_error('callable', head) if not (isvariable(body) or iscallable(body)): self.throw_type_error('callable', body) self.clauses = [] procedure", "altered.''' def execute(self, flag, value): if isvariable(flag) or isvariable(value): self.throw_instantiation_error() if not isvariable(flag)", "False c = self.clauses.pop(0) return self.unify(Compound('clause', head, body), c) def convert_clause_to_term(head, body): return", "current_prolog_flag(Flag, Value) is true iff Flag is a flag supported by the processor,", "..core import PrologTypeError raise PrologTypeError('number', term) 
def search_evaluable_functor(name): import math import operator d", "2: if len(elements) == 1: t = elements.head return self.unify(term, t) elif len(elements)", "ZeroDivisionError errors on our own.''' if x < 0 and isinstance(y, float): from", "and on access permission to a user-defined # procedure are handled directly by", "self.throw_instantiation_error() if not isinstance(arity.value, int): self.throw_type_error('integer', arity) if not isatom(name): self.throw_type_error('atom', name) #", "the database after all existing clauses of the procedure whose predicate is equal", "a set of variables defined as the union of the variable set of", "..core import PrologEvaluationError raise PrologEvaluationError('zero_divisor') if isinstance(x, int) and isinstance(y, int): return x", "in core._FLAGS: self.throw_domain_error('prolog_flag', flag) f = core._FLAGS[flag.name] if len(f.allowed) == 1: self.throw_permission_error('modify', 'flag',", "of the former to the variables of the latter such that the latter", "== '_' and y.name == '_'): return False return x == y class", "conversion of the clause body to a # goal and on access permission", "v1 = evaluate_expression(e1) v2 = evaluate_expression(e2) return v1 <= v2 class ArithmeticGreaterThan_2(BuiltIn): \"\"\"'>'(@evaluable,", "append=False) return True class Assertz_1(BuiltIn): '''assertz(@clause) assertz(Clause) is true. 
It is used to", "Atomic(c)) def evaluate_expression(term): # TODO No overflow/underflow errors # TODO No undefined errors", "and not isnumber(number): self.throw_type_error('number', number) if isvariable(number) and not islist(charlist) and not ispartiallist(charlist):", "PrologTypeError raise PrologTypeError('float', Atomic(x)) from math import modf f, i = modf(x) return", "Y) is true iff X and Y are not unifiable.\"\"\" def execute(self, x,", "v2 = evaluate_expression(e2) return v1 >= v2 ### ### Clause retrival and information", "'ceiling/1' : math.ceil, 'truncate/1' : math.trunc, 'float/1' : float, 'abs/1' : operator.abs, 'sign/1'", "if isvariable(a1) and isvariable(a2) and not a1.name.startswith('_'): a = bijection.get(a1) if a is", "and not ispartiallist(charlist): self.throw_type_error('list', charlist) for element in charlist.as_list(): if isvariable(element): self.throw_instantiation_error() if", "False return self.pick_flag(flag, value) def pick_flag(self, flag, value): f = self.flags.pop() return self.unify(flag,", "!= v.name or t.arity != v.arity: return False bijection = {} for a1,", "pi = Compound('/', Atomic(head.name), Atomic(head.arity)) self.throw_permission_error('modify', 'static_procedure', pi) self.kb.assert_clause(clause, append=False) return True class", "self.data if len(n) - n.index(d, p) - len(d) == after.value] if isinstance(subatom, Atomic):", "atom_length(Atom, Length) is true iff integer Length equals the number of characters of", "# The Python __eq__ method does not hold Prolog # semantics for anonymous", "of the set A.''' def execute(self, x): return isatom(x) class Integer_1(BuiltIn): '''integer(@term) integer(X)", "anonymous variables if (isvariable(x) and isvariable(y) and x.name == '_' and y.name ==", "the variable set of T and BV where BV is a set of", "Clause retrival and information (ISO 8.8) 'clause/2' : Clause_2, 'current_predicate/1' : CurrentPredicate_1, #", ": Integer_1, 'float/1' : Float_1, 'atomic/1' : Atomic_1, 
'compound/1' : Compound_1, 'nonvar/1' :", "import renamed_copy return renamed_copy(head) class CurrentPredicate_1(BuiltIn): '''current_predicate(?predicate_indicator) current_predicate(PI) is true iff PI is", "= [(d, p) for (d, p) in self.data if len(d) == length.value] if", "try: # the parser needs an End Token n = PrologParser(s + '.').read_term()", "= clause body = Atomic.TRUE return self.pick_clause(head, body) def pick_clause(self, head, body): if", "iterated_goal_term(term): '''The iterated goal term G of a term T is a term", "isvariable(before) and not (isnumber(before) and isinstance(before.value, int))): self.throw_type_error('integer', before) if (not isvariable(length) and", "def sign(x): '''Redefined w.r.t. Python because in ISO Prolog sign(x) must return the", "as expressions the corresponding arithmetic value of E1 is greater than or equal", "of the set I or F and is false if X is a", "'var/1' : Var_1, 'atom/1' : Atom_1, 'integer/1' : Integer_1, 'float/1' : Float_1, 'atomic/1'", "atom2, atom12) def pick_data(self, atom1, atom2, atom12): c = self.data.pop(0) return (self.unify(atom1, Atomic(c[0]))", "(e.g. coming # from a clause renaming) temp = mgu[t2.name] mgu.reduce() mgu.update({t2.name :", "T.''' vst = variable_set(t) vsv = variable_set(v) est = existential_variable_set(t) return vst.difference(vsv.union(est)) #", "CT.''' def execute(self, x): return isinstance(x, (Compound, List)) class Nonvar_1(BuiltIn): '''nonvar(@term) nonvar(X) is", "s_next return self.unify(t_list, instances) def _create_solution_list(self, s): return [] if s == List.EMPTY_LIST", "def execute(self, clause): head = clause.value[1] if clause.predicate_indicator() == ':-/2' else clause if", "procedure = self.kb.search(head) if not procedure: return False from .. 
import core for", "from ..core import PrologTypeError raise PrologTypeError('integer', Atomic(s)) return x & y def bitor(x,", "of each list are in order of solution, but the order in which", "if e not in wt_list] from .. import core for wwtt, t in", "errors on the conversion of the clause body to a # goal and", "values are equal.\"\"\" def execute(self, e1, e2): if isvariable(e1) or isvariable(e2): self.throw_instantiation_error() v1", ": bitnot} return d.get(name) def divide(x, y): '''Redefined w.r.t. Python because in ISO", "is the list whose only element is Term, or * Term is a", "and always returns a float. Also, we need to manage ZeroDivisionError errors on", "can be broken into three pieces, AtomL, Sub_atom, and AtomR, such that Before", "if X is a member of the set V or CT.''' def execute(self,", "is {T} * else if T is a compound term then Sv is", "of characters of the name of Sub_atom, and After is the number of", "= atom2.name self.data = [(n1, n2, n1 + n2)] return self.pick_data(atom1, atom2, atom12)", "which a variable X not occurring in Template or Goal would be instantiated", "= i.split('/') indicator = Compound('/', Atomic(n), Atomic(int(a))) from .. import core if core.unify(pi,", "try: chr(element.value) except UnicodeDecodeError: self.throw_representation_error(element) if isvariable(atom): chars = [chr(code.value) for code in", "a member of the set A or I or F and is false", "flag) if isatom(flag) and not core._FLAGS[flag.name]: self.throw_domain_error('prolog_flag', flag) self.flags = {f for f", "a set of variables defined recursively as follows: * if T is a", "module(x, y): '''Redefined w.r.t. 
Python because in ISO Prolog mod(x, y) is valid", "of the name.''' def execute(self, atom, codelist): if not isvariable(atom) and not isatom(atom):", "return False bijection = {} for a1, a2 in zip(t.value[1:], v.value[1:]): if isvariable(a1)", "and isinstance(before.value, int))): self.throw_type_error('integer', before) if (not isvariable(length) and not (isnumber(length) and isinstance(length.value,", "'float_integer_part/1' : float_integer_part, 'float_fractional_part/1' : float_fractional_part, '**/2' : power, 'sin/1' : math.sin, 'cos/1'", "true iff X is not a member of the set V.''' def execute(self,", "0 is defined only when y is an integer, and always returns a", "if isvariable(element): self.throw_instantiation_error() if not isvariable(number) and not isnumber(number): self.throw_type_error('number', number) if isvariable(number)", "if not isvariable(subatom) and not isatom(subatom): self.throw_type_error('atom', subatom) if (not isvariable(before) and not", "testing (ISO 8.3) ### class Var_1(BuiltIn): '''var(@term) var(X) is true iff X is", "is equal to the functor of the head of Clause.''' def execute(self, clause):", "import PrologEvaluationError raise PrologEvaluationError('undefined') from math import log return log(x) def squareroot(x): '''Redefined", "and List is the list whose head is the functor name of Term", "not values else List.from_list(values) return self.unify(values, instances) def ground(term, mgu): if isinstance(term, Variable):", "successive characters of the name of atom Atom.''' def execute(self, atom, charlist): if", "[(s[:i], s[i:], s)] elif isvariable(atom2): s = atom12.name if not s.startswith(atom1.name): return False", "not a1.name.startswith('_'): a = bijection.get(a1) if a is not None and a2 !=", "..builtin import search_builtin if search_builtin(head): pi = Compound('/', Atomic(head.name), Atomic(head.arity)) self.throw_permission_error('modify', 'static_procedure', pi)", "Atom can be broken into three pieces, AtomL, 
Sub_atom, and AtomR, such that", "such that the latter term results from replacing each variable X in the", "== y class TermNotIdentical_2(BuiltIn): \"\"\"'\\=='(@term, @term) Test the ordering of two terms. '\\=='(X,", "solutions of a goal for each different instantiation of the free variables in", "8.10) ### class Findall_3(BuiltIn): '''findall(?term, +callable_term, ?list) findall(Template, Goal, Instances) is true iff", "in wt_list] s_next = [e for e in self.s if e not in", "G of a term T is a term defined recursively as follows: *", "are integers. Also, we need to manage ZeroDivisionError errors on our own.''' if", "import PrologTypeError raise PrologTypeError('integer', Atomic(n)) if not isinstance(y, int): from ..core import PrologTypeError", "0: from ..core import PrologEvaluationError raise PrologEvaluationError('undefined') from math import sqrt return sqrt(x)", "s = atom12.name if not s.startswith(atom1.name): return False else: i = len(atom1.name) self.data", "if len(n) - n.index(d, p) - len(d) == after.value] if isinstance(subatom, Atomic): self.data", "not self.clauses: return False c = self.clauses.pop(0) return self.unify(Compound('clause', head, body), c) def", "term.name == '^' and term.arity == 2: return iterated_goal_term(term.value[2]) return term def isvariant(t,", "int): self.throw_type_error('integer', arity) # TODO Missing max_arity related error if isvariable(term) and arity.value", "not propagate renamed term variables bindings # outside the context of the copy_term/2", "Arithmetic evaluation (ISO 8.6) 'is/2' : Is_2, # Arithmetic comparison (ISO 8.7) '=:=/2'", "= ''.join([char.name for char in charlist.as_list()]) try: # the parser needs an End", "the database t = tuple(Variable('_') for i in range(arity.value)) c = Compound(name.name, *t)", "return isinstance(x, (Compound, List)) class Nonvar_1(BuiltIn): '''nonvar(@term) nonvar(X) is true iff X is", "(d, p) in self.data if d == subatom.value] if not self.data: return False", "two terms. 
'@<'(X, Y) is true iff X preceeds Y.\"\"\" def execute(self, x,", "if x < 0 and isinstance(y, float): from ..core import PrologEvaluationError raise PrologEvaluationError('undefined')", "self.indicators: return False # the order in which predicate indicators are found by", "X preceeds Y or X and Y are identical terms.\"\"\" def execute(self, x,", "if not isvariable(code) and not isinstance(code.value, int): self.throw_type_error('integer', code) if not isvariable(code): try:", "self.clauses = [] procedure = self.kb.search(head) if not procedure: return False if not", "reexecute(self): return True ### ### Atomic term processing (ISO 8.16) ### class AtomLength_2(BuiltIn):", "errors on our own.''' if not y: from ..core import PrologEvaluationError raise PrologEvaluationError('zero_divisor')", "chars = [deref(c).name for c in charlist.as_list()] return self.unify(atom, Atomic(''.join(chars))) elif isvariable(charlist) or", "class Is_2(BuiltIn): \"\"\"is(?term, @evaluable) 'is'(Result, Expression) is true iff the value of evaluating", "by new variables.''' def execute(self, template, goal, instances): if isvariable(goal): self.throw_instantiation_error() if isnumber(goal):", "Implementation defined hooks (ISO 8.17) 'set_prolog_flag/2' : SetPrologFlag_2, 'current_prolog_flag/2' : CurrentPrologFlag_2, 'halt/0' :", "and not ispartiallist(codelist): self.throw_type_error('list', codelist) for element in codelist.as_list(): if isvariable(element): self.throw_instantiation_error() if", "n) if not isinstance(term, Compound): self.throw_type_error('compound', term) if n.value < 0: self.throw_domain_error('not_less_than_zero', n)", "self.data.append((n[start:i], start)) start += 1 if isinstance(before, Atomic): self.data = [(d, p) for", "hooks (ISO 8.17) ### class SetPrologFlag_2(BuiltIn): '''set_prolog_flag(+flag, @nonvar) A goal set_prolog_flag(Flag, Value) enables", "not isvariable(number) and not isnumber(number): self.throw_type_error('number', number) if isvariable(number) 
and not islist(codelist) and", "self.kb.search(head) if not procedure: return False from .. import core for clause in", "self.kb.abolish(pi) return True ### ### All solutions (ISO 8.10) ### class Findall_3(BuiltIn): '''findall(?term,", "manage ZeroDivisionError errors on our own.''' if not y: from ..core import PrologEvaluationError", "integers is equivalent to intdiv(x, y). Also, we need to manage ZeroDivisionError errors", "self.throw_instantiation_error() if not islist(elements) and not ispartiallist(elements): self.throw_type_error('list', elements) if isvariable(term) and islist(elements)", "core.Caller() caller._kb = self.kb values = [] result = caller.solve(goal) while result: from", "for t2 just in # case t2 were a renamed variable (e.g. coming", "sub_atom(Atom, Before, Length, After, Sub_atom) is true iff atom Atom can be broken", "if isvariable(element): self.throw_instantiation_error() if not isvariable(element): try: chr(element.value) except UnicodeDecodeError: self.throw_representation_error(element) if isvariable(atom):", ": float, 'abs/1' : operator.abs, 'sign/1' : sign, 'float_integer_part/1' : float_integer_part, 'float_fractional_part/1' :", "List is a list whose elements correspond to the successive characters of the", "if isinstance(term, Atomic) or isvariable(term): return s if term.name == '^' and term.arity", "CurrentPrologFlag_2(BuiltIn): '''current_prolog_flag(?flag, ?term) current_prolog_flag(Flag, Value) is true iff Flag is a flag supported", "variables of the latter such that the latter term results from replacing each", "== List.EMPTY_LIST else s.as_list() solutions = list(set(solutions)) solutions.sort() return solutions ### ### Logic", "from ..core import PrologEvaluationError raise PrologEvaluationError('zero_divisor') if isinstance(x, int) and isinstance(y, int): return", "class NotUnifiable_2(BuiltIn): \"\"\"'\\\\='(@term, @term) If X and Y are NSTO (Not Subject To", "Test the ordering of two terms. 
'@>'(X, Y) is true iff Y preceeds", "length.value < 0: self.throw_domain_error('not_less_than_zero', length) size = Atomic(len(atom.name)) return self.unify(length, size) class AtomConcat_3(BuiltIn):", "not isinstance(y, int): from ..core import PrologTypeError raise PrologTypeError('integer', Atomic(s)) return x |", "isvariable(atom2): s = atom12.name self.data = [(s[:i], s[i:], s) for i in range(len(s)", "evaluation (ISO 8.6) ### Simple arithmetic functors (ISO 9.1) ### Other arithmetic functors", "character codes corresponding to a character sequence of Number which could be output.'''", "'''current_predicate(?predicate_indicator) current_predicate(PI) is true iff PI is a predicate indicator for one of", "return False return self.pick_data(atom1, atom2, atom12) def pick_data(self, atom1, atom2, atom12): c =", "(ISO 8.10) 'findall/3' : Findall_3, 'bagof/3' : Bagof_3, 'setof/3' : Setof_3, # Logic", "= Compound('/', Atomic(head.name), Atomic(head.arity)) self.throw_permission_error('modify', 'static_procedure', pi) self.kb.assert_clause(clause, append=False) return True class Assertz_1(BuiltIn):", ".. import core caller = core.Caller() caller._kb = self.kb result = caller.solve(term) return", "modf f, i = modf(x) return f def power(x, y): '''Redefined w.r.t. Python", "codelist.as_list()] return self.unify(atom, Atomic(''.join(chars))) elif isvariable(codelist) or ispartiallist(codelist): codes = [Atomic(ord(char)) for char", "Integer_1(BuiltIn): '''integer(@term) integer(X) is true iff X is a member of the set", "as a list the solutions of Goal for each different instantiation of the", "'-/1' : operator.neg, '//2' : divide, '///2' : intdivide, 'mod/2' : module, 'rem/2'", "is the empty set.''' s = set() if isinstance(term, Atomic) or isvariable(term): return", "..core import PrologEvaluationError raise PrologEvaluationError('zero_divisor') if not isinstance(x, int): from ..core import PrologTypeError", "procedure: return False from .. 
import core for clause in procedure.clauses(): h, b", "Prolog mod(x, y) is valid only when x and y are integers. Also,", "of V and the existential variables set of the term G * else", "Missing max_arity related error if isvariable(term) and arity.value < 0: self.throw_domain_error('not_less_than_zero', arity) if", "PrologTypeError raise PrologTypeError('integer', Atomic(n)) if not isinstance(y, int): from ..core import PrologTypeError raise", "list whose elements correspond to the successive characters of the name of atom", "and isinstance(v, Compound): if t.name != v.name or t.arity != v.arity: return False", "(isvariable(x) and isvariable(y) and x.name == '_' and y.name == '_'): return True", "= atom12.name self.data = [(s[:i], s[i:], s) for i in range(len(s) + 1)]", "from ..core import PrologEvaluationError raise PrologEvaluationError('undefined') if not x and y < 0:", "the corresponding arithmetic value of E1 is less than or equal to the", "y class TermGreaterThan_2(BuiltIn): \"\"\"'@>(@term, @term) Test the ordering of two terms. '@>'(X, Y)", "if not isvariable(flag) and not isatom(flag): self.throw_type_error('atom', flag) if isatom(flag) and not core._FLAGS[flag.name]:", "access permission to a user-defined # procedure is handled directly by the database", "0: self.throw_type_error('atom', name) if isinstance(term, Atomic): return self.unify(term, name) and self.unify(arity, Atomic(0)) if", "name) if isatom(name) and arity.value > 0: t = (Variable('_') for i in", "x): return x.arity == 0 and isinstance(x.value, int) class Float_1(BuiltIn): '''float(@term) float(X) is", "variables if (isvariable(x) and isvariable(y) and x.name == '_' and y.name == '_'):", "name of the atom Atom_12 are the result of concatenating the characters of", "@term) Test the ordering of two terms. 
'@=<'(X, Y) is true iff X", "Atomic(head.name), Atomic(head.arity)) self.throw_permission_error('modify', 'static_procedure', pi) self.kb.assert_clause(clause, append=False) return True class Assertz_1(BuiltIn): '''assertz(@clause) assertz(Clause)", "execute(self, atom, length): if isvariable(atom): self.throw_instantiation_error() if not isatom(atom): self.throw_type_error('atom', atom) if (not", "[] if s == List.EMPTY_LIST else s.as_list() solutions = list(set(solutions)) solutions.sort() return solutions", "logarithm, 'sqrt/1' : squareroot, '>>/2' : rightshift, '<</2' : leftshift, '/\\\\/2' : bitand,", "'sub_atom/5' : SubAtom_5, 'atom_chars/2' : AtomChars_2, 'atom_codes/2' : AtomCodes_2, 'char_code/2' : CharCode_2, 'number_chars/2'", "variable set of T.''' vst = variable_set(t) vsv = variable_set(v) est = existential_variable_set(t)", "as an expression is Result.\"\"\" def execute(self, result, expression): if isvariable(expression): self.throw_instantiation_error() c", "a list the solutions of a goal for each different instantiation of the", "atom.name == ''.join(chars) class CharCode_2(BuiltIn): '''char_code(+character, ?character_code)\\nchar_code(-character, +character_code) char_code(Char, Code) is true iff", "list(str(number.value)) # FIXME this should use write_canonical/1 lst = [Atomic(ord(c)) for c in", "CT.''' def execute(self, x): return isinstance(x, Atomic) class Compound_1(BuiltIn): '''compound(@term) compound(X) is true", "+ 1)] elif isvariable(atom1): s = atom12.name if not s.endswith(atom2.name): return False else:", "value else: return ground(term.binding(), mgu) if isinstance(term, Compound): args = [] for arg", "elements == List.EMPTY_LIST: self.throw_domain_error('non_empty_list', elements) # TODO Missing max_arity related error if isinstance(term,", "ispartiallist(charlist): chars = [Atomic(c) for c in atom.name] return self.unify(charlist, List.from_list(chars)) else: chars", "the set V.''' def execute(self, x): return isvariable(x) class 
Atom_1(BuiltIn): '''atom(@term) atom(X) is", "X.\"\"\" def execute(self, x, y): return x > y class TermGreaterThanOrEqual_2(BuiltIn): \"\"\"'@>=(@term, @term)", "'@>'(X, Y) is true iff Y preceeds X.\"\"\" def execute(self, x, y): return", "atom Atom, and the value of each element is the character code for", "isatom(flag): self.throw_type_error('atom', flag) from .. import core # for flags if flag.name not", "in codelist.as_list()] return self.unify(atom, Atomic(''.join(chars))) elif isvariable(codelist) or ispartiallist(codelist): codes = [Atomic(ord(char)) for", "sqrt return sqrt(x) def rightshift(n, s): '''Redefined w.r.t. Python because we need to", "name of atom Atom, and the value of each element is the character", "the latter term results from replacing each variable X in the former by", "and a2 != a: return False else: bijection[a1] = a2 else: if not", "if T is a variable or an atomic term, then EV is the", "and all its clauses, leaving the database in the same state as if", "# FIXME \\+ does not work because of what is probably a parser", "if search_builtin(head): pi = Compound('/', Atomic(head.name), Atomic(head.arity)) self.throw_permission_error('modify', 'static_procedure', pi) self.clauses_to_unify = []", "Body.''' def execute(self, head, body): if isvariable(head): self.throw_instantiation_error() if not iscallable(head): self.throw_type_error('callable', head)", "subatom): self.reset_substitution() if not self.data: return False return self.pick_data(atom, before, length, after, subatom)", "parentheses! 
'\\+/1' : Not_1, 'not/1' : Not_1, 'repeat/0' : Repeat_0, # Atomic term", "if isvariable(head): self.throw_instantiation_error() if isnumber(head): self.throw_type_error('callable', head) # error on access permission to", "isvariable(term) and islist(elements) and isvariable(elements.head): self.throw_instantiation_error() if islist(elements) and not isatom(elements.head) and len(elements)", "and Y are NSTO (Not Subject To Occur-check) then '='(X, Y) is true", "(core.unify(h, head) is not None and core.unify(b, body) is not None): self.clauses_to_unify.append(Compound('clause', h,", "int): from ..core import PrologTypeError raise PrologTypeError('integer', Atomic(n)) if not isinstance(y, int): from", "true iff atom Atom can be broken into three pieces, AtomL, Sub_atom, and", "valid only when x and y are integers. Also, we need to manage", "self.throw_type_error('atom', atom) if not isvariable(subatom) and not isatom(subatom): self.throw_type_error('atom', subatom) if (not isvariable(before)", "could be output.''' def execute(self, number, charlist): if isvariable(number) and ispartiallist(charlist): self.throw_instantiation_error() if", "else: chars = list(str(number.value)) # FIXME this should use write_canonical/1 lst = [Atomic(ord(c))", "could be output.''' def execute(self, number, codelist): if isvariable(number) and ispartiallist(codelist): self.throw_instantiation_error() if", "The Python __ne__ method does not hold Prolog # semantics for anonymous variables", "class NumberChars_2(BuiltIn): '''number_chars(+number, ?character_list)\\nnumber_chars(-number, +character_list) number_chars(Number, List) is true iff List is a", "a = i.split('/') indicator = Compound('/', Atomic(n), Atomic(int(a))) from .. 
import core if", "f.allowed: culprit = Compound('+', flag, value) self.throw_domain_error('flag_value', culprit) core._FLAGS[flag.name] = f._replace(value=value.name) return True", "undefined.''' def execute(self, template, goal, instances): fvs = free_variable_set(goal, template) self.witness = Compound('witness',", "core return core.unify(x, y) is None ### ### Type testing (ISO 8.3) ###", "a term T with respect to a term V is a set of", ": TermIdentical_2, '\\==/2' : TermNotIdentical_2, '@</2' : TermLessThan_2, '@>/2' : TermGreaterThan_2, '@>=/2' :", "X is not a member of the set V.''' def execute(self, x): return", "if not x and y < 0: from ..core import PrologEvaluationError raise PrologEvaluationError('undefined')", "set V.''' def execute(self, x): return isvariable(x) class Atom_1(BuiltIn): '''atom(@term) atom(X) is true", "renaming) temp = mgu[t2.name] mgu.reduce() mgu.update({t2.name : temp}) else: mgu.reduce() self.substitution.update(mgu) return True", "def execute(self, pi): if isvariable(pi): self.throw_instantiation_error() if pi.name == '/' and pi.arity ==", "[] result = caller.solve(goal) while result: from copy import deepcopy v = ground(deepcopy(template),", "list if elements.name == '.' 
and elements.arity == 2: if len(elements) == 1:", "CopyTerm_2, # Arithmetic evaluation (ISO 8.6) 'is/2' : Is_2, # Arithmetic comparison (ISO", "List) is true iff: * Term is an atomic term and List is", ": Unify_2, '\\=/2' : NotUnifiable_2, # Type testing (ISO 8.3) 'var/1' : Var_1,", ": NotUnifiable_2, # Type testing (ISO 8.3) 'var/1' : Var_1, 'atom/1' : Atom_1,", ": Atomic_1, 'compound/1' : Compound_1, 'nonvar/1' : Nonvar_1, 'number/1' : Number_1, # Term", "length, after, subatom): self.reset_substitution() if not self.data: return False return self.pick_data(atom, before, length,", "evaluate_expression(expression) return self.unify(result, Atomic(c)) def evaluate_expression(term): # TODO No overflow/underflow errors # TODO", "own.''' if not isinstance(x, int): from ..core import PrologTypeError raise PrologTypeError('integer', Atomic(x)) return", "characters of the name of AtomL, Length is the number of characters of", "== List.EMPTY_LIST: self.throw_domain_error('non_empty_list', elements) # TODO Missing max_arity related error if isinstance(term, Atomic):", "Test the ordering of two terms. 
'@=<'(X, Y) is true iff X preceeds", "== 0 and isinstance(x.value, float) class Atomic_1(BuiltIn): '''atomic(@term) atomic(X) is true if X", "a goal for each different instantiation of the free variables in that goal.", "[(d, p) for (d, p) in self.data if n.index(d, p) == before.value] if", "atom Atom.''' def execute(self, atom, length): if isvariable(atom): self.throw_instantiation_error() if not isatom(atom): self.throw_type_error('atom',", "v = ground(deepcopy(template), caller.currsubst()) #values.append(core.renamed_copy(v)) values.append(v._copy_term()) result = caller.solve_next() values = List.EMPTY_LIST if", "Each list is a sorted list, but the order in which each list", "is true iff X preceeds Y.\"\"\" def execute(self, x, y): return x <", "passing the value of X as a message.''' def execute(self, x): if isvariable(x):", "Term is Arg.''' def execute(self, n, term, arg): if isvariable(n) or isvariable(term): self.throw_instantiation_error()", "TermLessThan_2(BuiltIn): \"\"\"'@<'(@term, @term) Test the ordering of two terms. '@<'(X, Y) is true", "Python because in ISO Prolog float_fractional_part(x) is valid only when x is a", ">> s def leftshift(n, s): '''Redefined w.r.t. Python because we need to manage", "E1 is less than the corresponding arithmetic value of E2.\"\"\" def execute(self, e1,", "and not isinstance(code.value, int): self.throw_type_error('integer', code) if not isvariable(code): try: chr(code.value) except UnicodeDecodeError:", "False return self.unify(arg, term.value[n.value]) class Univ_2(BuiltIn): \"\"\"'=..'(+nonvar, ?list) '=..'(-nonvar, +list) '=..'(Term, List) is", "a compound term then Sv is the union of the variable sets for", "self.throw_type_error('atomic', name) if isvariable(term) and not isatom(name) and arity.value > 0: self.throw_type_error('atom', name)", "It is used to add Clause to the database before all existing clauses", "or an atomic term, then EV is the empty set * else if", "self.s if e not in wt_list] from .. 
import core for wwtt, t", "sign(x) must return the same type of number as its input.''' if not", "charlist.as_list(): if isvariable(element): self.throw_instantiation_error() if isatom(element) and len(element.name) != 1: self.throw_type_error('character', element) if", "one dynamic procedure with a clause Clause which unifies with Head :- Body.", "n = PrologParser(s + '.').read_term() return self.unify(number, n) except InvalidTermException as e: self.throw_syntax_error(Atomic(s))", "PrologTypeError('evaluable', Atomic(pi)) return functor(*args) from ..core import PrologTypeError raise PrologTypeError('number', term) def search_evaluable_functor(name):", "False class CopyTerm_2(BuiltIn): '''copy_term(?term, ?term) copy_term(Term_1, Term_2) is true iff Term_2 unifies with", "from ..core import PrologTypeError raise PrologTypeError('integer', Atomic(s)) return x | y def bitnot(x):", "and not isatom(elements.head) and len(elements) > 1: self.throw_type_error('atom', elements.head) if islist(elements) and isinstance(elements.head,", "renamed_copy(head) class CurrentPredicate_1(BuiltIn): '''current_predicate(?predicate_indicator) current_predicate(PI) is true iff PI is a predicate indicator", "from ..core import deref chars = [deref(c).name for c in charlist.as_list()] return self.unify(atom,", "= existential_variable_set(t) return vst.difference(vsv.union(est)) # TODO This should be distributed onto the Term", "arithmetic functors (ISO 9.3) ### Bitwise functors (ISO 9.4) ### class Is_2(BuiltIn): \"\"\"is(?term,", "and islist(elements) and isvariable(elements.head): self.throw_instantiation_error() if islist(elements) and not isatom(elements.head) and len(elements) >", "core.unify(b, body) is not None): self.clauses.append(Compound('clause', h, b)) return self.pick_clause(head, body) def reexecute(self,", "est = existential_variable_set(t) return vst.difference(vsv.union(est)) # TODO This should be distributed onto the", "list whose elements are the one-char atoms 
whose names are the successive characters", "renamed_copy return renamed_copy(head) class CurrentPredicate_1(BuiltIn): '''current_predicate(?predicate_indicator) current_predicate(PI) is true iff PI is a", "= list(str(number.value)) # FIXME this should use write_canonical/1 lst = [Atomic(ord(c)) for c", "at least one dynamic procedure with a clause Clause which unifies with Head", "corresponding arithmetic values are equal.\"\"\" def execute(self, e1, e2): if isvariable(e1) or isvariable(e2):", "(not islist(instances) and not ispartiallist(instances))): self.throw_type_error('list', instances) from .. import core caller =", "class CurrentPrologFlag_2(BuiltIn): '''current_prolog_flag(?flag, ?term) current_prolog_flag(Flag, Value) is true iff Flag is a flag", "self.unify(char, Atomic(c)) elif isvariable(code): c = ord(char.name) return self.unify(code, Atomic(c)) else: return ord(char.name)", "< 0: from ..core import PrologEvaluationError raise PrologEvaluationError('undefined') return float(x ** y) def", "the set I or F and is false if X is a member", "float_integer_part(x): '''Redefined w.r.t. Python because in ISO Prolog float_integer_part(x) is valid only when", "member of the set V.''' def execute(self, x): return isvariable(x) class Atom_1(BuiltIn): '''atom(@term)", "of T.''' vst = variable_set(t) vsv = variable_set(v) est = existential_variable_set(t) return vst.difference(vsv.union(est))", "which corresponds to a term H :- B which unifies with Head :-", "of two terms. '@>'(X, Y) is true iff Y preceeds X.\"\"\" def execute(self,", "EV is the empty set.''' s = set() if isinstance(term, Atomic) or isvariable(term):", "renamed variable (e.g. 
coming # from a clause renaming) temp = mgu[t2.name] mgu.reduce()", "else G is T.''' if term.name == '^' and term.arity == 2: return", "Prolog x // y is valid only when x and y are integers.", "class NumberCodes_2(BuiltIn): '''number_codes(+number, ?character_code_list)\\nnumber_codes(-number, ?character_code_list) number_codes(Number, List) is true iff List is a", "iff Y preceeds X.\"\"\" def execute(self, x, y): return x > y class", "int): from ..core import PrologTypeError raise PrologTypeError('integer', Atomic(n)) if not isinstance(s, int): from", "= self.kb result = caller.solve(term) return not result class Repeat_0(BuiltIn): '''repeat repeat is", "0 and isinstance(x.value, float) class Atomic_1(BuiltIn): '''atomic(@term) atomic(X) is true if X is", "not None: self.indicators.append(indicator) return self.pick_indicator(pi) def reexecute(self, pi): self.reset_substitution() return self.pick_indicator(pi) def pick_indicator(self,", "used to remove those unifying clauses from the database.''' def execute(self, clause): if", "TermLessThanOrEqual_2(BuiltIn): \"\"\"'@=<'(@term, @term) Test the ordering of two terms. 
'@=<'(X, Y) is true", "List) is true iff List is a list whose elements correspond to the", "range(arity.value)) c = Compound(name.name, *t) return self.unify(term, c) return False class Arg_3(BuiltIn): '''arg(+integer,", "+character_list) number_chars(Number, List) is true iff List is a list whose elements are", "is true iff X and Y are identical terms.\"\"\" def execute(self, x, y):", "F and is false if X is a member of the set V,", "\"\"\"'=..'(+nonvar, ?list) '=..'(-nonvar, +list) '=..'(Term, List) is true iff: * Term is an", "isinstance(y, int): from ..core import PrologTypeError raise PrologTypeError('integer', Atomic(y)) return x // y", "Arg_3(BuiltIn): '''arg(+integer, +compound_term, ?term) arg(N, Term, Arg) is true iff the Nth argument", "else: # a list for e in term.as_list(): s.update(variable_set(e)) return s def existential_variable_set(term):", "div(x, y) with x and y integers is equivalent to intdiv(x, y). Also,", ": math.trunc, 'float/1' : float, 'abs/1' : operator.abs, 'sign/1' : sign, 'float_integer_part/1' :", "to intdiv(x, y). 
Also, we need to manage ZeroDivisionError errors on our own.'''", "ISO Prolog x // y is valid only when x and y are", "..core import PrologTypeError raise PrologTypeError('integer', Atomic(x)) if not isinstance(y, int): from ..core import", "import PrologEvaluationError raise PrologEvaluationError('zero_divisor') if isinstance(x, int) and isinstance(y, int): return x //", "result = caller.solve(goal) while result: from copy import deepcopy v = ground(deepcopy(template), caller.currsubst())", "Term_2) is true iff Term_2 unifies with a term T which is a", "8.16) 'atom_length/2' : AtomLength_2, 'atom_concat/3' : AtomConcat_3, 'sub_atom/5' : SubAtom_5, 'atom_chars/2' : AtomChars_2,", "def execute(self, head, body): if isvariable(head): self.throw_instantiation_error() if not iscallable(head): self.throw_type_error('callable', head) if", "(ISO 9.1) ### Other arithmetic functors (ISO 9.3) ### Bitwise functors (ISO 9.4)", "list the solutions of a goal for each different instantiation of the free", "in charlist.as_list()] return self.unify(atom, Atomic(''.join(chars))) elif isvariable(charlist) or islist(charlist) or ispartiallist(charlist): chars =", "def ground(term, mgu): if isinstance(term, Variable): if not term.value: value = mgu.get(term.name) if", "Atomic) and isinstance(v, Atomic): return t == v if isvariable(t) and isvariable(v): return", "List)) class Nonvar_1(BuiltIn): '''nonvar(@term) nonvar(X) is true iff X is not a member", "to manage TypeError errors (e.g. 
x or y as float) on our own.'''", "Atom_1, 'integer/1' : Integer_1, 'float/1' : Float_1, 'atomic/1' : Atomic_1, 'compound/1' : Compound_1,", "t2 retain validity # only in the context of the copy_term/2 built-in mgu", "return True return False PREDICATES = { # Term unification (ISO 8.2) '=/2'", "values = [] result = caller.solve(goal) while result: from copy import deepcopy v", "body = clause.value[2] else: head = clause body = Atomic.TRUE return self.pick_clause(head, body)", "if isnumber(head): self.throw_type_error('callable', head) # error on access permission to a user-defined #", "element in codelist.as_list(): if isvariable(element): self.throw_instantiation_error() if not isvariable(element): try: chr(element.value) except UnicodeDecodeError:", "one-char atoms corresponding to a character sequence of Number which could be output.'''", "'''set_prolog_flag(+flag, @nonvar) A goal set_prolog_flag(Flag, Value) enables the value associated with a Prolog", "length) size = Atomic(len(atom.name)) return self.unify(length, size) class AtomConcat_3(BuiltIn): '''atom_concat(?atom, ?atom, +atom)\\natom_concat(+atom, +atom,", "Sub_atom, and AtomR, such that Before is the number of characters of the", "# Can't directly use BuiltIn.unify because the bindings # between the renamed copy", "s.update(variable_set(e)) return s def existential_variable_set(term): '''The existential variables set EV of a term", "arithmetic values are equal.\"\"\" def execute(self, e1, e2): if isvariable(e1) or isvariable(e2): self.throw_instantiation_error()", "* else if T unifies with ^(V, G) then EV is the union", "Term comparison (ISO 8.4) '@=</2' : TermLessThanOrEqual_2, '==/2' : TermIdentical_2, '\\==/2' : TermNotIdentical_2,", "isinstance(x, int) else 0.0 from math import copysign s = copysign(1, x) return", "in range(len(s) + 1)] elif isvariable(atom1): s = atom12.name if not s.endswith(atom2.name): return", "if isinstance(name, Atomic) and arity.value == 0: return self.unify(term, name) if 
isatom(name) and", "our own.''' if not x: from ..core import PrologEvaluationError raise PrologEvaluationError('undefined') from math", "= evaluate_expression(e2) return v1 != v2 class ArithmeticLessThan_2(BuiltIn): \"\"\"'<'(@evaluable, @evaluable) '<'(E1, E2) is", "p) in self.data if len(d) == length.value] if isinstance(after, Atomic): self.data = [(d,", "code): if isvariable(char) and isvariable(code): self.throw_instantiation_error() if not isvariable(char) and len(char.name) != 1:", "isinstance(y, int): from ..core import PrologTypeError raise PrologTypeError('integer', Atomic(s)) return x & y", "else: return False class CopyTerm_2(BuiltIn): '''copy_term(?term, ?term) copy_term(Term_1, Term_2) is true iff Term_2", "length) if (not isvariable(after) and not (isnumber(after) and isinstance(after.value, int))): self.throw_type_error('integer', after) if", "self.throw_domain_error('not_less_than_zero', length) if isnumber(after) and after.value < 0: self.throw_domain_error('not_less_than_zero', after) n = atom.name", "Before is the number of characters of the name of AtomL, Length is", "return True ### ### Atomic term processing (ISO 8.16) ### class AtomLength_2(BuiltIn): '''atom_length(+atom,", "isvariable(term) and isvariable(arity): self.throw_instantiation_error() if isvariable(term) and not isinstance(arity.value, int): self.throw_type_error('integer', arity) #", "the name of the atom Atom_12 are the result of concatenating the characters", "isvariable(a1) and isvariable(a2) and not a1.name.startswith('_'): a = bijection.get(a1) if a is not", "PrologTypeError('float', Atomic(x)) from math import modf f, i = modf(x) return f def", "H :- B which unifies with Head :- Body.''' def execute(self, head, body):", "evaluate_expression(e2) return v1 == v2 class ArithmeticNotEqual_2(BuiltIn): \"\"\"'=\\='(@evaluable, @evaluable) '=\\='(E1, E2) is true", "if not isvariable(char) and len(char.name) != 1: self.throw_type_error('character', char) if not isvariable(code) 
and", "from ..core import PrologTypeError raise PrologTypeError('integer', Atomic(n)) if not isinstance(s, int): from ..core", "head of Clause.''' def execute(self, clause): head = clause.value[1] if clause.predicate_indicator() == ':-/2'", "d == subatom.value] if not self.data: return False return self.pick_data(atom, before, length, after,", "isvariable(name) or isvariable(arity): self.throw_instantiation_error() if not isinstance(arity.value, int): self.throw_type_error('integer', arity) if not isatom(name):", "'''atom_chars(+atom, ?character_list)\\natom_chars(-atom, +character_list) atom_chars(Atom, List) is true iff List is a list whose", "chars = list(str(number.value)) # FIXME this should use write_canonical/1 lst = [Atomic(ord(c)) for", "of t1 and t2 retain validity # only in the context of the", "identical terms.\"\"\" def execute(self, x, y): # The Python __ne__ method does not", "TermGreaterThan_2(BuiltIn): \"\"\"'@>(@term, @term) Test the ordering of two terms. '@>'(X, Y) is true", "not isinstance(code.value, int): self.throw_type_error('integer', code) if not isvariable(code): try: chr(code.value) except UnicodeDecodeError: self.throw_representation_error(code)", "self.pick_indicator(pi) def pick_indicator(self, pi): if not self.indicators: return False # the order in", "int))): self.throw_type_error('integer', before) if (not isvariable(length) and not (isnumber(length) and isinstance(length.value, int))): self.throw_type_error('integer',", "TermGreaterThan_2, '@>=/2' : TermGreaterThanOrEqual_2, # Term creation and decomposition (ISO 8.5) 'functor/3' :", "isvariable(term): from ..core import PrologInstantiationError raise PrologInstantiationError() if term.arity == 0 and term._isnumber():", "of the copy_term/2 built-in mgu = core.unify(t2, t) if mgu is not None:", "AtomChars_2, 'atom_codes/2' : AtomCodes_2, 'char_code/2' : CharCode_2, 'number_chars/2' : NumberChars_2, 'number_codes/2' : NumberCodes_2,", "isvariable(term) and isvariable(name): 
self.throw_instantiation_error() if isvariable(term) and isvariable(arity): self.throw_instantiation_error() if isvariable(term) and not", "set_prolog_flag(Flag, Value) enables the value associated with a Prolog flag to be altered.'''", "'static_procedure', pi) self.kb.assert_clause(clause, append=False) return True class Assertz_1(BuiltIn): '''assertz(@clause) assertz(Clause) is true. It", "with x < 0 is defined only when y is an integer, and", "and control (ISO 8.15) ### class Not_1(BuiltIn): \"\"\"not(@callable_term) not(Term) is true iff call(Term)", "union of the variable set of V and the existential variable set of", "instantiated by successive re-executions of \"call(Goal), X=Template\" after systematic replacement of all variables", "then '\\\\='(X, Y) is true iff X and Y are not unifiable.\"\"\" def", "isvariable(e2): self.throw_instantiation_error() v1 = evaluate_expression(e1) v2 = evaluate_expression(e2) return v1 <= v2 class", "isvariable(number) or islist(codelist): from ..parser import PrologParser, InvalidTermException s = ''.join([chr(code.value) for code", "not isatom(atom1): self.throw_type_error('atom', atom1) if not isvariable(atom2) and not isatom(atom2): self.throw_type_error('atom', atom2) if", "the value of evaluating Expression as an expression is Result.\"\"\" def execute(self, result,", "int) class Float_1(BuiltIn): '''float(@term) float(X) is true iff X is a member of", "c = ord(char.name) return self.unify(code, Atomic(c)) else: return ord(char.name) == code.value class NumberChars_2(BuiltIn):", "of number as its input.''' if not x: return 0 if isinstance(x, int)", "n1 = atom1.name n2 = atom2.name self.data = [(n1, n2, n1 + n2)]", "codelist): if not isvariable(atom) and not isatom(atom): self.throw_type_error('atom', atom) if isvariable(atom): if ispartiallist(codelist):", "the empty set * else if T unifies with ^(V, G) then EV", "s == List.EMPTY_LIST else s.as_list() class Setof_3(Bagof_3): '''setof(?term, +callable_term, ?list) 
setof/3 assembles as", "is Arg.''' def execute(self, n, term, arg): if isvariable(n) or isvariable(term): self.throw_instantiation_error() if", "control (ISO 8.15) # FIXME \\+ does not work because of what is", "member of the set V, A, or CT.''' def execute(self, x): return isnumber(x)", "x | y def bitnot(x): '''Redefined w.r.t. Python because we need to manage", "'assertz/1' : Assertz_1, 'retract/1' : Retract_1, 'abolish/1' : Abolish_1, # All solutions (ISO", "*elements.as_list()[1:]) return self.unify(term, t) else: return False else: return False class CopyTerm_2(BuiltIn): '''copy_term(?term,", "bitand, '\\\\//2' : bitor, '\\\\/1' : bitnot} return d.get(name) def divide(x, y): '''Redefined", "each list are in order of solution, but the order in which each", "because we need to manage ValueError errors (e.g. for x < 0) on", "argument of Term is Arg.''' def execute(self, n, term, arg): if isvariable(n) or", "def pick_bag(self, template, goal, instances): wt = self.s[0] wt_list = [e for e", "w.r.t. Python because in ISO Prolog float_fractional_part(x) is valid only when x is", "self.throw_representation_error(code) if isvariable(char): c = chr(code.value) return self.unify(char, Atomic(c)) elif isvariable(code): c =", "if isvariable(x): self.throw_instantiation_error() if not isvariable(x) and not isnumber(x) and not isinstance(x.value, int):", "sets for each of the arguments of T.''' from ..core import deref if", "(self.unify(before, Atomic(b)) and self.unify(length, Atomic(l)) and self.unify(after, Atomic(a)) and self.unify(subatom, Atomic(s))) class AtomChars_2(BuiltIn):", "of Term_1.''' def execute(self, t1, t2): from .. 
import core #t = core.renamed_copy(t1)", "clause.value[2] else: head = clause body = Atomic.TRUE return self.pick_clause(head, body) def pick_clause(self,", "charlist.as_list()] return atom.name == ''.join(chars) class AtomCodes_2(BuiltIn): '''atom_codes(+atom, ?character_code_list)\\natom_codes(-atom, +character_code_list) atom_codes(Atom, List) is", "corresponding character of the name.''' def execute(self, atom, codelist): if not isvariable(atom) and", "is not None): self.clauses.append(Compound('clause', h, b)) return self.pick_clause(head, body) def reexecute(self, head, body):", "args.append(ground(arg, mgu)) return Compound(term.name, *args) return term class Bagof_3(BuiltIn): '''bagof(?term, +callable_term, ?list) bagof(Template,", "n << s def bitand(x, y): '''Redefined w.r.t. Python because we need to", "is a compound term with a functor whose identifier is Name and arity", "atomic(X) is true if X is a member of the set A or", "value: return value else: return ground(term.binding(), mgu) if isinstance(term, Compound): args = []", "not isinstance(y, int): from ..core import PrologTypeError raise PrologTypeError('integer', Atomic(y)) return x //", "term.value[1:]: args.append(ground(arg, mgu)) return Compound(term.name, *args) return term class Bagof_3(BuiltIn): '''bagof(?term, +callable_term, ?list)", "x): return isatom(x) class Integer_1(BuiltIn): '''integer(@term) integer(X) is true iff X is a", "List.from_list(values) return self.unify(values, instances) def ground(term, mgu): if isinstance(term, Variable): if not term.value:", "not ispartiallist(codelist): self.throw_type_error('list', codelist) for element in codelist.as_list(): if isvariable(element): self.throw_instantiation_error() if not", "isinstance(x, int) and isinstance(y, int): return x // y return x / y", "the characters of the name of the atom Atom_2 to the characters of", "# The Python __ne__ method does not hold Prolog # semantics for anonymous", "'copy_term/2' : CopyTerm_2, # Arithmetic 
evaluation (ISO 8.6) 'is/2' : Is_2, # Arithmetic", "or CT.''' def execute(self, x): return isnumber(x) ### ### Term comparison (ISO 8.4)", "Prolog, passing the value of X as a message.''' def execute(self, x): if", "in the former by Xs.''' from ..core import deref t = deref(t) v", "to the functor of the head of Clause.''' def execute(self, clause): head =", "# TODO No overflow/underflow errors # TODO No undefined errors if isvariable(term): from", "integers. Also, we need to manage ZeroDivisionError errors on our own.''' if not", "name) if isvariable(term) and not isatom(name) and arity.value > 0: self.throw_type_error('atom', name) if", "Goal, Instances) is true iff Instances unifies with the list of values to", "self.clauses: return False c = self.clauses.pop(0) return self.unify(Compound('clause', head, body), c) def convert_clause_to_term(head,", "with a functor whose identifier is Name and arity Arity, or * Term", "procedure whose predicate is equal to the functor of the head of Clause.'''", "value) self.throw_domain_error('flag_value', culprit) core._FLAGS[flag.name] = f._replace(value=value.name) return True class CurrentPrologFlag_2(BuiltIn): '''current_prolog_flag(?flag, ?term) current_prolog_flag(Flag,", "a member of the set V or CT.''' def execute(self, x): return isinstance(x,", "X preceeds Y.\"\"\" def execute(self, x, y): return x < y class TermGreaterThan_2(BuiltIn):", "true iff Y preceeds X.\"\"\" def execute(self, x, y): return x > y", "return v1 < v2 class ArithmeticLessThanOrEqual_2(BuiltIn): \"\"\"'=<'(@evaluable, @evaluable) '=<'(E1, E2) is true iff", "for code in codelist.as_list()]) try: # the parser needs an End Token n", "length) if isnumber(length) and length.value < 0: self.throw_domain_error('not_less_than_zero', length) size = Atomic(len(atom.name)) return", "= evaluate_expression(e1) v2 = evaluate_expression(e2) return v1 < v2 class ArithmeticLessThanOrEqual_2(BuiltIn): \"\"\"'=<'(@evaluable, @evaluable)", "= self.kb.search(head) if 
not procedure: return False if not procedure._public: pi = Compound('/',", "v2 = evaluate_expression(e2) return v1 == v2 class ArithmeticNotEqual_2(BuiltIn): \"\"\"'=\\='(@evaluable, @evaluable) '=\\='(E1, E2)", "ISO Prolog x ** y with x < 0 is defined only when", "and isinstance(length.value, int))): self.throw_type_error('integer', length) if isnumber(length) and length.value < 0: self.throw_domain_error('not_less_than_zero', length)", "arithmetic value of E1 is greater than the corresponding arithmetic value of E2.\"\"\"", "t) if mgu is not None: if mgu: t2.apply(mgu) # Do not propagate", "wt_list = [e for e in self.s if isvariant(wt.value[1], e.value[1])] t_list = [e.value[2]", "or X and Y are identical terms.\"\"\" def execute(self, x, y): # The", "each list is found is undefined.''' def execute(self, template, goal, instances): fvs =", "isinstance(v, Atomic): return t == v if isvariable(t) and isvariable(v): return True if", "return ground(term.binding(), mgu) if isinstance(term, Compound): args = [] for arg in term.value[1:]:", "if (core.unify(h, head) is not None and core.unify(b, body) is not None): self.clauses.append(Compound('clause',", "BV where BV is a set of variables defined as the union of", "Name and Arity is 0.''' def execute(self, term, name, arity): if isvariable(term) and", "'@=<'(X, Y) is true iff X preceeds Y or X and Y are", "@term) If X and Y are NSTO (Not Subject To Occur-check) then '\\\\='(X,", "self.unify(arg, term.value[n.value]) class Univ_2(BuiltIn): \"\"\"'=..'(+nonvar, ?list) '=..'(-nonvar, +list) '=..'(Term, List) is true iff:", "self.data.pop(0) b = atom.name.index(s, p) l = len(s) a = len(atom.name) - (b", "of the name of the atom Atom_12 are the result of concatenating the", "(ISO 8.6) 'is/2' : Is_2, # Arithmetic comparison (ISO 8.7) '=:=/2' : ArithmeticEqual_2,", "A goal set_prolog_flag(Flag, Value) enables the value associated with a Prolog flag to", "AtomCodes_2, 'char_code/2' : CharCode_2, 'number_chars/2' : NumberChars_2, 
'number_codes/2' : NumberCodes_2, # Implementation defined", "Prolog float_fractional_part(x) is valid only when x is a float.''' if not isinstance(x,", "head) # errors on the conversion of the clause body to a #", "Y are unifiable.\"\"\" def execute(self, x, y): # TODO prologlib crashes if you", "False if not procedure._public: pi = Compound('/', Atomic(head.name), Atomic(head.arity)) self.throw_permission_error('access', 'private_procedure', pi) from", "return (self.unify(Atomic(term.name), name) and self.unify(Atomic(term.arity), arity)) if isinstance(term, Variable): if isinstance(name, Atomic) and", "math.cos, 'atan/1' : math.atan, 'exp/1' : math.exp, 'log/1' : logarithm, 'sqrt/1' : squareroot,", "member of the set V.''' def execute(self, x): return not isvariable(x) class Number_1(BuiltIn):", "is true iff X is a member of the set V.''' def execute(self,", "clause): head = clause.value[1] if clause.predicate_indicator() == ':-/2' else clause if isvariable(head): self.throw_instantiation_error()", "binding for t2 just in # case t2 were a renamed variable (e.g.", "PrologInstantiationError() if term.arity == 0 and term._isnumber(): return term.value if isinstance(term, Compound): from", "y): from .. import core return core.unify(x, y) is None ### ### Type", "# a list for e in term.as_list(): s.update(variable_set(e)) return s def existential_variable_set(term): '''The", "== before.value] if isinstance(length, Atomic): self.data = [(d, p) for (d, p) in", "solutions = [] if s == List.EMPTY_LIST else s.as_list() solutions = list(set(solutions)) solutions.sort()", "body), c) class Abolish_1(BuiltIn): '''abolish(@predicate_indicator) abolish(Pred) is true. 
It is used to remove", "math import modf f, i = modf(x) return i def float_fractional_part(x): '''Redefined w.r.t.", "raise PrologTypeError('integer', Atomic(n)) if not isinstance(s, int): from ..core import PrologTypeError raise PrologTypeError('integer',", "head) is not None and core.unify(b, body) is not None): self.clauses.append(Compound('clause', h, b))", "term.value[1:]: s.update(variable_set(arg)) else: # a list for e in term.as_list(): s.update(variable_set(e)) return s", "exit(x.value) # Utility functions def free_variable_set(t, v): '''The free variable set FV of", "arity): if isvariable(term) and isvariable(name): self.throw_instantiation_error() if isvariable(term) and isvariable(arity): self.throw_instantiation_error() if isvariable(term)", "Goal) then G is the iterated goal term of Goal * else G", "= self.clauses_to_unify.pop(0) return self.unify(Compound('clause', head, body), c) class Abolish_1(BuiltIn): '''abolish(@predicate_indicator) abolish(Pred) is true.", "effect of exiting from the processor and returning to whatever system invoked Prolog,", "clause.value[1] body = clause.value[2] else: head = clause body = Atomic.TRUE return self.pick_clause(head,", "Python __ne__ method does not hold Prolog # semantics for anonymous variables if", "self.unify(number, n) except InvalidTermException as e: self.throw_syntax_error(Atomic(s)) else: chars = list(str(number.value)) # FIXME", "int): self.throw_type_error('integer', x) exit(x.value) # Utility functions def free_variable_set(t, v): '''The free variable", "return v1 != v2 class ArithmeticLessThan_2(BuiltIn): \"\"\"'<'(@evaluable, @evaluable) '<'(E1, E2) is true iff", "isvariable(atom): if ispartiallist(codelist): self.throw_instantiation_error() if not islist(codelist) and not ispartiallist(codelist): self.throw_type_error('list', codelist) for", "self.data: return False return self.pick_data(atom, before, length, after, subatom) def pick_data(self, atom, before,", "for i in range(len(s) + 1)] elif 
isvariable(atom1): s = atom12.name if not", "template, goal, instances): self.reset_substitution() if not self.s: return False return self.pick_bag(template, goal, instances)", "to manage ValueError errors (e.g. for x < 0) on our own.''' if", "isvariable, isatom, isnumber, islist, ispartiallist, iscallable from ..core import BuiltIn ### ### Term", "if not self.s: return False return self.pick_bag(template, goal, instances) def reexecute(self, template, goal,", "if len(d) == length.value] if isinstance(after, Atomic): self.data = [(d, p) for (d,", "s): solutions = [] if s == List.EMPTY_LIST else s.as_list() solutions = list(set(solutions))", "= [(s[:i], s[i:], s) for i in range(len(s) + 1)] elif isvariable(atom1): s", "clause.value[1] body = clause.value[2] else: head = clause body = Atomic.TRUE if isvariable(head):", "if not isinstance(y, int): from ..core import PrologTypeError raise PrologTypeError('integer', Atomic(s)) return x", "E2 as expressions the corresponding arithmetic values are equal.\"\"\" def execute(self, e1, e2):", "solutions (ISO 8.10) 'findall/3' : Findall_3, 'bagof/3' : Bagof_3, 'setof/3' : Setof_3, #", "+atomic, +integer) functor(+nonvar, ?atomic, ?integer) functor(Term, Name, Arity) is true iff: * Term", "max_arity related errors if arity.value < 0: self.throw_domain_error('not_less_than_zero', arity) # error on access", "the corresponding character of the name.''' def execute(self, atom, codelist): if not isvariable(atom)", "and core.unify(b, body) is not None): self.clauses.append(Compound('clause', h, b)) return self.pick_clause(head, body) def", "Missing max_arity related error if isinstance(term, Atomic): l = List(term) return self.unify(elements, l)", "and before.value < 0: self.throw_domain_error('not_less_than_zero', before) if isnumber(length) and length.value < 0: self.throw_domain_error('not_less_than_zero',", "would be instantiated by successive re-executions of \"call(Goal), X=Template\" after systematic replacement of", "terms. 
'@=<'(X, Y) is true iff X preceeds Y or X and Y", "and pi.arity == 2: name, arity = pi.value[1:] if isvariable(name) or isvariable(arity): self.throw_instantiation_error()", "and len(elements) > 1: self.throw_type_error('atom', elements.head) if islist(elements) and isinstance(elements.head, Compound) and len(elements)", "prologlib crashes if you attempt to unify two STO terms by =/2 #", "isvariable(atom) and not isatom(atom): self.throw_type_error('atom', atom) if isvariable(atom): if ispartiallist(codelist): self.throw_instantiation_error() if not", "current_predicate/1 is implementation dependent i = self.indicators.pop() return self.unify(pi, i) ### ### Clause", "charlist): if isvariable(number) and ispartiallist(charlist): self.throw_instantiation_error() if isvariable(number): for element in charlist.as_list(): if", "self.pick_clause(head, body) def reexecute(self, head, body): self.reset_substitution() return self.pick_clause(head, body) def pick_clause(self, head,", "has the side effect of exiting from the processor and returning to whatever", ": Repeat_0, # Atomic term processing (ISO 8.16) 'atom_length/2' : AtomLength_2, 'atom_concat/3' :", "s.update(existential_variable_set(term.value[2])) return s return s def iterated_goal_term(term): '''The iterated goal term G of", "of characters of the name of AtomR.''' def execute(self, atom, before, length, after,", "islist(codelist) and not ispartiallist(codelist): self.throw_type_error('list', codelist) if islist(codelist): for element in codelist.as_list(): if", "true iff Instances unifies with the list of values to which a variable", "of the name of AtomL, Length is the number of characters of the", "power, 'sin/1' : math.sin, 'cos/1' : math.cos, 'atan/1' : math.atan, 'exp/1' : math.exp,", "instances) from .. 
import core caller = core.Caller() caller._kb = self.kb values =", "E2 as expressions the corresponding arithmetic values are not equal.\"\"\" def execute(self, e1,", "before, length, after, subatom): if isvariable(atom): self.throw_instantiation_error() if not isvariable(atom) and not isatom(atom):", "body): self.reset_substitution() return self.pick_clause(head, body) def pick_clause(self, head, body): if not self.clauses: return", "isinstance(y, int): return x // y return x / y def intdivide(x, y):", "self.data = [(d, p) for (d, p) in self.data if n.index(d, p) ==", "head = clause.value[1] if clause.predicate_indicator() == ':-/2' else clause if isvariable(head): self.throw_instantiation_error() if", "start += 1 if isinstance(before, Atomic): self.data = [(d, p) for (d, p)", "TODO Missing max_arity related errors if arity.value < 0: self.throw_domain_error('not_less_than_zero', arity) # error", "goal, instances): self.reset_substitution() if not self.s: return False return self.pick_bag(template, goal, instances) def", "the former to the variables of the latter such that the latter term", "is true iff integer Length equals the number of characters of the name", ": TermGreaterThanOrEqual_2, # Term creation and decomposition (ISO 8.5) 'functor/3' : Functor_3, 'arg/3'", "set() s = set() if isinstance(term, Compound): for arg in term.value[1:]: s.update(variable_set(arg)) else:", "preceeds X or Y and X are identical terms.\"\"\" def execute(self, x, y):", ": Var_1, 'atom/1' : Atom_1, 'integer/1' : Integer_1, 'float/1' : Float_1, 'atomic/1' :", "(Compound, List)) class Nonvar_1(BuiltIn): '''nonvar(@term) nonvar(X) is true iff X is not a", "Is_2, # Arithmetic comparison (ISO 8.7) '=:=/2' : ArithmeticEqual_2, '=\\=/2' : ArithmeticNotEqual_2, '</2'", "never existed.''' def execute(self, pi): if isvariable(pi): self.throw_instantiation_error() if pi.name == '/' and", "then '='(X, Y) is true iff X and Y are unifiable.\"\"\" def execute(self,", "arity) if not isatom(name): 
self.throw_type_error('atom', name) # TODO Missing max_arity related errors if", "return False return self.pick_bag(template, goal, instances) def reexecute(self, template, goal, instances): self.reset_substitution() if", "self.throw_type_error('atom', atom) if isvariable(atom): if ispartiallist(codelist): self.throw_instantiation_error() if not islist(codelist) and not ispartiallist(codelist):", "float_fractional_part(x): '''Redefined w.r.t. Python because in ISO Prolog float_fractional_part(x) is valid only when", "= bijection.get(a1) if a is not None and a2 != a: return False", "execute(self, head, body): if isvariable(head): self.throw_instantiation_error() if not iscallable(head): self.throw_type_error('callable', head) if not", "PrologTypeError('integer', Atomic(s)) return n << s def bitand(x, y): '''Redefined w.r.t. Python because", "# Arithmetic comparison (ISO 8.7) '=:=/2' : ArithmeticEqual_2, '=\\=/2' : ArithmeticNotEqual_2, '</2' :", "self.clauses_to_remove.append(clause) return self.pick_clause(head, body) def reexecute(self, clause): self.reset_substitution() if clause.predicate_indicator() == ':-/2': head", "self.unify(x, y) class NotUnifiable_2(BuiltIn): \"\"\"'\\\\='(@term, @term) If X and Y are NSTO (Not", "T is a compound term then Sv is the union of the variable", "..core import renamed_copy return renamed_copy(head) class CurrentPredicate_1(BuiltIn): '''current_predicate(?predicate_indicator) current_predicate(PI) is true iff PI", "as follows: * if T is a variable or an atomic term, then", "'rem/2' : module, 'floor/1' : math.floor, 'round/1' : round, 'ceiling/1' : math.ceil, 'truncate/1'", "and isvariable(y) and x.name == '_' and y.name == '_'): return True return", "list is a sorted list, but the order in which each list is", "if isvariable(t) and isvariable(v): return True if isinstance(t, Compound) and isinstance(v, Compound): if", "caller._kb = self.kb values = [] result = caller.solve(goal) while result: from copy", ": TermNotIdentical_2, 
'@</2' : TermLessThan_2, '@>/2' : TermGreaterThan_2, '@>=/2' : TermGreaterThanOrEqual_2, # Term", "InvalidTermException as e: self.throw_syntax_error(Atomic(s)) else: chars = list(str(number.value)) # FIXME this should use", "s def bitand(x, y): '''Redefined w.r.t. Python because we need to manage TypeError", "import core for clause in procedure.clauses(): h, b = convert_clause_to_term(clause.head(), clause.body()) if (core.unify(h,", "solutions.sort() return solutions ### ### Logic and control (ISO 8.15) ### class Not_1(BuiltIn):", "return False else: bijection[a1] = a2 else: if not isvariant(a1, a2): return False", "= clause.value[2] else: head = clause body = Atomic.TRUE if isvariable(head): self.throw_instantiation_error() if", "clause if isvariable(head): self.throw_instantiation_error() if isnumber(head): self.throw_type_error('callable', head) # errors on the conversion", "isatom, isnumber, islist, ispartiallist, iscallable from ..core import BuiltIn ### ### Term unification", "ord(char.name) == code.value class NumberChars_2(BuiltIn): '''number_chars(+number, ?character_list)\\nnumber_chars(-number, +character_list) number_chars(Number, List) is true iff", "g = iterated_goal_term(goal) findall = Findall_3(self.kb) findall.execute(Compound('+', self.witness, template), g, Variable('S')) s =", "ArithmeticLessThanOrEqual_2(BuiltIn): \"\"\"'=<'(@evaluable, @evaluable) '=<'(E1, E2) is true iff evaluating E1 and E2 as", "flag) self.flags = {f for f in core._FLAGS.values() if core.unify(flag, Atomic(f.name)) is not", "self.unify(code, Atomic(c)) else: return ord(char.name) == code.value class NumberChars_2(BuiltIn): '''number_chars(+number, ?character_list)\\nnumber_chars(-number, +character_list) number_chars(Number,", "y): '''Redefined w.r.t. 
Python because in ISO Prolog div(x, y) with x and", "head) # error on access permission to a user-defined # procedure is handled", "isinstance(before, Atomic): self.data = [(d, p) for (d, p) in self.data if n.index(d,", "flag, value): from .. import core # for flags if not isvariable(flag) and", "our own.''' if not isinstance(x, int): from ..core import PrologTypeError raise PrologTypeError('integer', Atomic(n))", "not isvariable(element): try: chr(element.value) except UnicodeDecodeError: self.throw_representation_error(element) if isvariable(atom): chars = [chr(code.value) for", "PrologTypeError('integer', Atomic(n)) if not isinstance(s, int): from ..core import PrologTypeError raise PrologTypeError('integer', Atomic(s))", "^(_, Goal) then G is the iterated goal term of Goal * else", "t1._copy_term() # Can't directly use BuiltIn.unify because the bindings # between the renamed", ".. import core # for flags if not isvariable(flag) and not isatom(flag): self.throw_type_error('atom',", "'///2' : intdivide, 'mod/2' : module, 'rem/2' : module, 'floor/1' : math.floor, 'round/1'", "(isvariable(x) and isvariable(y) and x.name == '_' and y.name == '_'): return False", "# # '\\\\+'(Term) is true iff call(Term) is false.\"\"\" def execute(self, term): if", "= search_evaluable_functor(pi) if not functor: from ..core import PrologTypeError raise PrologTypeError('evaluable', Atomic(pi)) return", "'=..'(Term, List) is true iff: * Term is an atomic term and List", "self.throw_type_error('number', number) if isvariable(number) and not islist(codelist) and not ispartiallist(codelist): self.throw_type_error('list', codelist) if", "from ..core import PrologTypeError raise PrologTypeError('integer', Atomic(s)) return n >> s def leftshift(n,", "procedure = self.kb.search(head) if not procedure: return False if not procedure._public: pi =", "and E2 as expressions the corresponding arithmetic values are not equal.\"\"\" def execute(self,", "InvalidTermException s = ''.join([char.name 
for char in charlist.as_list()]) try: # the parser needs", "< 0: self.throw_domain_error('not_less_than_zero', arity) # error on access permission to a user-defined #", "# TODO This should be distributed onto the Term hierarchy classes def variable_set(term):", "self.throw_type_error('atom', name) if isinstance(term, Atomic): return self.unify(term, name) and self.unify(arity, Atomic(0)) if isinstance(term,", "isvariable(atom) and not isatom(atom): self.throw_type_error('atom', atom) if not isvariable(subatom) and not isatom(subatom): self.throw_type_error('atom',", "v if isvariable(t) and isvariable(v): return True if isinstance(t, Compound) and isinstance(v, Compound):", "execute(self): return True def reexecute(self): return True ### ### Atomic term processing (ISO", "bindings # outside the context of the copy_term/2 built-in if t2.name in mgu:", "subatom) def reexecute(self, atom, before, length, after, subatom): self.reset_substitution() if not self.data: return", "own.''' if not y: from ..core import PrologEvaluationError raise PrologEvaluationError('zero_divisor') if isinstance(x, int)", "Atomic): return t == v if isvariable(t) and isvariable(v): return True if isinstance(t,", "p) in self.data if n.index(d, p) == before.value] if isinstance(length, Atomic): self.data =", "and isinstance(v, Atomic): return t == v if isvariable(t) and isvariable(v): return True", "8.6) 'is/2' : Is_2, # Arithmetic comparison (ISO 8.7) '=:=/2' : ArithmeticEqual_2, '=\\=/2'", "(ISO 8.4) ### class TermLessThanOrEqual_2(BuiltIn): \"\"\"'@=<'(@term, @term) Test the ordering of two terms.", "Do not propagate renamed term variables bindings # outside the context of the", "evaluate_expression(e2) return v1 > v2 class ArithmeticGreaterThanOrEqual_2(BuiltIn): \"\"\"'>='(@evaluable, @evaluable) '>='(E1, E2) is true", "ispartiallist(charlist): self.throw_instantiation_error() if isvariable(number): for element in charlist.as_list(): if isvariable(element): self.throw_instantiation_error() 
if not", "# Clause retrival and information (ISO 8.8) 'clause/2' : Clause_2, 'current_predicate/1' : CurrentPredicate_1,", "iscallable(body)): self.throw_type_error('callable', body) self.clauses = [] procedure = self.kb.search(head) if not procedure: return", "corresponds to a term H :- B which unifies with Head :- Body.'''", "== code.value class NumberChars_2(BuiltIn): '''number_chars(+number, ?character_list)\\nnumber_chars(-number, +character_list) number_chars(Number, List) is true iff List", "EV of a term T is a set of variables defined recursively as", ": Bagof_3, 'setof/3' : Setof_3, # Logic and control (ISO 8.15) # FIXME", "in the database which corresponds to a term H :- B which unifies", "a member of the set V.''' def execute(self, x): return isvariable(x) class Atom_1(BuiltIn):", "execute(self, t1, t2): from .. import core #t = core.renamed_copy(t1) t = t1._copy_term()", "self.throw_type_error('integer', arity) if not isatom(name): self.throw_type_error('atom', name) # TODO Missing max_arity related errors", "isvariable(code): try: chr(code.value) except UnicodeDecodeError: self.throw_representation_error(code) if isvariable(char): c = chr(code.value) return self.unify(char,", "own.''' if x < 0 and isinstance(y, float): from ..core import PrologEvaluationError raise", ".. import core #t = core.renamed_copy(t1) t = t1._copy_term() # Can't directly use", "Instances) is true iff Instances unifies with the list of values to which", "the ordering of two terms. '@>='(X, Y) is true iff Y preceeds X", "bitnot(x): '''Redefined w.r.t. Python because we need to manage TypeError errors (e.g. x", "'''Redefined w.r.t. 
Python because in ISO Prolog float_integer_part(x) is valid only when x", "Term hierarchy classes def variable_set(term): '''The variable set Sv of a term T", "execute(self, x, y): return x > y class TermGreaterThanOrEqual_2(BuiltIn): \"\"\"'@>=(@term, @term) Test the", "if isvariable(number) or islist(codelist): from ..parser import PrologParser, InvalidTermException s = ''.join([chr(code.value) for", "member of the set V or CT.''' def execute(self, x): return isinstance(x, Atomic)", "if isvariable(head): self.throw_instantiation_error() if isnumber(head): self.throw_type_error('callable', head) # errors on the conversion of", "c = self.clauses.pop(0) return self.unify(Compound('clause', head, body), c) def convert_clause_to_term(head, body): return (convert_to_term(head),", "characters of the name of atom Atom, and the value of each element", "if not functor: from ..core import PrologTypeError raise PrologTypeError('evaluable', Atomic(pi)) return functor(*args) from", "elements.name == '.' and elements.arity == 2: if len(elements) == 1: t =", "isinstance(x.value, int) class Float_1(BuiltIn): '''float(@term) float(X) is true iff X is a member", "if core.unify(pi, indicator) is not None: self.indicators.append(indicator) return self.pick_indicator(pi) def reexecute(self, pi): self.reset_substitution()", "'''Redefined w.r.t. Python because in ISO Prolog mod(x, y) is valid only when", "AtomChars_2(BuiltIn): '''atom_chars(+atom, ?character_list)\\natom_chars(-atom, +character_list) atom_chars(Atom, List) is true iff List is a list", "evaluate_expression(e1) v2 = evaluate_expression(e2) return v1 > v2 class ArithmeticGreaterThanOrEqual_2(BuiltIn): \"\"\"'>='(@evaluable, @evaluable) '>='(E1,", "unifiable.\"\"\" def execute(self, x, y): from .. import core return core.unify(x, y) is", "because we need to manage ValueError errors (e.g. 
for log(0)) on our own.'''", "for i in range(start, end + 1): self.data.append((n[start:i], start)) start += 1 if", "class Integer_1(BuiltIn): '''integer(@term) integer(X) is true iff X is a member of the", "errors if isvariable(term): from ..core import PrologInstantiationError raise PrologInstantiationError() if term.arity == 0", "are the result of concatenating the characters of the name of the atom", "### class Is_2(BuiltIn): \"\"\"is(?term, @evaluable) 'is'(Result, Expression) is true iff the value of", "is true iff the value of evaluating Expression as an expression is Result.\"\"\"", "islist(elements) and not ispartiallist(elements): self.throw_type_error('list', elements) if isvariable(term) and islist(elements) and isvariable(elements.head): self.throw_instantiation_error()", "the database.''' def execute(self, clause): if clause.predicate_indicator() == ':-/2': head = clause.value[1] body", "V.''' def execute(self, x): return isvariable(x) class Atom_1(BuiltIn): '''atom(@term) atom(X) is true iff", "copy import deepcopy v = ground(deepcopy(template), caller.currsubst()) #values.append(core.renamed_copy(v)) values.append(v._copy_term()) result = caller.solve_next() values", "set * else if T is a variable then Sv is {T} *", "is an atomic term, then Sv is the empty set * else if", "self.throw_instantiation_error() if not isvariable(element): try: chr(element.value) except UnicodeDecodeError: self.throw_representation_error(element) if isvariable(atom): chars =", "isinstance(length.value, int))): self.throw_type_error('integer', length) if (not isvariable(after) and not (isnumber(after) and isinstance(after.value, int))):", "Functor_3, 'arg/3' : Arg_3, '=../2' : Univ_2, 'copy_term/2' : CopyTerm_2, # Arithmetic evaluation", "== ''.join(chars) class CharCode_2(BuiltIn): '''char_code(+character, ?character_code)\\nchar_code(-character, +character_code) char_code(Char, Code) is true iff the", "reexecute(self, head, body): self.reset_substitution() return 
self.pick_clause(head, body) def pick_clause(self, head, body): if not", "for c in chars] return self.unify(codelist, List.from_list(lst)) ### ### Implementation defined hooks (ISO", "self.pick_clause(head, body) def pick_clause(self, head, body): if not self.clauses: return False c =", "Integer_1, 'float/1' : Float_1, 'atomic/1' : Atomic_1, 'compound/1' : Compound_1, 'nonvar/1' : Nonvar_1,", "in ISO Prolog float_integer_part(x) is valid only when x is a float.''' if", "manage TypeError errors (e.g. n as float) on our own.''' if not isinstance(n,", "import PrologTypeError raise PrologTypeError('integer', Atomic(n)) if not isinstance(s, int): from ..core import PrologTypeError", "clause body = Atomic.TRUE if isvariable(head): self.throw_instantiation_error() if isnumber(head): self.throw_type_error('callable', head) # error", "a term T is a term defined recursively as follows: * if T", "(e.g. n as float) on our own.''' if not isinstance(n, int): from ..core", "?integer) atom_length(Atom, Length) is true iff integer Length equals the number of characters", "'retract/1' : Retract_1, 'abolish/1' : Abolish_1, # All solutions (ISO 8.10) 'findall/3' :", "to manage ZeroDivisionError errors on our own.''' if x < 0 and isinstance(y,", "int): self.throw_type_error('integer', n) if not isinstance(term, Compound): self.throw_type_error('compound', term) if n.value < 0:", "# \"\"\"'\\\\+'(@callable_term) # # '\\\\+'(Term) is true iff call(Term) is false.\"\"\" def execute(self,", "deepcopy #subst = core.unify(ww, deepcopy(self.witness)) subst = core.unify(ww, self.witness) ww.apply(subst) t.apply(subst) self.substitution.update(subst) t_list", "(evaluate_expression(deref(a)) for a in term.value[1:]) pi = term.predicate_indicator() functor = search_evaluable_functor(pi) if not", "the database in the same state as if the procedure identified by Pred", "and the value of each element is the character code for the corresponding", "< 0: from ..core import PrologEvaluationError raise 
PrologEvaluationError('undefined') from math import sqrt return", "built-in if t2.name in mgu: # Still preserve the binding for t2 just", "for clause in procedure.clauses(): h, b = convert_clause_to_term(clause.head(), clause.body()) if (core.unify(h, head) is", "x): if isvariable(x): self.throw_instantiation_error() if not isvariable(x) and not isnumber(x) and not isinstance(x.value,", "v.value[1:]): if isvariable(a1) and isvariable(a2) and not a1.name.startswith('_'): a = bijection.get(a1) if a", "database.''' def execute(self, clause): if clause.predicate_indicator() == ':-/2': head = clause.value[1] body =", "predicate of Head is public, and * there is a clause in the", "'''asserta(@clause) asserta(Clause) is true. It is used to add Clause to the database", "bug: the operator's \"scope\" is much wider than the single # goal, even", "y). Also, we need to manage ZeroDivisionError errors on our own.''' if not", "v1 <= v2 class ArithmeticGreaterThan_2(BuiltIn): \"\"\"'>'(@evaluable, @evaluable) '>'(E1, E2) is true iff evaluating", "if flag.name not in core._FLAGS: self.throw_domain_error('prolog_flag', flag) f = core._FLAGS[flag.name] if len(f.allowed) ==", "Atomic(c[2]))) class SubAtom_5(BuiltIn): '''sub_atom(+atom, ?integer, ?integer, ?integer, ?atom) sub_atom(Atom, Before, Length, After, Sub_atom)", "of Number which could be output.''' def execute(self, number, charlist): if isvariable(number) and", "def execute(self, x): return x.arity == 0 and isinstance(x.value, int) class Float_1(BuiltIn): '''float(@term)", "(ISO 8.7) ### class ArithmeticEqual_2(BuiltIn): \"\"\"'=:='(@evaluable, @evaluable) '=:='(E1, E2) is true iff evaluating", "= [e for e in self.s if e not in wt_list] from ..", "#from copy import deepcopy #subst = core.unify(ww, deepcopy(self.witness)) subst = core.unify(ww, self.witness) ww.apply(subst)", "from ..core import PrologTypeError raise PrologTypeError('integer', Atomic(s)) return n << s def bitand(x,", "self.throw_type_error('integer', before) 
if (not isvariable(length) and not (isnumber(length) and isinstance(length.value, int))): self.throw_type_error('integer', length)", "# for flags if flag.name not in core._FLAGS: self.throw_domain_error('prolog_flag', flag) f = core._FLAGS[flag.name]", "indicators are found by # current_predicate/1 is implementation dependent i = self.indicators.pop() return", "predicate is equal to the functor of the head of Clause.''' def execute(self,", "execute(self, clause): head = clause.value[1] if clause.predicate_indicator() == ':-/2' else clause if isvariable(head):", "and not core._FLAGS[flag.name]: self.throw_domain_error('prolog_flag', flag) self.flags = {f for f in core._FLAGS.values() if", "self.unify(result, Atomic(c)) def evaluate_expression(term): # TODO No overflow/underflow errors # TODO No undefined", "self.throw_instantiation_error() v1 = evaluate_expression(e1) v2 = evaluate_expression(e2) return v1 < v2 class ArithmeticLessThanOrEqual_2(BuiltIn):", "Atomic(n)) if not isinstance(y, int): from ..core import PrologTypeError raise PrologTypeError('integer', Atomic(s)) return", "T and BV where BV is a set of variables defined as the", "c = Compound(name.name, *t) from ..builtin import search_builtin if search_builtin(c): self.throw_permission_error('modify', 'static_procedure', pi)", "isnumber(head): self.throw_type_error('callable', head) # errors on the conversion of the clause body to", "if not x: from ..core import PrologEvaluationError raise PrologEvaluationError('undefined') from math import log", "== '/' and pi.arity == 2): self.throw_type_error('predicate_indicator', pi) self.indicators = [] for i", "not hold Prolog # semantics for anonymous variables if (isvariable(x) and isvariable(y) and", "the ordering of two terms. 
'@>'(X, Y) is true iff Y preceeds X.\"\"\"", "(isnumber(before) and isinstance(before.value, int))): self.throw_type_error('integer', before) if (not isvariable(length) and not (isnumber(length) and", "and not (isnumber(after) and isinstance(after.value, int))): self.throw_type_error('integer', after) if isnumber(before) and before.value <", "Atomic(head.arity)) self.throw_permission_error('access', 'private_procedure', pi) from .. import core for clause in procedure.clauses(): h,", "self.throw_instantiation_error() if pi.name == '/' and pi.arity == 2: name, arity = pi.value[1:]", "on access permission to a user-defined # procedure are handled directly by the", "clause body = Atomic.TRUE return self.pick_clause(head, body) def pick_clause(self, head, body): if not", "Atomic): l = List(term) return self.unify(elements, l) if isinstance(term, Compound): l = List.from_list([Atomic(term.name)]", "e.value[1])] t_list = [e.value[2] for e in wt_list] s_next = [e for e", "testing (ISO 8.3) 'var/1' : Var_1, 'atom/1' : Atom_1, 'integer/1' : Integer_1, 'float/1'", "is used to remove from the database the procedure specified by the predicate", "pi = Compound('/', Atomic(head.name), Atomic(head.arity)) self.throw_permission_error('modify', 'static_procedure', pi) self.clauses_to_unify = [] self.clauses_to_remove =", "?term) copy_term(Term_1, Term_2) is true iff Term_2 unifies with a term T which", "islist(elements) and not isatom(elements.head) and len(elements) > 1: self.throw_type_error('atom', elements.head) if islist(elements) and", "whose elements are the character codes corresponding to a character sequence of Number", "arity.value < 0: self.throw_domain_error('not_less_than_zero', arity) if isvariable(term) and not isinstance(name, Atomic): self.throw_type_error('atomic', name)", "from math import copysign s = copysign(1, x) return int(s) if isinstance(x, int)", "of characters of the name of the atom Atom.''' def execute(self, atom, length):", "a member of the set F.''' 
def execute(self, x): return x.arity == 0", "procedure: return False if not procedure._public: pi = Compound('/', Atomic(head.name), Atomic(head.arity)) self.throw_permission_error('access', 'private_procedure',", "..core import PrologTypeError raise PrologTypeError('evaluable', Atomic(pi)) return functor(*args) from ..core import PrologTypeError raise", "1: self.throw_type_error('character', element) if isvariable(number) or islist(charlist): from ..parser import PrologParser, InvalidTermException s", "each variable X in the former by Xs.''' from ..core import deref t", "self.throw_type_error('list', codelist) for element in codelist.as_list(): if isvariable(element): self.throw_instantiation_error() if not isvariable(element): try:", "* if T is a variable or an atomic term, then EV is", "if isnumber(term): self.throw_type_error('callable', term) from .. import core caller = core.Caller() caller._kb =", "unify two STO terms by =/2 # instead of using the proper unify_with_occur_check/2", "..core import PrologTypeError raise PrologTypeError('integer', Atomic(s)) return n >> s def leftshift(n, s):", "is a list whose elements are the character codes corresponding to a character", "PrologTypeError('integer', Atomic(x)) if not isinstance(y, int): from ..core import PrologTypeError raise PrologTypeError('integer', Atomic(y))", "E1 and E2 as expressions the corresponding arithmetic value of E1 is greater", "return s def iterated_goal_term(term): '''The iterated goal term G of a term T", "true iff PI is a predicate indicator for one of the user-defined procedures", "Unify_2(BuiltIn): \"\"\"'='(?term, ?term) If X and Y are NSTO (Not Subject To Occur-check)", "mgu is not None: if mgu: t2.apply(mgu) # Do not propagate renamed term", "execute(self, x, y): # The Python __ne__ method does not hold Prolog #", "not isvariable(code) and not isinstance(code.value, int): self.throw_type_error('integer', code) if not isvariable(code): try: chr(code.value)", "(not isvariable(after) and 
not (isnumber(after) and isinstance(after.value, int))): self.throw_type_error('integer', after) if isnumber(before) and", "not ispartiallist(instances))): self.throw_type_error('list', instances) from .. import core caller = core.Caller() caller._kb =", "flags if flag.name not in core._FLAGS: self.throw_domain_error('prolog_flag', flag) f = core._FLAGS[flag.name] if len(f.allowed)", "not self.flags: return False return self.pick_flag(flag, value) def pick_flag(self, flag, value): f =", "= list(set(solutions)) solutions.sort() return solutions ### ### Logic and control (ISO 8.15) ###", ": Not_1, 'not/1' : Not_1, 'repeat/0' : Repeat_0, # Atomic term processing (ISO", "s == List.EMPTY_LIST else s.as_list() solutions = list(set(solutions)) solutions.sort() return solutions ### ###", "two STO terms by =/2 # instead of using the proper unify_with_occur_check/2 predicate.", "### ### Term unification (ISO 8.2) ### class Unify_2(BuiltIn): \"\"\"'='(?term, ?term) If X", "^(V, G) then EV is the union of the variable set of V", "for anonymous variables if (isvariable(x) and isvariable(y) and x.name == '_' and y.name", "(isnumber(after) and isinstance(after.value, int))): self.throw_type_error('integer', after) if isnumber(before) and before.value < 0: self.throw_domain_error('not_less_than_zero',", "because in ISO Prolog x ** y with x < 0 is defined", "the clause body to a # goal and on access permission to a", "with ^(V, G) then EV is the union of the variable set of", "< 0) on our own.''' if x < 0: from ..core import PrologEvaluationError", "need to manage TypeError errors (e.g. x or y as float) on our", "y) is None ### ### Type testing (ISO 8.3) ### class Var_1(BuiltIn): '''var(@term)", "size) class AtomConcat_3(BuiltIn): '''atom_concat(?atom, ?atom, +atom)\\natom_concat(+atom, +atom, -atom) atom_concat(Atom_1, Atom_2, Atom_12) is true", "class TermGreaterThan_2(BuiltIn): \"\"\"'@>(@term, @term) Test the ordering of two terms. 
'@>'(X, Y) is", "isatom(name): self.throw_type_error('atom', name) # TODO Missing max_arity related errors if arity.value < 0:", "false.\"\"\" # \"\"\"'\\\\+'(@callable_term) # # '\\\\+'(Term) is true iff call(Term) is false.\"\"\" def", "that goal. Each list is a sorted list, but the order in which", "intdivide, 'mod/2' : module, 'rem/2' : module, 'floor/1' : math.floor, 'round/1' : round,", "'^' and term.arity == 2: s.update(variable_set(term.value[1])) s.update(existential_variable_set(term.value[2])) return s return s def iterated_goal_term(term):", "does not hold Prolog # semantics for anonymous variables if (isvariable(x) and isvariable(y)", "= elements.head.name t = Compound(name, *elements.as_list()[1:]) return self.unify(term, t) else: return False else:", "a user-defined # procedure are handled directly by the database from ..builtin import", "start)) start += 1 if isinstance(before, Atomic): self.data = [(d, p) for (d,", "number, codelist): if isvariable(number) and ispartiallist(codelist): self.throw_instantiation_error() if isvariable(number): for element in codelist.as_list():", "than the corresponding arithmetic value of E2.\"\"\" def execute(self, e1, e2): if isvariable(e1)", "for each different instantiation of the free variables in that goal. 
Each list", "caller = core.Caller() caller._kb = self.kb result = caller.solve(term) return not result class", "'''atom_codes(+atom, ?character_code_list)\\natom_codes(-atom, +character_code_list) atom_codes(Atom, List) is true iff List is a list whose", "p) for (d, p) in self.data if len(d) == length.value] if isinstance(after, Atomic):", "PrologParser, InvalidTermException s = ''.join([chr(code.value) for code in codelist.as_list()]) try: # the parser", "== 1: t = elements.head return self.unify(term, t) elif len(elements) > 1: name", "# TODO Missing max_arity related error if isvariable(term) and arity.value < 0: self.throw_domain_error('not_less_than_zero',", "if isvariable(term) and isvariable(arity): self.throw_instantiation_error() if isvariable(term) and not isinstance(arity.value, int): self.throw_type_error('integer', arity)", "'=../2' : Univ_2, 'copy_term/2' : CopyTerm_2, # Arithmetic evaluation (ISO 8.6) 'is/2' :", "ground(deepcopy(template), caller.currsubst()) #values.append(core.renamed_copy(v)) values.append(v._copy_term()) result = caller.solve_next() values = List.EMPTY_LIST if not values", "module, 'floor/1' : math.floor, 'round/1' : round, 'ceiling/1' : math.ceil, 'truncate/1' : math.trunc,", "\"\"\"'<'(@evaluable, @evaluable) '<'(E1, E2) is true iff evaluating E1 and E2 as expressions", "s[i:], s)] elif isvariable(atom2): s = atom12.name if not s.startswith(atom1.name): return False else:", "with ^(_, Goal) then G is the iterated goal term of Goal *", ">= v2 ### ### Clause retrival and information (ISO 8.8) ### class Clause_2(BuiltIn):", "Code.''' def execute(self, char, code): if isvariable(char) and isvariable(code): self.throw_instantiation_error() if not isvariable(char)", "isvariable(x) and not isnumber(x) and not isinstance(x.value, int): self.throw_type_error('integer', x) exit(x.value) # Utility", ".. 
import core for wwtt, t in zip(wt_list, t_list): ww = wwtt.value[1] #from", "'current_predicate/1' : CurrentPredicate_1, # Clause creation and destruction (ISO 8.9) 'asserta/1' : Asserta_1,", "= [] procedure = self.kb.search(head) if not procedure: return False if not procedure._public:", "UnicodeDecodeError: self.throw_representation_error(code) if isvariable(char): c = chr(code.value) return self.unify(char, Atomic(c)) elif isvariable(code): c", "set A or I or F and is false if X is a", ": float_fractional_part, '**/2' : power, 'sin/1' : math.sin, 'cos/1' : math.cos, 'atan/1' :", "Atomic(len(atom.name)) return self.unify(length, size) class AtomConcat_3(BuiltIn): '''atom_concat(?atom, ?atom, +atom)\\natom_concat(+atom, +atom, -atom) atom_concat(Atom_1, Atom_2,", "def execute(self, x, y): # The Python __eq__ method does not hold Prolog", "whose elements are the one-char atoms corresponding to a character sequence of Number", "copysign s = copysign(1, x) return int(s) if isinstance(x, int) else s def", "evaluate_expression(e2) return v1 < v2 class ArithmeticLessThanOrEqual_2(BuiltIn): \"\"\"'=<'(@evaluable, @evaluable) '=<'(E1, E2) is true", "isinstance(before, Atomic) else 0 end = len(n) - (after.value if isinstance(after, Atomic) else", "and y.name == '_'): return True return x != y class TermLessThan_2(BuiltIn): \"\"\"'@<'(@term,", "ArithmeticNotEqual_2, '</2' : ArithmeticLessThan_2, '=</2' : ArithmeticLessThanOrEqual_2, '>/2' : ArithmeticGreaterThan_2, '>=/2' : ArithmeticGreaterThanOrEqual_2,", "true iff List is a list whose elements are the one-char atoms whose", "islist(charlist) and not ispartiallist(charlist): self.throw_type_error('list', charlist) for element in charlist.as_list(): if isvariable(element): self.throw_instantiation_error()", "caller.solve_next() values = List.EMPTY_LIST if not values else List.from_list(values) return self.unify(values, instances) def", "is true iff X is a member of the set A.''' def execute(self,", "isvariable(char) and 
isvariable(code): self.throw_instantiation_error() if not isvariable(char) and len(char.name) != 1: self.throw_type_error('character', char)", "not isinstance(arity.value, int): self.throw_type_error('integer', arity) # TODO Missing max_arity related error if isvariable(term)", "= List.EMPTY_LIST if not values else List.from_list(values) return self.unify(values, instances) def ground(term, mgu):", "execute(self, x, y): return x < y class TermGreaterThan_2(BuiltIn): \"\"\"'@>(@term, @term) Test the", "Compound_1(BuiltIn): '''compound(@term) compound(X) is true iff X is a member of the set", "isinstance(term, Atomic): l = List(term) return self.unify(elements, l) if isinstance(term, Compound): l =", "template) self.witness = Compound('witness', *fvs) if fvs else Atomic('witness') g = iterated_goal_term(goal) findall", "def execute(self, char, code): if isvariable(char) and isvariable(code): self.throw_instantiation_error() if not isvariable(char) and", "because we need to manage TypeError errors (e.g. x or y as float)", "in self.data if len(d) == length.value] if isinstance(after, Atomic): self.data = [(d, p)", "x, y): from .. import core return core.unify(x, y) is None ### ###", "atom2.name self.data = [(n1, n2, n1 + n2)] return self.pick_data(atom1, atom2, atom12) def", "terms.\"\"\" def execute(self, x, y): # The Python __ne__ method does not hold", "assembles as a list the solutions of Goal for each different instantiation of", "two terms. '@>='(X, Y) is true iff Y preceeds X or Y and", "exiting from the processor and returning to whatever system invoked Prolog, passing the", "a term T which is a renamed copy of Term_1.''' def execute(self, t1,", "of the procedure whose predicate is equal to the functor of the head", "return self.pick_data(atom, before, length, after, subatom) def pick_data(self, atom, before, length, after, subatom):", "functor of the head of Clause.''' def execute(self, clause): head = clause.value[1] if", "% y def sign(x): '''Redefined w.r.t. 
Python because in ISO Prolog sign(x) must", "self.data = [(s[:i], s[i:], s) for i in range(len(s) + 1)] elif isvariable(atom1):", "AtomL, Length is the number of characters of the name of Sub_atom, and", "import deref args = (evaluate_expression(deref(a)) for a in term.value[1:]) pi = term.predicate_indicator() functor", "atom, codelist): if not isvariable(atom) and not isatom(atom): self.throw_type_error('atom', atom) if isvariable(atom): if", "the number of characters of the name of the atom Atom.''' def execute(self,", "class ArithmeticEqual_2(BuiltIn): \"\"\"'=:='(@evaluable, @evaluable) '=:='(E1, E2) is true iff evaluating E1 and E2", "False else: i = len(atom1.name) self.data = [(s[:i], s[i:], s)] else: n1 =", "BuiltIn ### ### Term unification (ISO 8.2) ### class Unify_2(BuiltIn): \"\"\"'='(?term, ?term) If", "Y) is true iff Y preceeds X.\"\"\" def execute(self, x, y): return x", "self.throw_type_error('integer', n) if not isinstance(term, Compound): self.throw_type_error('compound', term) if n.value < 0: self.throw_domain_error('not_less_than_zero',", "1: self.throw_type_error('atomic', elements.head) if isvariable(term) and elements == List.EMPTY_LIST: self.throw_domain_error('non_empty_list', elements) # TODO", "PrologEvaluationError raise PrologEvaluationError('undefined') if not x and y < 0: from ..core import", "associated with it.''' def execute(self, flag, value): from .. import core # for", "term.value if isinstance(term, Compound): from ..core import deref args = (evaluate_expression(deref(a)) for a", "not isnumber(number): self.throw_type_error('number', number) if isvariable(number) and not islist(codelist) and not ispartiallist(codelist): self.throw_type_error('list',", "= evaluate_expression(e2) return v1 > v2 class ArithmeticGreaterThanOrEqual_2(BuiltIn): \"\"\"'>='(@evaluable, @evaluable) '>='(E1, E2) is", "which unifies with Head :- Body. 
It is used to remove those unifying", "instances): wt = self.s[0] wt_list = [e for e in self.s if isvariant(wt.value[1],", "of AtomR.''' def execute(self, atom, before, length, after, subatom): if isvariable(atom): self.throw_instantiation_error() if", "I or F and is false if X is a member of the", "(b + l) return (self.unify(before, Atomic(b)) and self.unify(length, Atomic(l)) and self.unify(after, Atomic(a)) and", "List.EMPTY_LIST else s.as_list() solutions = list(set(solutions)) solutions.sort() return solutions ### ### Logic and", "the set V or CT.''' def execute(self, x): return isinstance(x, Atomic) class Compound_1(BuiltIn):", "raise PrologTypeError('integer', Atomic(x)) if not isinstance(y, int): from ..core import PrologTypeError raise PrologTypeError('integer',", "from ..core import PrologEvaluationError raise PrologEvaluationError('undefined') return float(x ** y) def logarithm(x): '''Redefined", "return False PREDICATES = { # Term unification (ISO 8.2) '=/2' : Unify_2,", "true iff X preceeds Y.\"\"\" def execute(self, x, y): return x < y", "y class TermLessThan_2(BuiltIn): \"\"\"'@<'(@term, @term) Test the ordering of two terms. '@<'(X, Y)", "integer, and always returns a float. Also, we need to manage ZeroDivisionError errors", "order in which each list is found is undefined.''' def execute(self, template, goal,", "bijection.get(a1) if a is not None and a2 != a: return False else:", "Y) is true iff Y preceeds X or Y and X are identical", "the operator's \"scope\" is much wider than the single # goal, even when", "terms. 
'@>='(X, Y) is true iff Y preceeds X or Y and X", "operator.add, '*/2' : operator.mul, '-/2' : operator.sub, '-/1' : operator.neg, '//2' : divide,", "x): return isinstance(x, Atomic) class Compound_1(BuiltIn): '''compound(@term) compound(X) is true iff X is", "= [(s[:i], s[i:], s)] else: n1 = atom1.name n2 = atom2.name self.data =", "self.kb.assert_clause(clause, append=True) return True class Retract_1(BuiltIn): '''retract(+clause) retract(Clause) is true iff the database", "in ISO Prolog x // y is valid only when x and y", "for e in self.s if isvariant(wt.value[1], e.value[1])] t_list = [e.value[2] for e in", "return x >= y ### ### Term creation and decomposition (ISO 8.5) ###", "arg): if isvariable(n) or isvariable(term): self.throw_instantiation_error() if not isinstance(n.value, int): self.throw_type_error('integer', n) if", "existential variables set EV of a term T is a set of variables", "user-defined # procedure is handled directly by the database t = tuple(Variable('_') for", "y.name == '_'): return False return x >= y ### ### Term creation", "set of T.''' vst = variable_set(t) vsv = variable_set(v) est = existential_variable_set(t) return", "import PrologTypeError raise PrologTypeError('integer', Atomic(x)) if not isinstance(y, int): from ..core import PrologTypeError", "def execute(self, x): return not isvariable(x) class Number_1(BuiltIn): '''number(@term) number(X) is true if", "iterated goal term of Goal * else G is T.''' if term.name ==", "X or Y and X are identical terms.\"\"\" def execute(self, x, y): #", "is false.\"\"\" # \"\"\"'\\\\+'(@callable_term) # # '\\\\+'(Term) is true iff call(Term) is false.\"\"\"", "def reexecute(self, head, body): self.reset_substitution() return self.pick_clause(head, body) def pick_clause(self, head, body): if", "Prolog flag to be altered.''' def execute(self, flag, value): if isvariable(flag) or isvariable(value):", "is a member of the set V.''' def execute(self, x): return isvariable(x) class", "y ### ### Term 
creation and decomposition (ISO 8.5) ### class Functor_3(BuiltIn): '''functor(-nonvar,", "not in wt_list] from .. import core for wwtt, t in zip(wt_list, t_list):", "F.''' def execute(self, x): return x.arity == 0 and isinstance(x.value, float) class Atomic_1(BuiltIn):", "iff List is a list whose elements correspond to the successive characters of", "x // y return x / y def intdivide(x, y): '''Redefined w.r.t. Python", "successive re-executions of \"call(Goal), X=Template\" after systematic replacement of all variables in X", "Subject To Occur-check) then '='(X, Y) is true iff X and Y are", "element in charlist.as_list(): if isvariable(element): self.throw_instantiation_error() if not isvariable(number) and not isnumber(number): self.throw_type_error('number',", "raise PrologTypeError('integer', Atomic(y)) return x % y def sign(x): '''Redefined w.r.t. Python because", "v1 == v2 class ArithmeticNotEqual_2(BuiltIn): \"\"\"'=\\='(@evaluable, @evaluable) '=\\='(E1, E2) is true iff evaluating", "Length equals the number of characters of the name of the atom Atom.'''", "isvariable(atom2): s = atom12.name if not s.startswith(atom1.name): return False else: i = len(atom1.name)", "+callable_term, ?list) findall(Template, Goal, Instances) is true iff Instances unifies with the list", "'asserta/1' : Asserta_1, 'assertz/1' : Assertz_1, 'retract/1' : Retract_1, 'abolish/1' : Abolish_1, #", "predicate indicators are found by # current_predicate/1 is implementation dependent i = self.indicators.pop()", "should use write_canonical/1 lst = [Atomic(c) for c in chars] return self.unify(charlist, List.from_list(lst))", "code for the corresponding character of the name.''' def execute(self, atom, codelist): if", ": AtomCodes_2, 'char_code/2' : CharCode_2, 'number_chars/2' : NumberChars_2, 'number_codes/2' : NumberCodes_2, # Implementation", "Length, After, Sub_atom) is true iff atom Atom can be broken into three", "of T.''' from ..core import deref if isinstance(term, Variable): if 
term.isfree(): return {term}", "leftshift, '/\\\\/2' : bitand, '\\\\//2' : bitor, '\\\\/1' : bitnot} return d.get(name) def", "the order in which each list is found is undefined.''' def _create_solution_list(self, s):", "length): if isvariable(atom): self.throw_instantiation_error() if not isatom(atom): self.throw_type_error('atom', atom) if (not isvariable(length) and", "from .. import core caller = core.Caller() caller._kb = self.kb result = caller.solve(term)", "AtomR, such that Before is the number of characters of the name of", "not isvariant(a1, a2): return False return True return False PREDICATES = { #", "Expression) is true iff the value of evaluating Expression as an expression is", "solutions = list(set(solutions)) solutions.sort() return solutions ### ### Logic and control (ISO 8.15)", "creation and decomposition (ISO 8.5) ### class Functor_3(BuiltIn): '''functor(-nonvar, +atomic, +integer) functor(+nonvar, ?atomic,", "for element in codelist.as_list(): if isvariable(element): self.throw_instantiation_error() if not isvariable(element): try: chr(element.value) except", "= [Atomic(ord(char)) for char in atom.name] return self.unify(codelist, List.from_list(codes)) else: chars = [chr(code.value)", "8.17) ### class SetPrologFlag_2(BuiltIn): '''set_prolog_flag(+flag, @nonvar) A goal set_prolog_flag(Flag, Value) enables the value", "x < 0: from ..core import PrologEvaluationError raise PrologEvaluationError('undefined') from math import sqrt", "in Template or Goal would be instantiated by successive re-executions of \"call(Goal), X=Template\"", "def execute(self, template, goal, instances): if isvariable(goal): self.throw_instantiation_error() if isnumber(goal): self.throw_type_error('callable', goal) if", "Atomic(n)) if not isinstance(s, int): from ..core import PrologTypeError raise PrologTypeError('integer', Atomic(s)) return", ": AtomConcat_3, 'sub_atom/5' : SubAtom_5, 'atom_chars/2' : AtomChars_2, 'atom_codes/2' : AtomCodes_2, 'char_code/2' :", 
"caller.currsubst()) #values.append(core.renamed_copy(v)) values.append(v._copy_term()) result = caller.solve_next() values = List.EMPTY_LIST if not values else", "0) on our own.''' if x < 0: from ..core import PrologEvaluationError raise", "isnumber(number): self.throw_type_error('number', number) if isvariable(number) and not islist(codelist) and not ispartiallist(codelist): self.throw_type_error('list', codelist)", "isvariable(charlist): self.throw_instantiation_error() if not islist(charlist) and not ispartiallist(charlist): self.throw_type_error('list', charlist) for element in", "= [chr(code.value) for code in codelist.as_list()] return self.unify(atom, Atomic(''.join(chars))) elif isvariable(codelist) or ispartiallist(codelist):", "import modf f, i = modf(x) return i def float_fractional_part(x): '''Redefined w.r.t. Python", "if not term.value: value = mgu.get(term.name) if value: return value else: return ground(term.binding(),", "self.data = [(d, p) for (d, p) in self.data if len(n) - n.index(d,", "(not isvariable(before) and not (isnumber(before) and isinstance(before.value, int))): self.throw_type_error('integer', before) if (not isvariable(length)", "method does not hold Prolog # semantics for anonymous variables if (isvariable(x) and", "functor(*args) from ..core import PrologTypeError raise PrologTypeError('number', term) def search_evaluable_functor(name): import math import", "= [] while start <= end: for i in range(start, end + 1):", "indicator Pred and all its clauses, leaving the database in the same state", "Term is a compound term with a functor whose identifier is Name and", "y): '''Redefined w.r.t. 
Python because in ISO Prolog x ** y with x", "iff Term_2 unifies with a term T which is a renamed copy of", "number of characters of the name of Sub_atom, and After is the number", "'''number_chars(+number, ?character_list)\\nnumber_chars(-number, +character_list) number_chars(Number, List) is true iff List is a list whose", "x.name == '_' and y.name == '_'): return False return x >= y", "< v2 class ArithmeticLessThanOrEqual_2(BuiltIn): \"\"\"'=<'(@evaluable, @evaluable) '=<'(E1, E2) is true iff evaluating E1", "'@</2' : TermLessThan_2, '@>/2' : TermGreaterThan_2, '@>=/2' : TermGreaterThanOrEqual_2, # Term creation and", "x or y as float) on our own.''' if not isinstance(x, int): from", "self.reset_substitution() if clause.predicate_indicator() == ':-/2': head = clause.value[1] body = clause.value[2] else: head", "self.throw_permission_error('modify', 'static_procedure', pi) self.clauses_to_unify = [] self.clauses_to_remove = [] procedure = self.kb.search(head) if", "instances): fvs = free_variable_set(goal, template) self.witness = Compound('witness', *fvs) if fvs else Atomic('witness')", "def _create_solution_list(self, s): solutions = [] if s == List.EMPTY_LIST else s.as_list() solutions", "### Simple arithmetic functors (ISO 9.1) ### Other arithmetic functors (ISO 9.3) ###", "an atomic term, then EV is the empty set * else if T", "~x ### ### Arithmetic comparison (ISO 8.7) ### class ArithmeticEqual_2(BuiltIn): \"\"\"'=:='(@evaluable, @evaluable) '=:='(E1,", "import operator d = {'+/2' : operator.add, '*/2' : operator.mul, '-/2' : operator.sub,", "corresponding to a character sequence of Number which could be output.''' def execute(self,", "replacement of all variables in X by new variables.''' def execute(self, template, goal,", "a member of the set CT.''' def execute(self, x): return isinstance(x, (Compound, List))", "whose elements correspond to the successive characters of the name of atom Atom,", "i = self.indicators.pop() return self.unify(pi, i) ### ### 
Clause creation and destruction (ISO", "After is the number of characters of the name of AtomR.''' def execute(self,", "a renamed variable (e.g. coming # from a clause renaming) temp = mgu[t2.name]", "..core import deref if isinstance(term, Variable): if term.isfree(): return {term} else: term =", "(ISO 8.7) '=:=/2' : ArithmeticEqual_2, '=\\=/2' : ArithmeticNotEqual_2, '</2' : ArithmeticLessThan_2, '=</2' :", "E2 as expressions the corresponding arithmetic value of E1 is less than or", "set of T and BV where BV is a set of variables defined", "x < 0) on our own.''' if x < 0: from ..core import", "is the character code for the corresponding character of the name.''' def execute(self,", "v1 >= v2 ### ### Clause retrival and information (ISO 8.8) ### class", "in range(arity.value)) c = Compound(name.name, *t) from ..builtin import search_builtin if search_builtin(c): self.throw_permission_error('modify',", "are NSTO (Not Subject To Occur-check) then '\\\\='(X, Y) is true iff X", "# goal, even when using parentheses! 
'\\+/1' : Not_1, 'not/1' : Not_1, 'repeat/0'", "for code in codelist.as_list()] return self.unify(atom, Atomic(''.join(chars))) elif isvariable(codelist) or ispartiallist(codelist): codes =", "..core import deref chars = [deref(c).name for c in charlist.as_list()] return self.unify(atom, Atomic(''.join(chars)))", "errors # TODO No undefined errors if isvariable(term): from ..core import PrologInstantiationError raise", "Python because in ISO Prolog x ** y with x < 0 is", "Goal, Instances) assembles as a list the solutions of Goal for each different", "atom1, atom2, atom12): if isvariable(atom1) and isvariable(atom12): self.throw_instantiation_error() if isvariable(atom2) and isvariable(atom12): self.throw_instantiation_error()", "[chr(code.value) for code in codelist.as_list()] return self.unify(atom, Atomic(''.join(chars))) elif isvariable(codelist) or ispartiallist(codelist): codes", "if isvariable(term): self.throw_instantiation_error() if isnumber(term): self.throw_type_error('callable', term) from .. import core caller =", "not isvariable(atom) and not isatom(atom): self.throw_type_error('atom', atom) if isvariable(atom): if isvariable(charlist): self.throw_instantiation_error() if", "of the set V or CT.''' def execute(self, x): return isinstance(x, Atomic) class", "iff evaluating E1 and E2 as expressions the corresponding arithmetic values are equal.\"\"\"", ".. 
import core for clause in procedure.clauses(): h, b = convert_clause_to_term(clause.head(), clause.body()) if", "'-/2' : operator.sub, '-/1' : operator.neg, '//2' : divide, '///2' : intdivide, 'mod/2'", "set F.''' def execute(self, x): return x.arity == 0 and isinstance(x.value, float) class", "SubAtom_5, 'atom_chars/2' : AtomChars_2, 'atom_codes/2' : AtomCodes_2, 'char_code/2' : CharCode_2, 'number_chars/2' : NumberChars_2,", "term, elements): if isvariable(term) and ispartiallist(elements): self.throw_instantiation_error() if not islist(elements) and not ispartiallist(elements):", "'number_chars/2' : NumberChars_2, 'number_codes/2' : NumberCodes_2, # Implementation defined hooks (ISO 8.17) 'set_prolog_flag/2'", "x): return isvariable(x) class Atom_1(BuiltIn): '''atom(@term) atom(X) is true iff X is a", "chr(code.value) except UnicodeDecodeError: self.throw_representation_error(code) if isvariable(char): c = chr(code.value) return self.unify(char, Atomic(c)) elif", "isvariable(value): self.throw_instantiation_error() if not isvariable(flag) and not isatom(flag): self.throw_type_error('atom', flag) from .. import", "is an integer, and always returns a float. 
Also, we need to manage", "ArithmeticNotEqual_2(BuiltIn): \"\"\"'=\\='(@evaluable, @evaluable) '=\\='(E1, E2) is true iff evaluating E1 and E2 as", "of concatenating the characters of the name of the atom Atom_2 to the", "if not self.clauses: return False c = self.clauses.pop(0) return self.unify(Compound('clause', head, body), c)", "of the atom Atom_1.''' def execute(self, atom1, atom2, atom12): if isvariable(atom1) and isvariable(atom12):", "s.endswith(atom2.name): return False else: i = s.index(atom2.name) self.data = [(s[:i], s[i:], s)] elif", "class Atomic_1(BuiltIn): '''atomic(@term) atomic(X) is true if X is a member of the", "Retract_1(BuiltIn): '''retract(+clause) retract(Clause) is true iff the database contains at least one dynamic", "be instantiated by successive re-executions of \"call(Goal), X=Template\" after systematic replacement of all", "return iterated_goal_term(term.value[2]) return term def isvariant(t, v): '''Two terms are variants if there", "list are in order of solution, but the order in which each list", ": Setof_3, # Logic and control (ISO 8.15) # FIXME \\+ does not", "be distributed onto the Term hierarchy classes def variable_set(term): '''The variable set Sv", "is a renamed copy of Term_1.''' def execute(self, t1, t2): from .. import", "value): from .. import core # for flags if not isvariable(flag) and not", "self.throw_type_error('character', char) if not isvariable(code) and not isinstance(code.value, int): self.throw_type_error('integer', code) if not", ">= len(term.value): return False return self.unify(arg, term.value[n.value]) class Univ_2(BuiltIn): \"\"\"'=..'(+nonvar, ?list) '=..'(-nonvar, +list)", "import BuiltIn ### ### Term unification (ISO 8.2) ### class Unify_2(BuiltIn): \"\"\"'='(?term, ?term)", "character code for the one-char atom Char is Code.''' def execute(self, char, code):", "(e.g. 
for x < 0) on our own.''' if x < 0: from", "isvariable(number) and ispartiallist(charlist): self.throw_instantiation_error() if isvariable(number): for element in charlist.as_list(): if isvariable(element): self.throw_instantiation_error()", "iff X and Y are unifiable.\"\"\" def execute(self, x, y): # TODO prologlib", "a list whose elements are the one-char atoms whose names are the successive", "== '_'): return True return x != y class TermLessThan_2(BuiltIn): \"\"\"'@<'(@term, @term) Test", "if not isvariable(atom) and not isatom(atom): self.throw_type_error('atom', atom) if isvariable(atom): if isvariable(charlist): self.throw_instantiation_error()", "the solutions of Goal for each different instantiation of the free variables in", "Atomic(f.name)) and self.unify(value, Atomic(f.value)) class Halt_0(BuiltIn): '''halt halt neither succeeds nor fails. It", "with respect to a term V is a set of variables defined as", "pi) from .. import core for clause in procedure.clauses(): h, b = convert_clause_to_term(clause.head(),", "import PrologEvaluationError raise PrologEvaluationError('undefined') from math import sqrt return sqrt(x) def rightshift(n, s):", "chr(element.value) except UnicodeDecodeError: self.throw_representation_error(element) if isvariable(atom): chars = [chr(code.value) for code in codelist.as_list()]", "when using parentheses! '\\+/1' : Not_1, 'not/1' : Not_1, 'repeat/0' : Repeat_0, #", "are found by # current_predicate/1 is implementation dependent i = self.indicators.pop() return self.unify(pi,", "from ..core import PrologTypeError raise PrologTypeError('float', Atomic(x)) from math import modf f, i", "end = len(n) - (after.value if isinstance(after, Atomic) else 0) self.data = []", "predicate. 
return self.unify(x, y) class NotUnifiable_2(BuiltIn): \"\"\"'\\\\='(@term, @term) If X and Y are", "after, subatom): s, p = self.data.pop(0) b = atom.name.index(s, p) l = len(s)", "PrologParser, InvalidTermException s = ''.join([char.name for char in charlist.as_list()]) try: # the parser", "= free_variable_set(goal, template) self.witness = Compound('witness', *fvs) if fvs else Atomic('witness') g =", "not isinstance(x, int): from ..core import PrologTypeError raise PrologTypeError('integer', Atomic(x)) return ~x ###", "isvariable(head): self.throw_instantiation_error() if not iscallable(head): self.throw_type_error('callable', head) if not (isvariable(body) or iscallable(body)): self.throw_type_error('callable',", "return x // y return x / y def intdivide(x, y): '''Redefined w.r.t.", "in chars] return self.unify(codelist, List.from_list(lst)) ### ### Implementation defined hooks (ISO 8.17) ###", "and isvariable(a2) and not a1.name.startswith('_'): a = bijection.get(a1) if a is not None", "isnumber(head): self.throw_type_error('callable', head) # error on access permission to a user-defined # procedure", "in wt_list] from .. import core for wwtt, t in zip(wt_list, t_list): ww", "PrologEvaluationError('undefined') from math import sqrt return sqrt(x) def rightshift(n, s): '''Redefined w.r.t. 
Python", "database from ..builtin import search_builtin if search_builtin(head): pi = Compound('/', Atomic(head.name), Atomic(head.arity)) self.throw_permission_error('modify',", "for arg in term.value[1:]: s.update(variable_set(arg)) else: # a list for e in term.as_list():", "of Term.\"\"\" def execute(self, term, elements): if isvariable(term) and ispartiallist(elements): self.throw_instantiation_error() if not", "true iff evaluating E1 and E2 as expressions the corresponding arithmetic value of", "and not (isnumber(length) and isinstance(length.value, int))): self.throw_type_error('integer', length) if isnumber(length) and length.value <", "'@>/2' : TermGreaterThan_2, '@>=/2' : TermGreaterThanOrEqual_2, # Term creation and decomposition (ISO 8.5)", "TODO No overflow/underflow errors # TODO No undefined errors if isvariable(term): from ..core", "processor, and Value is the value currently associated with it.''' def execute(self, flag,", "else 0 end = len(n) - (after.value if isinstance(after, Atomic) else 0) self.data", "def execute(self, x, y): # The Python __ne__ method does not hold Prolog", "set of variables defined recursively as follows: * if T is a variable", "import PrologTypeError raise PrologTypeError('float', Atomic(x)) from math import modf f, i = modf(x)", "return self.pick_bag(template, goal, instances) def pick_bag(self, template, goal, instances): wt = self.s[0] wt_list", "int))): self.throw_type_error('integer', after) if isnumber(before) and before.value < 0: self.throw_domain_error('not_less_than_zero', before) if isnumber(length)", "and isvariable(code): self.throw_instantiation_error() if not isvariable(char) and len(char.name) != 1: self.throw_type_error('character', char) if", "evaluate_expression(e1) v2 = evaluate_expression(e2) return v1 <= v2 class ArithmeticGreaterThan_2(BuiltIn): \"\"\"'>'(@evaluable, @evaluable) '>'(E1,", "self.reset_substitution() if not self.flags: return False return self.pick_flag(flag, value) def pick_flag(self, 
flag, value):", "'\\==/2' : TermNotIdentical_2, '@</2' : TermLessThan_2, '@>/2' : TermGreaterThan_2, '@>=/2' : TermGreaterThanOrEqual_2, #", "name) and self.unify(arity, Atomic(0)) if isinstance(term, (Compound, List)): return (self.unify(Atomic(term.name), name) and self.unify(Atomic(term.arity),", "true iff X is a member of the set V.''' def execute(self, x):", "invoked Prolog, passing the value of X as a message.''' def execute(self, x):", "w.r.t. Python because in ISO Prolog x ** y with x < 0", "float(X) is true iff X is a member of the set F.''' def", "values else List.from_list(values) return self.unify(values, instances) def ground(term, mgu): if isinstance(term, Variable): if", "or islist(charlist) or ispartiallist(charlist): chars = [Atomic(c) for c in atom.name] return self.unify(charlist,", "number, charlist): if isvariable(number) and ispartiallist(charlist): self.throw_instantiation_error() if isvariable(number): for element in charlist.as_list():", "'static_procedure', pi) self.clauses_to_unify = [] self.clauses_to_remove = [] procedure = self.kb.search(head) if not", "CopyTerm_2(BuiltIn): '''copy_term(?term, ?term) copy_term(Term_1, Term_2) is true iff Term_2 unifies with a term", "and not isatom(atom): self.throw_type_error('atom', atom) if isvariable(atom): if ispartiallist(codelist): self.throw_instantiation_error() if not islist(codelist)", "the set V, A, or CT.''' def execute(self, x): return isnumber(x) ### ###", "free variables in it. The elements of each list are in order of", "convert_to_term(body)) def convert_to_term(head): if head.arity == 0: return Atomic(head.name) from ..core import renamed_copy", "when x and y are integers. 
Also, we need to manage ZeroDivisionError errors", "than or equal to the corresponding arithmetic value of E2.\"\"\" def execute(self, e1,", "= copysign(1, x) return int(s) if isinstance(x, int) else s def float_integer_part(x): '''Redefined", "existential_variable_set(t) return vst.difference(vsv.union(est)) # TODO This should be distributed onto the Term hierarchy", "or iscallable(body)): self.throw_type_error('callable', body) self.clauses = [] procedure = self.kb.search(head) if not procedure:", "self.unify(flag, Atomic(f.name)) and self.unify(value, Atomic(f.value)) class Halt_0(BuiltIn): '''halt halt neither succeeds nor fails.", "and y.name == '_'): return False return x >= y ### ### Term", "information (ISO 8.8) 'clause/2' : Clause_2, 'current_predicate/1' : CurrentPredicate_1, # Clause creation and", "Y are NSTO (Not Subject To Occur-check) then '='(X, Y) is true iff", "Still preserve the binding for t2 just in # case t2 were a", "= atom.name start = before.value if isinstance(before, Atomic) else 0 end = len(n)", "class Unify_2(BuiltIn): \"\"\"'='(?term, ?term) If X and Y are NSTO (Not Subject To", "'''retract(+clause) retract(Clause) is true iff the database contains at least one dynamic procedure", "after) n = atom.name start = before.value if isinstance(before, Atomic) else 0 end", "False class Arg_3(BuiltIn): '''arg(+integer, +compound_term, ?term) arg(N, Term, Arg) is true iff the", "float_fractional_part, '**/2' : power, 'sin/1' : math.sin, 'cos/1' : math.cos, 'atan/1' : math.atan,", "isinstance(v, Compound): if t.name != v.name or t.arity != v.arity: return False bijection", "isvariable(y) and x.name == '_' and y.name == '_'): return False return x", "To Occur-check) then '='(X, Y) is true iff X and Y are unifiable.\"\"\"", "the user-defined procedures in the database.''' def execute(self, pi): if not isvariable(pi) and", "X is a member of the set I.''' def execute(self, x): return x.arity", "atom) if not isvariable(subatom) and not 
isatom(subatom): self.throw_type_error('atom', subatom) if (not isvariable(before) and", "(isnumber(length) and isinstance(length.value, int))): self.throw_type_error('integer', length) if (not isvariable(after) and not (isnumber(after) and", "self.throw_type_error('integer', length) if (not isvariable(after) and not (isnumber(after) and isinstance(after.value, int))): self.throw_type_error('integer', after)", "x // y def module(x, y): '''Redefined w.r.t. Python because in ISO Prolog", "chr(element.value) except UnicodeDecodeError: self.throw_representation_error(element) if isvariable(number) or islist(codelist): from ..parser import PrologParser, InvalidTermException", "chars = [Atomic(c) for c in atom.name] return self.unify(charlist, List.from_list(chars)) else: chars =", "isinstance(elements.head, Compound) and len(elements) > 1: self.throw_type_error('atomic', elements.head) if isvariable(term) and elements ==", "and elements.arity == 2: if len(elements) == 1: t = elements.head return self.unify(term,", "self.throw_instantiation_error() v1 = evaluate_expression(e1) v2 = evaluate_expression(e2) return v1 != v2 class ArithmeticLessThan_2(BuiltIn):", "self.throw_type_error('atom', atom) if (not isvariable(length) and not (isnumber(length) and isinstance(length.value, int))): self.throw_type_error('integer', length)", "and (not islist(instances) and not ispartiallist(instances))): self.throw_type_error('list', instances) from .. 
import core caller", "(ISO 8.2) '=/2' : Unify_2, '\\=/2' : NotUnifiable_2, # Type testing (ISO 8.3)", "''.join([char.name for char in charlist.as_list()]) try: # the parser needs an End Token", "# only in the context of the copy_term/2 built-in mgu = core.unify(t2, t)", "ord(char.name) return self.unify(code, Atomic(c)) else: return ord(char.name) == code.value class NumberChars_2(BuiltIn): '''number_chars(+number, ?character_list)\\nnumber_chars(-number,", "the latter such that the latter term results from replacing each variable X", "= Compound('/', Atomic(head.name), Atomic(head.arity)) self.throw_permission_error('access', 'private_procedure', pi) from .. import core for clause", "Compound(term.name, *args) return term class Bagof_3(BuiltIn): '''bagof(?term, +callable_term, ?list) bagof(Template, Goal, Instances) assembles", "def execute(self, flag, value): if isvariable(flag) or isvariable(value): self.throw_instantiation_error() if not isvariable(flag) and", "Value) is true iff Flag is a flag supported by the processor, and", "number of characters of the name of AtomR.''' def execute(self, atom, before, length,", "Also, we need to manage ZeroDivisionError errors on our own.''' if not y:", "of a term T is a set of variables defined recursively as follows:", "isvariable(atom2) and not isatom(atom2): self.throw_type_error('atom', atom2) if not isvariable(atom12) and not isatom(atom12): self.throw_type_error('atom',", "and not isatom(atom1): self.throw_type_error('atom', atom1) if not isvariable(atom2) and not isatom(atom2): self.throw_type_error('atom', atom2)", "self.throw_instantiation_error() if not isvariable(number) and not isnumber(number): self.throw_type_error('number', number) if isvariable(number) and not", "atom) if isvariable(atom): if isvariable(charlist): self.throw_instantiation_error() if not islist(charlist) and not ispartiallist(charlist): self.throw_type_error('list',", "X is a member of the set F.''' def execute(self, x): return x.arity", 
"for element in codelist.as_list(): if isvariable(element): self.throw_instantiation_error() if not isvariable(number) and not isnumber(number):", "not isvariable(number) and not isnumber(number): self.throw_type_error('number', number) if isvariable(number) and not islist(charlist) and", "atom1, atom2, atom12): self.reset_substitution() if not self.data: return False return self.pick_data(atom1, atom2, atom12)", "before all existing clauses of the procedure whose predicate is equal to the", "true iff X preceeds Y or X and Y are identical terms.\"\"\" def", "pi.value[1:] if isvariable(name) or isvariable(arity): self.throw_instantiation_error() if not isinstance(arity.value, int): self.throw_type_error('integer', arity) if", "'is'(Result, Expression) is true iff the value of evaluating Expression as an expression", "is a set of variables defined recursively as follows: * if T is", "float_fractional_part(x) is valid only when x is a float.''' if not isinstance(x, float):", "not islist(charlist) and not ispartiallist(charlist): self.throw_type_error('list', charlist) for element in charlist.as_list(): if isvariable(element):", "if isvariable(atom): chars = [chr(code.value) for code in codelist.as_list()] return self.unify(atom, Atomic(''.join(chars))) elif", "instantiation of the free variables in it. The elements of each list are", "and BV where BV is a set of variables defined as the union", "def squareroot(x): '''Redefined w.r.t. Python because we need to manage ValueError errors (e.g.", "def bitand(x, y): '''Redefined w.r.t. Python because we need to manage TypeError errors", "body) is not None): self.clauses_to_unify.append(Compound('clause', h, b)) self.clauses_to_remove.append(clause) return self.pick_clause(head, body) def reexecute(self,", "PrologTypeError('integer', Atomic(s)) return n >> s def leftshift(n, s): '''Redefined w.r.t. 
Python because", "the conversion of the clause body to a # goal and on access", "unifies with ^(_, Goal) then G is the iterated goal term of Goal", "V, A, or CT.''' def execute(self, x): return isnumber(x) ### ### Term comparison", "(ISO 8.10) ### class Findall_3(BuiltIn): '''findall(?term, +callable_term, ?list) findall(Template, Goal, Instances) is true", "comparison (ISO 8.7) '=:=/2' : ArithmeticEqual_2, '=\\=/2' : ArithmeticNotEqual_2, '</2' : ArithmeticLessThan_2, '=</2'", "self.unify(arity, Atomic(0)) if isinstance(term, (Compound, List)): return (self.unify(Atomic(term.name), name) and self.unify(Atomic(term.arity), arity)) if", "greater than or equal to the corresponding arithmetic value of E2.\"\"\" def execute(self,", "not isatom(name) and arity.value > 0: self.throw_type_error('atom', name) if isinstance(term, Atomic): return self.unify(term,", "?integer, ?integer, ?atom) sub_atom(Atom, Before, Length, After, Sub_atom) is true iff atom Atom", "= [chr(code.value) for code in codelist.as_list()] return atom.name == ''.join(chars) class CharCode_2(BuiltIn): '''char_code(+character,", "head, body): self.reset_substitution() return self.pick_clause(head, body) def pick_clause(self, head, body): if not self.clauses:", "== '_' and y.name == '_'): return False return x >= y ###", "not islist(codelist) and not ispartiallist(codelist): self.throw_type_error('list', codelist) for element in codelist.as_list(): if isvariable(element):", "if isinstance(x, int) and isinstance(y, int): return x // y return x /", "+atom, -atom) atom_concat(Atom_1, Atom_2, Atom_12) is true iff characters of the name of", ": math.floor, 'round/1' : round, 'ceiling/1' : math.ceil, 'truncate/1' : math.trunc, 'float/1' :", "range(start, end + 1): self.data.append((n[start:i], start)) start += 1 if isinstance(before, Atomic): self.data", "List.from_list(codes)) else: chars = [chr(code.value) for code in codelist.as_list()] return atom.name == ''.join(chars)", "existential variables set of the 
term G * else EV is the empty", "invoked Prolog.''' def execute(self): exit(0) class Halt_1(BuiltIn): '''halt(+integer) halt(X) neither succeeds nor fails.", "@evaluable) 'is'(Result, Expression) is true iff the value of evaluating Expression as an", "atom12) def reexecute(self, atom1, atom2, atom12): self.reset_substitution() if not self.data: return False return", "'//2' : divide, '///2' : intdivide, 'mod/2' : module, 'rem/2' : module, 'floor/1'", "greater than the corresponding arithmetic value of E2.\"\"\" def execute(self, e1, e2): if", "except InvalidTermException as e: self.throw_syntax_error(Atomic(s)) else: chars = list(str(number.value)) # FIXME this should", "member of the set I or F and is false if X is", "n >> s def leftshift(n, s): '''Redefined w.r.t. Python because we need to", "self.throw_domain_error('not_less_than_zero', arity) # error on access permission to a user-defined # procedure is", "the solutions of a goal for each different instantiation of the free variables", "with x and y integers is equivalent to intdiv(x, y). Also, we need", "Compound, List from ..parser import isvariable, isatom, isnumber, islist, ispartiallist, iscallable from ..core", "Instances) assembles as a list the solutions of Goal for each different instantiation", "return False else: i = s.index(atom2.name) self.data = [(s[:i], s[i:], s)] elif isvariable(atom2):", "raise PrologTypeError('integer', Atomic(s)) return n >> s def leftshift(n, s): '''Redefined w.r.t. 
Python", "clause.body()) if (core.unify(h, head) is not None and core.unify(b, body) is not None):", "if isvariable(number) or islist(charlist): from ..parser import PrologParser, InvalidTermException s = ''.join([char.name for", "solutions ### ### Logic and control (ISO 8.15) ### class Not_1(BuiltIn): \"\"\"not(@callable_term) not(Term)", "is the empty set * else if T is a variable then Sv", "= atom1.name n2 = atom2.name self.data = [(n1, n2, n1 + n2)] return", "the variables of the latter such that the latter term results from replacing", "Compound_1, 'nonvar/1' : Nonvar_1, 'number/1' : Number_1, # Term comparison (ISO 8.4) '@=</2'", "'=</2' : ArithmeticLessThanOrEqual_2, '>/2' : ArithmeticGreaterThan_2, '>=/2' : ArithmeticGreaterThanOrEqual_2, # Clause retrival and", "existential_variable_set(term): '''The existential variables set EV of a term T is a set", "for element in charlist.as_list(): if isvariable(element): self.throw_instantiation_error() if not isvariable(number) and not isnumber(number):", "0: from ..core import PrologEvaluationError raise PrologEvaluationError('undefined') return float(x ** y) def logarithm(x):", "# TODO Missing max_arity related error if isinstance(term, Atomic): l = List(term) return", "list is found is undefined.''' def execute(self, template, goal, instances): fvs = free_variable_set(goal,", "a user-defined # procedure is handled directly by the database from ..builtin import", "(ISO 8.15) ### class Not_1(BuiltIn): \"\"\"not(@callable_term) not(Term) is true iff call(Term) is false.\"\"\"", "self.throw_domain_error('prolog_flag', flag) f = core._FLAGS[flag.name] if len(f.allowed) == 1: self.throw_permission_error('modify', 'flag', flag) if", "whose identifier is Name and arity Arity, or * Term is an atomic", "List from ..parser import isvariable, isatom, isnumber, islist, ispartiallist, iscallable from ..core import", "= {} for a1, a2 in zip(t.value[1:], v.value[1:]): if isvariable(a1) and isvariable(a2) and", "return 
self.unify(term, c) return False class Arg_3(BuiltIn): '''arg(+integer, +compound_term, ?term) arg(N, Term, Arg)", "mgu = core.unify(t2, t) if mgu is not None: if mgu: t2.apply(mgu) #", "Atomic(s)) return n >> s def leftshift(n, s): '''Redefined w.r.t. Python because we", "Type testing (ISO 8.3) ### class Var_1(BuiltIn): '''var(@term) var(X) is true iff X", "v2 class ArithmeticLessThanOrEqual_2(BuiltIn): \"\"\"'=<'(@evaluable, @evaluable) '=<'(E1, E2) is true iff evaluating E1 and", "all existing clauses of the procedure whose predicate is equal to the functor", "to a user-defined # procedure are handled directly by the database from ..builtin", "TODO This should be distributed onto the Term hierarchy classes def variable_set(term): '''The", "import PrologTypeError raise PrologTypeError('integer', Atomic(y)) return x // y def module(x, y): '''Redefined", "dependent i = self.indicators.pop() return self.unify(pi, i) ### ### Clause creation and destruction", "try: chr(element.value) except UnicodeDecodeError: self.throw_representation_error(element) if isvariable(number) or islist(codelist): from ..parser import PrologParser,", "if isinstance(length, Atomic): self.data = [(d, p) for (d, p) in self.data if", "term.value: value = mgu.get(term.name) if value: return value else: return ground(term.binding(), mgu) if", "chars = list(str(number.value)) # FIXME this should use write_canonical/1 lst = [Atomic(c) for", "[] while start <= end: for i in range(start, end + 1): self.data.append((n[start:i],", "term.predicate_indicator() functor = search_evaluable_functor(pi) if not functor: from ..core import PrologTypeError raise PrologTypeError('evaluable',", "for the one-char atom Char is Code.''' def execute(self, char, code): if isvariable(char)", "to add Clause to the database after all existing clauses of the procedure", "and arity.value > 0: t = (Variable('_') for i in range(arity.value)) c =", "<= v2 class ArithmeticGreaterThan_2(BuiltIn): \"\"\"'>'(@evaluable, 
@evaluable) '>'(E1, E2) is true iff evaluating E1", "h, b)) self.clauses_to_remove.append(clause) return self.pick_clause(head, body) def reexecute(self, clause): self.reset_substitution() if clause.predicate_indicator() ==", "List.from_list([Atomic(term.name)] + list(term.value[1:])) return self.unify(elements, l) if isinstance(term, Variable): # elements is a", "1: self.throw_permission_error('modify', 'flag', flag) if value.name not in f.allowed: culprit = Compound('+', flag,", "isvariable(n) or isvariable(term): self.throw_instantiation_error() if not isinstance(n.value, int): self.throw_type_error('integer', n) if not isinstance(term,", "elements) if isvariable(term) and islist(elements) and isvariable(elements.head): self.throw_instantiation_error() if islist(elements) and not isatom(elements.head)", "member of the set F.''' def execute(self, x): return x.arity == 0 and", "the name of AtomL, Length is the number of characters of the name", "= evaluate_expression(e1) v2 = evaluate_expression(e2) return v1 == v2 class ArithmeticNotEqual_2(BuiltIn): \"\"\"'=\\='(@evaluable, @evaluable)", "pi.arity == 2): self.throw_type_error('predicate_indicator', pi) self.indicators = [] for i in self.kb: n,", "is true iff X and Y are unifiable.\"\"\" def execute(self, x, y): #", "much wider than the single # goal, even when using parentheses! 
'\\+/1' :", "search_evaluable_functor(name): import math import operator d = {'+/2' : operator.add, '*/2' : operator.mul,", "and decomposition (ISO 8.5) 'functor/3' : Functor_3, 'arg/3' : Arg_3, '=../2' : Univ_2,", "PrologTypeError raise PrologTypeError('integer', Atomic(s)) return n << s def bitand(x, y): '''Redefined w.r.t.", "vsv = variable_set(v) est = existential_variable_set(t) return vst.difference(vsv.union(est)) # TODO This should be", "atom2, atom12): if isvariable(atom1) and isvariable(atom12): self.throw_instantiation_error() if isvariable(atom2) and isvariable(atom12): self.throw_instantiation_error() if", "False PREDICATES = { # Term unification (ISO 8.2) '=/2' : Unify_2, '\\=/2'", "isinstance(subatom, Atomic): self.data = [(d, p) for (d, p) in self.data if d", "# elements is a list if elements.name == '.' and elements.arity == 2:", "add Clause to the database after all existing clauses of the procedure whose", "p = self.data.pop(0) b = atom.name.index(s, p) l = len(s) a = len(atom.name)", "class Findall_3(BuiltIn): '''findall(?term, +callable_term, ?list) findall(Template, Goal, Instances) is true iff Instances unifies", "c = evaluate_expression(expression) return self.unify(result, Atomic(c)) def evaluate_expression(term): # TODO No overflow/underflow errors", "= { # Term unification (ISO 8.2) '=/2' : Unify_2, '\\=/2' : NotUnifiable_2,", "InvalidTermException s = ''.join([chr(code.value) for code in codelist.as_list()]) try: # the parser needs", "iff PI is a predicate indicator for one of the user-defined procedures in", "else List.from_list(values) return self.unify(values, instances) def ground(term, mgu): if isinstance(term, Variable): if not", "by Xs.''' from ..core import deref t = deref(t) v = deref(v) if", "atom(X) is true iff X is a member of the set A.''' def", "self.kb.retract(self.clauses_to_remove.pop(0)) c = self.clauses_to_unify.pop(0) return self.unify(Compound('clause', head, body), c) class Abolish_1(BuiltIn): 
'''abolish(@predicate_indicator) abolish(Pred)", "true iff evaluating E1 and E2 as expressions the corresponding arithmetic values are", "system invoked Prolog.''' def execute(self): exit(0) class Halt_1(BuiltIn): '''halt(+integer) halt(X) neither succeeds nor", "isatom(name) and arity.value > 0: t = (Variable('_') for i in range(arity.value)) c", "Head :- Body. It is used to remove those unifying clauses from the", "convert_to_term(head): if head.arity == 0: return Atomic(head.name) from ..core import renamed_copy return renamed_copy(head)", "* else EV is the empty set.''' s = set() if isinstance(term, Atomic)", "a character sequence of Number which could be output.''' def execute(self, number, codelist):", "<= end: for i in range(start, end + 1): self.data.append((n[start:i], start)) start +=", "append=True) return True class Retract_1(BuiltIn): '''retract(+clause) retract(Clause) is true iff the database contains", "of E1 is greater than or equal to the corresponding arithmetic value of", "for (d, p) in self.data if d == subatom.value] if not self.data: return", "l = len(s) a = len(atom.name) - (b + l) return (self.unify(before, Atomic(b))", "and isinstance(y, int): return x // y return x / y def intdivide(x,", "= core.Caller() caller._kb = self.kb result = caller.solve(term) return not result class Repeat_0(BuiltIn):", "fvs else Atomic('witness') g = iterated_goal_term(goal) findall = Findall_3(self.kb) findall.execute(Compound('+', self.witness, template), g,", "n1 + n2)] return self.pick_data(atom1, atom2, atom12) def reexecute(self, atom1, atom2, atom12): self.reset_substitution()", "y def module(x, y): '''Redefined w.r.t. 
Python because in ISO Prolog mod(x, y)", "isinstance(term, Compound): args = [] for arg in term.value[1:]: args.append(ground(arg, mgu)) return Compound(term.name,", "self.throw_instantiation_error() if isvariable(term) and isvariable(arity): self.throw_instantiation_error() if isvariable(term) and not isinstance(arity.value, int): self.throw_type_error('integer',", "in charlist.as_list(): if isvariable(element): self.throw_instantiation_error() if not isvariable(number) and not isnumber(number): self.throw_type_error('number', number)", "(not isvariable(length) and not (isnumber(length) and isinstance(length.value, int))): self.throw_type_error('integer', length) if isnumber(length) and", "E2.\"\"\" def execute(self, e1, e2): if isvariable(e1) or isvariable(e2): self.throw_instantiation_error() v1 = evaluate_expression(e1)", "8.8) ### class Clause_2(BuiltIn): '''clause(+head, ?callable_term) clause(Head, Body) is true iff: * the", "from ..core import PrologTypeError raise PrologTypeError('integer', Atomic(x)) return ~x ### ### Arithmetic comparison", "atomic term and List is the list whose only element is Term, or", "8.5) ### class Functor_3(BuiltIn): '''functor(-nonvar, +atomic, +integer) functor(+nonvar, ?atomic, ?integer) functor(Term, Name, Arity)", ": rightshift, '<</2' : leftshift, '/\\\\/2' : bitand, '\\\\//2' : bitor, '\\\\/1' :", "findall(Template, Goal, Instances) is true iff Instances unifies with the list of values", "(ISO 8.3) ### class Var_1(BuiltIn): '''var(@term) var(X) is true iff X is a", "Compound(name.name, *t) return self.unify(term, c) return False class Arg_3(BuiltIn): '''arg(+integer, +compound_term, ?term) arg(N,", "8.3) ### class Var_1(BuiltIn): '''var(@term) var(X) is true iff X is a member", "not (isnumber(after) and isinstance(after.value, int))): self.throw_type_error('integer', after) if isnumber(before) and before.value < 0:", "true iff the Nth argument of Term is Arg.''' def execute(self, n, term,", "is a member of the set A.''' 
def execute(self, x): return isatom(x) class", "re-executable. ''' def execute(self): return True def reexecute(self): return True ### ### Atomic", "+ list(term.value[1:])) return self.unify(elements, l) if isinstance(term, Variable): # elements is a list", "our own.''' if not isinstance(x, int): from ..core import PrologTypeError raise PrologTypeError('integer', Atomic(x))", "0.0 from math import copysign s = copysign(1, x) return int(s) if isinstance(x,", "of X as a message.''' def execute(self, x): if isvariable(x): self.throw_instantiation_error() if not", "is an atomic term equal to Name and Arity is 0.''' def execute(self,", "x % y def sign(x): '''Redefined w.r.t. Python because in ISO Prolog sign(x)", "e in term.as_list(): s.update(variable_set(e)) return s def existential_variable_set(term): '''The existential variables set EV", "iff X is a member of the set I.''' def execute(self, x): return", "n, a = i.split('/') indicator = Compound('/', Atomic(n), Atomic(int(a))) from .. import core", "of V and the existential variable set of T.''' vst = variable_set(t) vsv", "p) for (d, p) in self.data if d == subatom.value] if not self.data:", "codelist.as_list(): if isvariable(element): self.throw_instantiation_error() if not isvariable(element): try: chr(element.value) except UnicodeDecodeError: self.throw_representation_error(element) if", "core.unify(t2, t) if mgu is not None: if mgu: t2.apply(mgu) # Do not", "the name of Sub_atom, and After is the number of characters of the", "Atomic(head.name) from ..core import renamed_copy return renamed_copy(head) class CurrentPredicate_1(BuiltIn): '''current_predicate(?predicate_indicator) current_predicate(PI) is true", "for c in atom.name] return self.unify(charlist, List.from_list(chars)) else: chars = [c.name for c", "?list) '=..'(-nonvar, +list) '=..'(Term, List) is true iff: * Term is an atomic", "- len(d) == after.value] if isinstance(subatom, Atomic): self.data = [(d, p) for (d,", "Sv of a term T is a set of 
variables defined recursively as", "== ''.join(chars) class AtomCodes_2(BuiltIn): '''atom_codes(+atom, ?character_code_list)\\natom_codes(-atom, +character_code_list) atom_codes(Atom, List) is true iff List", "processing (ISO 8.16) ### class AtomLength_2(BuiltIn): '''atom_length(+atom, ?integer) atom_length(Atom, Length) is true iff", "x): return isnumber(x) ### ### Term comparison (ISO 8.4) ### class TermLessThanOrEqual_2(BuiltIn): \"\"\"'@=<'(@term,", "x and y < 0: from ..core import PrologEvaluationError raise PrologEvaluationError('undefined') return float(x", "of the name of the atom Atom.''' def execute(self, atom, length): if isvariable(atom):", "corresponding arithmetic value of E1 is greater than or equal to the corresponding", "is true iff: * Term is a compound term with a functor whose", ": Retract_1, 'abolish/1' : Abolish_1, # All solutions (ISO 8.10) 'findall/3' : Findall_3,", "codelist.as_list()]) try: # the parser needs an End Token n = PrologParser(s +", "if isvariable(number): for element in charlist.as_list(): if isvariable(element): self.throw_instantiation_error() if not isvariable(number) and", "self.indicators = [] for i in self.kb: n, a = i.split('/') indicator =", "from the processor and returning to whatever system invoked Prolog.''' def execute(self): exit(0)", "TermIdentical_2(BuiltIn): \"\"\"'=='(@term, @term) Test the ordering of two terms. '=='(X, Y) is true", "def execute(self, x): return isinstance(x, (Compound, List)) class Nonvar_1(BuiltIn): '''nonvar(@term) nonvar(X) is true", "(ISO 8.4) '@=</2' : TermLessThanOrEqual_2, '==/2' : TermIdentical_2, '\\==/2' : TermNotIdentical_2, '@</2' :", "procedure is handled directly by the database t = tuple(Variable('_') for i in", "'''Redefined w.r.t. 
Python because in ISO Prolog x // y is valid only", "clause): self.reset_substitution() if clause.predicate_indicator() == ':-/2': head = clause.value[1] body = clause.value[2] else:", "and not isatom(atom12): self.throw_type_error('atom', atom12) if isvariable(atom1) and isvariable(atom2): s = atom12.name self.data", "in range(start, end + 1): self.data.append((n[start:i], start)) start += 1 if isinstance(before, Atomic):", "flag, value): if isvariable(flag) or isvariable(value): self.throw_instantiation_error() if not isvariable(flag) and not isatom(flag):", "if mgu: t2.apply(mgu) # Do not propagate renamed term variables bindings # outside", "and y integers is equivalent to intdiv(x, y). Also, we need to manage", "iterated_goal_term(term.value[2]) return term def isvariant(t, v): '''Two terms are variants if there is", "set V, A, or CT.''' def execute(self, x): return isnumber(x) ### ### Term", "return self.pick_clause(head, body) def pick_clause(self, head, body): if not self.clauses: return False c", "before) if (not isvariable(length) and not (isnumber(length) and isinstance(length.value, int))): self.throw_type_error('integer', length) if", "caller = core.Caller() caller._kb = self.kb values = [] result = caller.solve(goal) while", "not isinstance(x, float): from ..core import PrologTypeError raise PrologTypeError('float', Atomic(x)) from math import", "set() if isinstance(term, Compound): for arg in term.value[1:]: s.update(variable_set(arg)) else: # a list", "and control (ISO 8.15) # FIXME \\+ does not work because of what", "one of the user-defined procedures in the database.''' def execute(self, pi): if not", "contains at least one dynamic procedure with a clause Clause which unifies with", "isatom(atom12): self.throw_type_error('atom', atom12) if isvariable(atom1) and isvariable(atom2): s = atom12.name self.data = [(s[:i],", "and not isatom(subatom): self.throw_type_error('atom', subatom) if (not isvariable(before) and not (isnumber(before) and 
isinstance(before.value,", "0: self.throw_domain_error('not_less_than_zero', before) if isnumber(length) and length.value < 0: self.throw_domain_error('not_less_than_zero', length) if isnumber(after)", "self.unify(after, Atomic(a)) and self.unify(subatom, Atomic(s))) class AtomChars_2(BuiltIn): '''atom_chars(+atom, ?character_list)\\natom_chars(-atom, +character_list) atom_chars(Atom, List) is", "follows: * if T is an atomic term, then Sv is the empty", "= s.index(atom2.name) self.data = [(s[:i], s[i:], s)] elif isvariable(atom2): s = atom12.name if", "'@>='(X, Y) is true iff Y preceeds X or Y and X are", "the variable sets for each of the arguments of T.''' from ..core import", "a renamed copy of Term_1.''' def execute(self, t1, t2): from .. import core", "former to the variables of the latter such that the latter term results", "X is a member of the set I or F and is false", "caller._kb = self.kb result = caller.solve(term) return not result class Repeat_0(BuiltIn): '''repeat repeat", "c in atom.name] return self.unify(charlist, List.from_list(chars)) else: chars = [c.name for c in", "flag) from .. import core # for flags if flag.name not in core._FLAGS:", "self.unify(atom, Atomic(''.join(chars))) elif isvariable(codelist) or ispartiallist(codelist): codes = [Atomic(ord(char)) for char in atom.name]", "and information (ISO 8.8) 'clause/2' : Clause_2, 'current_predicate/1' : CurrentPredicate_1, # Clause creation", "x: return 0 if isinstance(x, int) else 0.0 from math import copysign s", "..core import deref args = (evaluate_expression(deref(a)) for a in term.value[1:]) pi = term.predicate_indicator()", "database which corresponds to a term H :- B which unifies with Head", "8.9) 'asserta/1' : Asserta_1, 'assertz/1' : Assertz_1, 'retract/1' : Retract_1, 'abolish/1' : Abolish_1,", "'==/2' : TermIdentical_2, '\\==/2' : TermNotIdentical_2, '@</2' : TermLessThan_2, '@>/2' : TermGreaterThan_2, '@>=/2'", "Python because we need to manage ValueError errors (e.g. 
for log(0)) on our", "(ISO 8.6) ### Simple arithmetic functors (ISO 9.1) ### Other arithmetic functors (ISO", "Atomic) or isvariable(term): return s if term.name == '^' and term.arity == 2:", "public, and * there is a clause in the database which corresponds to", "EV is the union of the variable set of V and the existential", "in codelist.as_list(): if not isvariable(element): try: chr(element.value) except UnicodeDecodeError: self.throw_representation_error(element) if isvariable(number) or", "float_integer_part, 'float_fractional_part/1' : float_fractional_part, '**/2' : power, 'sin/1' : math.sin, 'cos/1' : math.cos,", "flag to be altered.''' def execute(self, flag, value): if isvariable(flag) or isvariable(value): self.throw_instantiation_error()", "value of X as a message.''' def execute(self, x): if isvariable(x): self.throw_instantiation_error() if", "List.EMPTY_LIST: self.throw_domain_error('non_empty_list', elements) # TODO Missing max_arity related error if isinstance(term, Atomic): l", ": temp}) else: mgu.reduce() self.substitution.update(mgu) return True return False ### ### Arithmetic evaluation", "'''Redefined w.r.t. 
Python because in ISO Prolog sign(x) must return the same type", "mgu: # Still preserve the binding for t2 just in # case t2", "if isvariable(term) and arity.value < 0: self.throw_domain_error('not_less_than_zero', arity) if isvariable(term) and not isinstance(name,", "is true iff X is a member of the set I.''' def execute(self,", "as follows: * if T unifies with ^(_, Goal) then G is the", "in core._FLAGS.values() if core.unify(flag, Atomic(f.name)) is not None} if not self.flags: return False", "order of solution, but the order in which each list is found is", "self.unify(length, Atomic(l)) and self.unify(after, Atomic(a)) and self.unify(subatom, Atomic(s))) class AtomChars_2(BuiltIn): '''atom_chars(+atom, ?character_list)\\natom_chars(-atom, +character_list)", "Sv is {T} * else if T is a compound term then Sv", "if islist(charlist): for element in charlist.as_list(): if isatom(element) and len(element.name) != 1: self.throw_type_error('character',", "existed.''' def execute(self, pi): if isvariable(pi): self.throw_instantiation_error() if pi.name == '/' and pi.arity", "permission to a user-defined # procedure is handled directly by the database t", "self.kb values = [] result = caller.solve(goal) while result: from copy import deepcopy", "0: self.throw_domain_error('not_less_than_zero', arity) # error on access permission to a user-defined # procedure", "Term comparison (ISO 8.4) ### class TermLessThanOrEqual_2(BuiltIn): \"\"\"'@=<'(@term, @term) Test the ordering of", "of the set V.''' def execute(self, x): return isvariable(x) class Atom_1(BuiltIn): '''atom(@term) atom(X)", "flag) if value.name not in f.allowed: culprit = Compound('+', flag, value) self.throw_domain_error('flag_value', culprit)", "### ### Type testing (ISO 8.3) ### class Var_1(BuiltIn): '''var(@term) var(X) is true", "equal.\"\"\" def execute(self, e1, e2): if isvariable(e1) or isvariable(e2): self.throw_instantiation_error() v1 = evaluate_expression(e1)", "'bagof/3' : Bagof_3, 'setof/3' : 
Setof_3, # Logic and control (ISO 8.15) #", "return True return False ### ### Arithmetic evaluation (ISO 8.6) ### Simple arithmetic", "each different instantiation of the free variables in it. The elements of each", "if isinstance(x, int) else 0.0 from math import copysign s = copysign(1, x)", "= core.Caller() caller._kb = self.kb values = [] result = caller.solve(goal) while result:", "iff atom Atom can be broken into three pieces, AtomL, Sub_atom, and AtomR,", "else if T is a variable then Sv is {T} * else if", "from ..core import PrologInstantiationError raise PrologInstantiationError() if term.arity == 0 and term._isnumber(): return", "w.r.t. Python because we need to manage TypeError errors (e.g. x or y", "s def existential_variable_set(term): '''The existential variables set EV of a term T is", "\"\"\"'=='(@term, @term) Test the ordering of two terms. '=='(X, Y) is true iff", "log(0)) on our own.''' if not x: from ..core import PrologEvaluationError raise PrologEvaluationError('undefined')", "ISO Prolog mod(x, y) is valid only when x and y are integers.", "Char is Code.''' def execute(self, char, code): if isvariable(char) and isvariable(code): self.throw_instantiation_error() if", "'=\\=/2' : ArithmeticNotEqual_2, '</2' : ArithmeticLessThan_2, '=</2' : ArithmeticLessThanOrEqual_2, '>/2' : ArithmeticGreaterThan_2, '>=/2'", "isvariable(atom) and not isatom(atom): self.throw_type_error('atom', atom) if isvariable(atom): if isvariable(charlist): self.throw_instantiation_error() if not", "len(s) a = len(atom.name) - (b + l) return (self.unify(before, Atomic(b)) and self.unify(length,", "FV of a term T with respect to a term V is a", "PrologParser(s + '.').read_term() return self.unify(number, n) except InvalidTermException as e: self.throw_syntax_error(Atomic(s)) else: chars", "Term is a compound term and List is the list whose head is", "__ne__ method does not hold Prolog # semantics for anonymous variables if (isvariable(x)", "!= a: return False else: 
bijection[a1] = a2 else: if not isvariant(a1, a2):", "fails. It has the side effect of exiting from the processor and returning", "name.''' def execute(self, atom, codelist): if not isvariable(atom) and not isatom(atom): self.throw_type_error('atom', atom)", "Compound('+', flag, value) self.throw_domain_error('flag_value', culprit) core._FLAGS[flag.name] = f._replace(value=value.name) return True class CurrentPrologFlag_2(BuiltIn): '''current_prolog_flag(?flag,", "return True return x <= y class TermIdentical_2(BuiltIn): \"\"\"'=='(@term, @term) Test the ordering", "term H :- B which unifies with Head :- Body.''' def execute(self, head,", "Before, Length, After, Sub_atom) is true iff atom Atom can be broken into", "of the name of Sub_atom, and After is the number of characters of", "E2 as expressions the corresponding arithmetic value of E1 is less than the", "f, i = modf(x) return i def float_fractional_part(x): '''Redefined w.r.t. Python because in", "self.throw_type_error('integer', code) if not isvariable(code): try: chr(code.value) except UnicodeDecodeError: self.throw_representation_error(code) if isvariable(char): c", "result of concatenating the characters of the name of the atom Atom_2 to", "and isinstance(elements.head, Compound) and len(elements) > 1: self.throw_type_error('atomic', elements.head) if isvariable(term) and elements", "iterated_goal_term(goal) findall = Findall_3(self.kb) findall.execute(Compound('+', self.witness, template), g, Variable('S')) s = findall.substitution['S'] self.s", "work because of what is probably a parser # bug: the operator's \"scope\"", "side effect of exiting from the processor and returning to whatever system invoked", "not work because of what is probably a parser # bug: the operator's", "return x & y def bitor(x, y): '''Redefined w.r.t. 
Python because we need", "T is a variable or an atomic term, then EV is the empty", "..core import BuiltIn ### ### Term unification (ISO 8.2) ### class Unify_2(BuiltIn): \"\"\"'='(?term,", "is not None and core.unify(b, body) is not None): self.clauses.append(Compound('clause', h, b)) return", "deepcopy v = ground(deepcopy(template), caller.currsubst()) #values.append(core.renamed_copy(v)) values.append(v._copy_term()) result = caller.solve_next() values = List.EMPTY_LIST", "expressions the corresponding arithmetic values are not equal.\"\"\" def execute(self, e1, e2): if", "return False self.kb.retract(self.clauses_to_remove.pop(0)) c = self.clauses_to_unify.pop(0) return self.unify(Compound('clause', head, body), c) class Abolish_1(BuiltIn):", "PrologEvaluationError raise PrologEvaluationError('undefined') from math import log return log(x) def squareroot(x): '''Redefined w.r.t.", "and elements == List.EMPTY_LIST: self.throw_domain_error('non_empty_list', elements) # TODO Missing max_arity related error if", "true iff the value of evaluating Expression as an expression is Result.\"\"\" def", "by the processor, and Value is the value currently associated with it.''' def", "if len(elements) == 1: t = elements.head return self.unify(term, t) elif len(elements) >", "an atomic term, then Sv is the empty set * else if T", "related error if isinstance(term, Atomic): l = List(term) return self.unify(elements, l) if isinstance(term,", "execute(self, pi): if isvariable(pi): self.throw_instantiation_error() if pi.name == '/' and pi.arity == 2:", "isatom(element) and len(element.name) != 1: self.throw_type_error('character', element) if isvariable(number) or islist(charlist): from ..parser", "corresponding arithmetic value of E1 is less than the corresponding arithmetic value of", "is probably a parser # bug: the operator's \"scope\" is much wider than", "..core import PrologEvaluationError raise PrologEvaluationError('undefined') return float(x ** y) def logarithm(x): 
'''Redefined w.r.t.", "class ArithmeticGreaterThan_2(BuiltIn): \"\"\"'>'(@evaluable, @evaluable) '>'(E1, E2) is true iff evaluating E1 and E2", "goal, instances): fvs = free_variable_set(goal, template) self.witness = Compound('witness', *fvs) if fvs else", "PrologEvaluationError('undefined') from math import log return log(x) def squareroot(x): '''Redefined w.r.t. Python because", "in charlist.as_list()]) try: # the parser needs an End Token n = PrologParser(s", "isvariable(char) and len(char.name) != 1: self.throw_type_error('character', char) if not isvariable(code) and not isinstance(code.value,", "class Nonvar_1(BuiltIn): '''nonvar(@term) nonvar(X) is true iff X is not a member of", "of all variables in X by new variables.''' def execute(self, template, goal, instances):", "NSTO (Not Subject To Occur-check) then '\\\\='(X, Y) is true iff X and", "s) for i in range(len(s) + 1)] elif isvariable(atom1): s = atom12.name if", "x: from ..core import PrologEvaluationError raise PrologEvaluationError('undefined') from math import log return log(x)", "def float_fractional_part(x): '''Redefined w.r.t. 
Python because in ISO Prolog float_fractional_part(x) is valid only", "self.throw_domain_error('not_less_than_zero', arity) if isvariable(term) and not isinstance(name, Atomic): self.throw_type_error('atomic', name) if isvariable(term) and", "is a compound term and List is the list whose head is the", "variables in X by new variables.''' def execute(self, template, goal, instances): if isvariable(goal):", "pick_indicator(self, pi): if not self.indicators: return False # the order in which predicate", "the corresponding arithmetic values are not equal.\"\"\" def execute(self, e1, e2): if isvariable(e1)", "of the set V.''' def execute(self, x): return not isvariable(x) class Number_1(BuiltIn): '''number(@term)", "..parser import PrologParser, InvalidTermException s = ''.join([char.name for char in charlist.as_list()]) try: #", "> 1: self.throw_type_error('atomic', elements.head) if isvariable(term) and elements == List.EMPTY_LIST: self.throw_domain_error('non_empty_list', elements) #", "import core # for flags if flag.name not in core._FLAGS: self.throw_domain_error('prolog_flag', flag) f", "x.name == '_' and y.name == '_'): return True return x != y", "with Head :- Body.''' def execute(self, head, body): if isvariable(head): self.throw_instantiation_error() if not", "zip(wt_list, t_list): ww = wwtt.value[1] #from copy import deepcopy #subst = core.unify(ww, deepcopy(self.witness))", "# procedure are handled directly by the database from ..builtin import search_builtin if", "and isinstance(x.value, float) class Atomic_1(BuiltIn): '''atomic(@term) atomic(X) is true if X is a", "PrologEvaluationError('zero_divisor') if not isinstance(x, int): from ..core import PrologTypeError raise PrologTypeError('integer', Atomic(x)) if", "true. 
It is used to add Clause to the database before all existing", "a flag supported by the processor, and Value is the value currently associated", "..core import PrologTypeError raise PrologTypeError('integer', Atomic(s)) return x & y def bitor(x, y):", "* Term is a compound term and List is the list whose head", "of the variable set of V and the existential variable set of T.'''", "a list of the arguments of Term.\"\"\" def execute(self, term, elements): if isvariable(term)", "return Atomic(head.name) from ..core import renamed_copy return renamed_copy(head) class CurrentPredicate_1(BuiltIn): '''current_predicate(?predicate_indicator) current_predicate(PI) is", "s = findall.substitution['S'] self.s = self._create_solution_list(s) if not self.s: return False return self.pick_bag(template,", "PrologEvaluationError raise PrologEvaluationError('zero_divisor') if not isinstance(x, int): from ..core import PrologTypeError raise PrologTypeError('integer',", "in charlist.as_list()] return atom.name == ''.join(chars) class AtomCodes_2(BuiltIn): '''atom_codes(+atom, ?character_code_list)\\natom_codes(-atom, +character_code_list) atom_codes(Atom, List)", "code for the one-char atom Char is Code.''' def execute(self, char, code): if", "Atom_12) is true iff characters of the name of the atom Atom_12 are", "= List.from_list(t_list) self.s = s_next return self.unify(t_list, instances) def _create_solution_list(self, s): return []", "= core.unify(ww, self.witness) ww.apply(subst) t.apply(subst) self.substitution.update(subst) t_list = List.from_list(t_list) self.s = s_next return", "NumberChars_2(BuiltIn): '''number_chars(+number, ?character_list)\\nnumber_chars(-number, +character_list) number_chars(Number, List) is true iff List is a list", "(isnumber(length) and isinstance(length.value, int))): self.throw_type_error('integer', length) if isnumber(length) and length.value < 0: self.throw_domain_error('not_less_than_zero',", "the term G * else EV is the empty set.''' s = set()", 
"of the head of Clause.''' def execute(self, clause): head = clause.value[1] if clause.predicate_indicator()", "- (after.value if isinstance(after, Atomic) else 0) self.data = [] while start <=", "is the number of characters of the name of AtomL, Length is the", "renamed copy of t1 and t2 retain validity # only in the context", "y: from ..core import PrologEvaluationError raise PrologEvaluationError('zero_divisor') if isinstance(x, int) and isinstance(y, int):", "if isnumber(head): self.throw_type_error('callable', head) # errors on the conversion of the clause body", "is not None} if not self.flags: return False return self.pick_flag(flag, value) def reexecute(self,", "self.flags: return False return self.pick_flag(flag, value) def pick_flag(self, flag, value): f = self.flags.pop()", "= caller.solve_next() values = List.EMPTY_LIST if not values else List.from_list(values) return self.unify(values, instances)", "y): '''Redefined w.r.t. Python because in ISO Prolog mod(x, y) is valid only", "Unify_2, '\\=/2' : NotUnifiable_2, # Type testing (ISO 8.3) 'var/1' : Var_1, 'atom/1'", "a1, a2 in zip(t.value[1:], v.value[1:]): if isvariable(a1) and isvariable(a2) and not a1.name.startswith('_'): a", "return x % y def sign(x): '''Redefined w.r.t. Python because in ISO Prolog", "the head of Clause.''' def execute(self, clause): head = clause.value[1] if clause.predicate_indicator() ==", "Prolog # semantics for anonymous variables if (isvariable(x) and isvariable(y) and x.name ==", "self.clauses_to_unify.pop(0) return self.unify(Compound('clause', head, body), c) class Abolish_1(BuiltIn): '''abolish(@predicate_indicator) abolish(Pred) is true. It", "def power(x, y): '''Redefined w.r.t. 
Python because in ISO Prolog x ** y", "name) # TODO Missing max_arity related errors if arity.value < 0: self.throw_domain_error('not_less_than_zero', arity)", "after.value < 0: self.throw_domain_error('not_less_than_zero', after) n = atom.name start = before.value if isinstance(before,", "self.kb result = caller.solve(term) return not result class Repeat_0(BuiltIn): '''repeat repeat is true.", "'_' and y.name == '_'): return False return x == y class TermNotIdentical_2(BuiltIn):", "import Atomic, Variable, Compound, List from ..parser import isvariable, isatom, isnumber, islist, ispartiallist,", "execute(self, x): return isvariable(x) class Atom_1(BuiltIn): '''atom(@term) atom(X) is true iff X is", "''' def execute(self): return True def reexecute(self): return True ### ### Atomic term", "not isvariable(code): try: chr(code.value) except UnicodeDecodeError: self.throw_representation_error(code) if isvariable(char): c = chr(code.value) return", "Utility functions def free_variable_set(t, v): '''The free variable set FV of a term", "Y and X are identical terms.\"\"\" def execute(self, x, y): # The Python", "execute(self, atom, before, length, after, subatom): if isvariable(atom): self.throw_instantiation_error() if not isvariable(atom) and", "elements are the character codes corresponding to a character sequence of Number which", "true iff: * Term is a compound term with a functor whose identifier", "of Term and whose tail is a list of the arguments of Term.\"\"\"", "y are integers. 
Also, we need to manage ZeroDivisionError errors on our own.'''", "result: from copy import deepcopy v = ground(deepcopy(template), caller.currsubst()) #values.append(core.renamed_copy(v)) values.append(v._copy_term()) result =", "the list of values to which a variable X not occurring in Template", "and E2 as expressions the corresponding arithmetic value of E1 is greater than", "False return self.pick_data(atom, before, length, after, subatom) def pick_data(self, atom, before, length, after,", "return isnumber(x) ### ### Term comparison (ISO 8.4) ### class TermLessThanOrEqual_2(BuiltIn): \"\"\"'@=<'(@term, @term)", "self.throw_domain_error('non_empty_list', elements) # TODO Missing max_arity related error if isinstance(term, Atomic): l =", "#t = core.renamed_copy(t1) t = t1._copy_term() # Can't directly use BuiltIn.unify because the", "True if isinstance(t, Compound) and isinstance(v, Compound): if t.name != v.name or t.arity", "an atomic term equal to Name and Arity is 0.''' def execute(self, term,", "solution, but the order in which each list is found is undefined.''' def", "the database which corresponds to a term H :- B which unifies with", "if T is an atomic term, then Sv is the empty set *", "### ### Term creation and decomposition (ISO 8.5) ### class Functor_3(BuiltIn): '''functor(-nonvar, +atomic,", "to whatever system invoked Prolog.''' def execute(self): exit(0) class Halt_1(BuiltIn): '''halt(+integer) halt(X) neither", "codelist): if isvariable(number) and ispartiallist(codelist): self.throw_instantiation_error() if isvariable(number): for element in codelist.as_list(): if", "self.pick_bag(template, goal, instances) def reexecute(self, template, goal, instances): self.reset_substitution() if not self.s: return", "error if isinstance(term, Atomic): l = List(term) return self.unify(elements, l) if isinstance(term, Compound):", "'.' 
and elements.arity == 2: if len(elements) == 1: t = elements.head return", "and not ispartiallist(elements): self.throw_type_error('list', elements) if isvariable(term) and islist(elements) and isvariable(elements.head): self.throw_instantiation_error() if", "from ..core import PrologTypeError raise PrologTypeError('integer', Atomic(n)) if not isinstance(y, int): from ..core", "# Still preserve the binding for t2 just in # case t2 were", "x != y class TermLessThan_2(BuiltIn): \"\"\"'@<'(@term, @term) Test the ordering of two terms.", "is true iff PI is a predicate indicator for one of the user-defined", "self.substitution.update(subst) t_list = List.from_list(t_list) self.s = s_next return self.unify(t_list, instances) def _create_solution_list(self, s):", "if mgu is not None: if mgu: t2.apply(mgu) # Do not propagate renamed", "from ..core import deref if isinstance(term, Variable): if term.isfree(): return {term} else: term", "term.arity == 0 and term._isnumber(): return term.value if isinstance(term, Compound): from ..core import", "not in f.allowed: culprit = Compound('+', flag, value) self.throw_domain_error('flag_value', culprit) core._FLAGS[flag.name] = f._replace(value=value.name)", "is not None): self.clauses_to_unify.append(Compound('clause', h, b)) self.clauses_to_remove.append(clause) return self.pick_clause(head, body) def reexecute(self, clause):", "if X is a member of the set V, A, or CT.''' def", "= evaluate_expression(expression) return self.unify(result, Atomic(c)) def evaluate_expression(term): # TODO No overflow/underflow errors #", "isnumber(x) and not isinstance(x.value, int): self.throw_type_error('integer', x) exit(x.value) # Utility functions def free_variable_set(t,", "term equal to Name and Arity is 0.''' def execute(self, term, name, arity):", "isinstance(term, Compound): from ..core import deref args = (evaluate_expression(deref(a)) for a in term.value[1:])", "PrologTypeError raise PrologTypeError('integer', Atomic(y)) return x // y def 
module(x, y): '''Redefined w.r.t.", "own.''' if not isinstance(x, int): from ..core import PrologTypeError raise PrologTypeError('integer', Atomic(n)) if", "# Term unification (ISO 8.2) '=/2' : Unify_2, '\\=/2' : NotUnifiable_2, # Type", ": Float_1, 'atomic/1' : Atomic_1, 'compound/1' : Compound_1, 'nonvar/1' : Nonvar_1, 'number/1' :", "name of the atom Atom_1.''' def execute(self, atom1, atom2, atom12): if isvariable(atom1) and", "if pi.name == '/' and pi.arity == 2: name, arity = pi.value[1:] if", "TermNotIdentical_2(BuiltIn): \"\"\"'\\=='(@term, @term) Test the ordering of two terms. '\\=='(X, Y) is true", "if not self.flags: return False return self.pick_flag(flag, value) def pick_flag(self, flag, value): f", "is false.\"\"\" def execute(self, term): if isvariable(term): self.throw_instantiation_error() if isnumber(term): self.throw_type_error('callable', term) from", "return self.pick_clause(head, body) def reexecute(self, head, body): self.reset_substitution() return self.pick_clause(head, body) def pick_clause(self,", "defined as the set difference of the variable set of T and BV", "functors (ISO 9.1) ### Other arithmetic functors (ISO 9.3) ### Bitwise functors (ISO", "Atomic(0)) if isinstance(term, (Compound, List)): return (self.unify(Atomic(term.name), name) and self.unify(Atomic(term.arity), arity)) if isinstance(term,", "l = List(term) return self.unify(elements, l) if isinstance(term, Compound): l = List.from_list([Atomic(term.name)] +", "if not x: return 0 if isinstance(x, int) else 0.0 from math import", "import PrologInstantiationError raise PrologInstantiationError() if term.arity == 0 and term._isnumber(): return term.value if", "Float_1, 'atomic/1' : Atomic_1, 'compound/1' : Compound_1, 'nonvar/1' : Nonvar_1, 'number/1' : Number_1,", "is Code.''' def execute(self, char, code): if isvariable(char) and isvariable(code): self.throw_instantiation_error() if not", "< 0 is defined only when y is an integer, and always returns", "procedures in the 
database.''' def execute(self, pi): if not isvariable(pi) and not (pi.name", "and isvariable(atom12): self.throw_instantiation_error() if not isvariable(atom1) and not isatom(atom1): self.throw_type_error('atom', atom1) if not", "than the single # goal, even when using parentheses! '\\+/1' : Not_1, 'not/1'", "nonvar(X) is true iff X is not a member of the set V.'''", "and not isatom(atom2): self.throw_type_error('atom', atom2) if not isvariable(atom12) and not isatom(atom12): self.throw_type_error('atom', atom12)", "value = mgu.get(term.name) if value: return value else: return ground(term.binding(), mgu) if isinstance(term,", "c in chars] return self.unify(charlist, List.from_list(lst)) class NumberCodes_2(BuiltIn): '''number_codes(+number, ?character_code_list)\\nnumber_codes(-number, ?character_code_list) number_codes(Number, List)", "9.1) ### Other arithmetic functors (ISO 9.3) ### Bitwise functors (ISO 9.4) ###", "!= v2 class ArithmeticLessThan_2(BuiltIn): \"\"\"'<'(@evaluable, @evaluable) '<'(E1, E2) is true iff evaluating E1", "execute(self, x): return isnumber(x) ### ### Term comparison (ISO 8.4) ### class TermLessThanOrEqual_2(BuiltIn):", "Clause creation and destruction (ISO 8.9) ### class Asserta_1(BuiltIn): '''asserta(@clause) asserta(Clause) is true.", "isvariable(e2): self.throw_instantiation_error() v1 = evaluate_expression(e1) v2 = evaluate_expression(e2) return v1 > v2 class", "is a list whose elements are the one-char atoms whose names are the", "self.indicators.append(indicator) return self.pick_indicator(pi) def reexecute(self, pi): self.reset_substitution() return self.pick_indicator(pi) def pick_indicator(self, pi): if", "set Sv of a term T is a set of variables defined recursively", "if isatom(element) and len(element.name) != 1: self.throw_type_error('character', element) if isvariable(number) or islist(charlist): from", "the side effect of exiting from the processor and returning to whatever system", "\"\"\"'>'(@evaluable, @evaluable) 
'>'(E1, E2) is true iff evaluating E1 and E2 as expressions", "E1 is less than or equal to the corresponding arithmetic value of E2.\"\"\"", "goal. Each list is a sorted list, but the order in which each", "not isinstance(x, int): from ..core import PrologTypeError raise PrologTypeError('integer', Atomic(n)) if not isinstance(y,", "self.unify(atom12, Atomic(c[2]))) class SubAtom_5(BuiltIn): '''sub_atom(+atom, ?integer, ?integer, ?integer, ?atom) sub_atom(Atom, Before, Length, After,", ": TermLessThanOrEqual_2, '==/2' : TermIdentical_2, '\\==/2' : TermNotIdentical_2, '@</2' : TermLessThan_2, '@>/2' :", "of the name of atom Atom, and the value of each element is", "and not isinstance(name, Atomic): self.throw_type_error('atomic', name) if isvariable(term) and not isatom(name) and arity.value", "def pick_indicator(self, pi): if not self.indicators: return False # the order in which", "'sqrt/1' : squareroot, '>>/2' : rightshift, '<</2' : leftshift, '/\\\\/2' : bitand, '\\\\//2'", "and self.unify(Atomic(term.arity), arity)) if isinstance(term, Variable): if isinstance(name, Atomic) and arity.value == 0:", "self.data if d == subatom.value] if not self.data: return False return self.pick_data(atom, before,", "iff X and Y are not identical terms.\"\"\" def execute(self, x, y): #", "charlist.as_list(): if isatom(element) and len(element.name) != 1: self.throw_type_error('character', element) if isvariable(number) or islist(charlist):", "sequence of Number which could be output.''' def execute(self, number, charlist): if isvariable(number)", "self.throw_type_error('list', charlist) if islist(charlist): for element in charlist.as_list(): if isatom(element) and len(element.name) !=", "goal and on access permission to a user-defined # procedure are handled directly", "e in self.s if isvariant(wt.value[1], e.value[1])] t_list = [e.value[2] for e in wt_list]", "comparison (ISO 8.4) '@=</2' : TermLessThanOrEqual_2, '==/2' : TermIdentical_2, '\\==/2' : TermNotIdentical_2, '@</2'", 
": Arg_3, '=../2' : Univ_2, 'copy_term/2' : CopyTerm_2, # Arithmetic evaluation (ISO 8.6)", "s): '''Redefined w.r.t. Python because we need to manage TypeError errors (e.g. n", "Arg) is true iff the Nth argument of Term is Arg.''' def execute(self,", "is true. It is used to add Clause to the database after all", "to a term V is a set of variables defined as the set", "= atom12.name if not s.startswith(atom1.name): return False else: i = len(atom1.name) self.data =", "and x.name == '_' and y.name == '_'): return True return x <=", "def isvariant(t, v): '''Two terms are variants if there is a bijection s", "if not procedure: return False if not procedure._public: pi = Compound('/', Atomic(head.name), Atomic(head.arity))", "findall.substitution['S'] self.s = self._create_solution_list(s) if not self.s: return False return self.pick_bag(template, goal, instances)", "self.throw_permission_error('modify', 'flag', flag) if value.name not in f.allowed: culprit = Compound('+', flag, value)", "of a term T is a term defined recursively as follows: * if", "isvariable(code) and not isinstance(code.value, int): self.throw_type_error('integer', code) if not isvariable(code): try: chr(code.value) except", "'flag', flag) if value.name not in f.allowed: culprit = Compound('+', flag, value) self.throw_domain_error('flag_value',", "+list) '=..'(Term, List) is true iff: * Term is an atomic term and", "islist(charlist) or ispartiallist(charlist): chars = [Atomic(c) for c in atom.name] return self.unify(charlist, List.from_list(chars))", "integer(X) is true iff X is a member of the set I.''' def", "- (b + l) return (self.unify(before, Atomic(b)) and self.unify(length, Atomic(l)) and self.unify(after, Atomic(a))", "arity) # error on access permission to a user-defined # procedure is handled", "because in ISO Prolog sign(x) must return the same type of number as", "equal to Name and Arity is 0.''' def execute(self, term, name, arity): if", "the corresponding arithmetic value of E1 is less 
than the corresponding arithmetic value", "a functor whose identifier is Name and arity Arity, or * Term is", "sqrt(x) def rightshift(n, s): '''Redefined w.r.t. Python because we need to manage TypeError", "self.pick_flag(flag, value) def pick_flag(self, flag, value): f = self.flags.pop() return self.unify(flag, Atomic(f.name)) and", "values are not equal.\"\"\" def execute(self, e1, e2): if isvariable(e1) or isvariable(e2): self.throw_instantiation_error()", "a2 in zip(t.value[1:], v.value[1:]): if isvariable(a1) and isvariable(a2) and not a1.name.startswith('_'): a =", "true iff Y preceeds X or Y and X are identical terms.\"\"\" def", "atom Char is Code.''' def execute(self, char, code): if isvariable(char) and isvariable(code): self.throw_instantiation_error()", "not isnumber(x) and not isinstance(x.value, int): self.throw_type_error('integer', x) exit(x.value) # Utility functions def", "in range(arity.value)) c = Compound(name.name, *t) return self.unify(term, c) return False class Arg_3(BuiltIn):", "search_builtin if search_builtin(head): pi = Compound('/', Atomic(head.name), Atomic(head.arity)) self.throw_permission_error('modify', 'static_procedure', pi) self.kb.assert_clause(clause, append=True)", "= len(n) - (after.value if isinstance(after, Atomic) else 0) self.data = [] while", "evaluate_expression(e2) return v1 <= v2 class ArithmeticGreaterThan_2(BuiltIn): \"\"\"'>'(@evaluable, @evaluable) '>'(E1, E2) is true", "self.unify(term, t) else: return False else: return False class CopyTerm_2(BuiltIn): '''copy_term(?term, ?term) copy_term(Term_1,", "Y.\"\"\" def execute(self, x, y): return x < y class TermGreaterThan_2(BuiltIn): \"\"\"'@>(@term, @term)", "= self.flags.pop() return self.unify(flag, Atomic(f.name)) and self.unify(value, Atomic(f.value)) class Halt_0(BuiltIn): '''halt halt neither", "{'+/2' : operator.add, '*/2' : operator.mul, '-/2' : operator.sub, '-/1' : operator.neg, '//2'", "in self.s if isvariant(wt.value[1], e.value[1])] t_list = 
[e.value[2] for e in wt_list] s_next", "if isatom(name) and arity.value > 0: t = (Variable('_') for i in range(arity.value))", "repeat is re-executable. ''' def execute(self): return True def reexecute(self): return True ###", "iff integer Length equals the number of characters of the name of the", "core.unify(ww, deepcopy(self.witness)) subst = core.unify(ww, self.witness) ww.apply(subst) t.apply(subst) self.substitution.update(subst) t_list = List.from_list(t_list) self.s", "set FV of a term T with respect to a term V is", "not s.startswith(atom1.name): return False else: i = len(atom1.name) self.data = [(s[:i], s[i:], s)]", "does not work because of what is probably a parser # bug: the", "### class Unify_2(BuiltIn): \"\"\"'='(?term, ?term) If X and Y are NSTO (Not Subject", "the character code for the one-char atom Char is Code.''' def execute(self, char,", "'*/2' : operator.mul, '-/2' : operator.sub, '-/1' : operator.neg, '//2' : divide, '///2'", "ISO Prolog float_fractional_part(x) is valid only when x is a float.''' if not", "Name and arity Arity, or * Term is an atomic term equal to", "core._FLAGS[flag.name]: self.throw_domain_error('prolog_flag', flag) self.flags = {f for f in core._FLAGS.values() if core.unify(flag, Atomic(f.name))", ": SubAtom_5, 'atom_chars/2' : AtomChars_2, 'atom_codes/2' : AtomCodes_2, 'char_code/2' : CharCode_2, 'number_chars/2' :", "else 0) self.data = [] while start <= end: for i in range(start,", "float.''' if not isinstance(x, float): from ..core import PrologTypeError raise PrologTypeError('float', Atomic(x)) from", "'_' and y.name == '_'): return False return x >= y ### ###", "It is used to remove from the database the procedure specified by the", "pi) self.kb.abolish(pi) return True ### ### All solutions (ISO 8.10) ### class Findall_3(BuiltIn):", "(not isvariable(length) and not (isnumber(length) and isinstance(length.value, int))): self.throw_type_error('integer', length) if (not isvariable(after)", "[deref(c).name for c in 
charlist.as_list()] return self.unify(atom, Atomic(''.join(chars))) elif isvariable(charlist) or islist(charlist) or", "on our own.''' if not y: from ..core import PrologEvaluationError raise PrologEvaluationError('zero_divisor') if", "not x and y < 0: from ..core import PrologEvaluationError raise PrologEvaluationError('undefined') return", "Atomic(c)) else: return ord(char.name) == code.value class NumberChars_2(BuiltIn): '''number_chars(+number, ?character_list)\\nnumber_chars(-number, +character_list) number_chars(Number, List)", "name of AtomR.''' def execute(self, atom, before, length, after, subatom): if isvariable(atom): self.throw_instantiation_error()", "in procedure.clauses(): h, b = convert_clause_to_term(clause.head(), clause.body()) if (core.unify(h, head) is not None", "== 0 and isinstance(x.value, int) class Float_1(BuiltIn): '''float(@term) float(X) is true iff X", "PrologTypeError('integer', Atomic(y)) return x % y def sign(x): '''Redefined w.r.t. Python because in", "Test the ordering of two terms. '@<'(X, Y) is true iff X preceeds", "is the iterated goal term of Goal * else G is T.''' if", "set V.''' def execute(self, x): return not isvariable(x) class Number_1(BuiltIn): '''number(@term) number(X) is", "if isvariable(term) and ispartiallist(elements): self.throw_instantiation_error() if not islist(elements) and not ispartiallist(elements): self.throw_type_error('list', elements)", "T with respect to a term V is a set of variables defined", "execute(self, template, goal, instances): fvs = free_variable_set(goal, template) self.witness = Compound('witness', *fvs) if", "### Clause retrival and information (ISO 8.8) ### class Clause_2(BuiltIn): '''clause(+head, ?callable_term) clause(Head,", "isvariable(number) or islist(charlist): from ..parser import PrologParser, InvalidTermException s = ''.join([char.name for char", "errors (e.g. 
for log(0)) on our own.''' if not x: from ..core import", "self.throw_type_error('atom', atom12) if isvariable(atom1) and isvariable(atom2): s = atom12.name self.data = [(s[:i], s[i:],", "successive characters of the name of atom Atom, and the value of each", "'static_procedure', pi) else: self.throw_type_error('predicate_indicator', pi) self.kb.abolish(pi) return True ### ### All solutions (ISO", "if not (isvariable(body) or iscallable(body)): self.throw_type_error('callable', body) self.clauses = [] procedure = self.kb.search(head)", "are in order of solution, but the order in which each list is", "true iff List is a list whose elements correspond to the successive characters", "atom.name.index(s, p) l = len(s) a = len(atom.name) - (b + l) return", "ispartiallist(elements): self.throw_type_error('list', elements) if isvariable(term) and islist(elements) and isvariable(elements.head): self.throw_instantiation_error() if islist(elements) and", "8.8) 'clause/2' : Clause_2, 'current_predicate/1' : CurrentPredicate_1, # Clause creation and destruction (ISO", "is a bijection s of the variables of the former to the variables", "# Arithmetic evaluation (ISO 8.6) 'is/2' : Is_2, # Arithmetic comparison (ISO 8.7)", "return (self.unify(atom1, Atomic(c[0])) and self.unify(atom2, Atomic(c[1])) and self.unify(atom12, Atomic(c[2]))) class SubAtom_5(BuiltIn): '''sub_atom(+atom, ?integer,", "as a message.''' def execute(self, x): if isvariable(x): self.throw_instantiation_error() if not isvariable(x) and", "of the set F.''' def execute(self, x): return x.arity == 0 and isinstance(x.value,", "None): self.clauses_to_unify.append(Compound('clause', h, b)) self.clauses_to_remove.append(clause) return self.pick_clause(head, body) def reexecute(self, clause): self.reset_substitution() if", "Number_1, # Term comparison (ISO 8.4) '@=</2' : TermLessThanOrEqual_2, '==/2' : TermIdentical_2, '\\==/2'", "y): # The Python __ne__ method does not hold Prolog # semantics for", "from .. 
import core for clause in procedure.clauses(): h, b = convert_clause_to_term(clause.head(), clause.body())", "in it. The elements of each list are in order of solution, but", "execute(self, x): return isatom(x) class Integer_1(BuiltIn): '''integer(@term) integer(X) is true iff X is", "List.EMPTY_LIST else s.as_list() class Setof_3(Bagof_3): '''setof(?term, +callable_term, ?list) setof/3 assembles as a list", "int): from ..core import PrologTypeError raise PrologTypeError('integer', Atomic(s)) return n << s def", "term and List is the list whose head is the functor name of", "self.throw_instantiation_error() v1 = evaluate_expression(e1) v2 = evaluate_expression(e2) return v1 > v2 class ArithmeticGreaterThanOrEqual_2(BuiltIn):", "before, length, after, subatom): s, p = self.data.pop(0) b = atom.name.index(s, p) l", "and y.name == '_'): return True return x <= y class TermIdentical_2(BuiltIn): \"\"\"'=='(@term,", "isvariable(atom): if isvariable(charlist): self.throw_instantiation_error() if not islist(charlist) and not ispartiallist(charlist): self.throw_type_error('list', charlist) for", "and arity.value > 0: self.throw_type_error('atom', name) if isinstance(term, Atomic): return self.unify(term, name) and", "### class SetPrologFlag_2(BuiltIn): '''set_prolog_flag(+flag, @nonvar) A goal set_prolog_flag(Flag, Value) enables the value associated", ": ArithmeticNotEqual_2, '</2' : ArithmeticLessThan_2, '=</2' : ArithmeticLessThanOrEqual_2, '>/2' : ArithmeticGreaterThan_2, '>=/2' :", "return n >> s def leftshift(n, s): '''Redefined w.r.t. Python because we need", "= atom12.name if not s.endswith(atom2.name): return False else: i = s.index(atom2.name) self.data =", "y): '''Redefined w.r.t. 
Python because in ISO Prolog x // y is valid", "execute(self, number, charlist): if isvariable(number) and ispartiallist(charlist): self.throw_instantiation_error() if isvariable(number): for element in", "isvariable(e1) or isvariable(e2): self.throw_instantiation_error() v1 = evaluate_expression(e1) v2 = evaluate_expression(e2) return v1 >", "CT.''' def execute(self, x): return isnumber(x) ### ### Term comparison (ISO 8.4) ###", "goal term G of a term T is a term defined recursively as", "class Repeat_0(BuiltIn): '''repeat repeat is true. repeat is re-executable. ''' def execute(self): return", "the set I.''' def execute(self, x): return x.arity == 0 and isinstance(x.value, int)", "one-char atoms whose names are the successive characters of the name of atom", "not procedure: return False if not procedure._public: pi = Compound('/', Atomic(head.name), Atomic(head.arity)) self.throw_permission_error('access',", "that Before is the number of characters of the name of AtomL, Length", "d.get(name) def divide(x, y): '''Redefined w.r.t. 
Python because in ISO Prolog div(x, y)", "return v1 >= v2 ### ### Clause retrival and information (ISO 8.8) ###", "if islist(elements) and isinstance(elements.head, Compound) and len(elements) > 1: self.throw_type_error('atomic', elements.head) if isvariable(term)", "0 and term._isnumber(): return term.value if isinstance(term, Compound): from ..core import deref args", "free_variable_set(t, v): '''The free variable set FV of a term T with respect", "pieces, AtomL, Sub_atom, and AtomR, such that Before is the number of characters", "self.throw_instantiation_error() v1 = evaluate_expression(e1) v2 = evaluate_expression(e2) return v1 <= v2 class ArithmeticGreaterThan_2(BuiltIn):", "the one-char atom Char is Code.''' def execute(self, char, code): if isvariable(char) and", "List.from_list(t_list) self.s = s_next return self.unify(t_list, instances) def _create_solution_list(self, s): return [] if", "PrologTypeError('integer', Atomic(y)) return x // y def module(x, y): '''Redefined w.r.t. Python because", "for char in atom.name] return self.unify(codelist, List.from_list(codes)) else: chars = [chr(code.value) for code", "### ### Clause retrival and information (ISO 8.8) ### class Clause_2(BuiltIn): '''clause(+head, ?callable_term)", "if not islist(charlist) and not ispartiallist(charlist): self.throw_type_error('list', charlist) for element in charlist.as_list(): if", "AtomR.''' def execute(self, atom, before, length, after, subatom): if isvariable(atom): self.throw_instantiation_error() if not", "variables defined as the set difference of the variable set of T and", "of the arguments of T.''' from ..core import deref if isinstance(term, Variable): if", "class AtomChars_2(BuiltIn): '''atom_chars(+atom, ?character_list)\\natom_chars(-atom, +character_list) atom_chars(Atom, List) is true iff List is a", "directly by the database t = tuple(Variable('_') for i in range(arity.value)) c =", "ispartiallist(codelist): self.throw_type_error('list', codelist) if 
islist(codelist): for element in codelist.as_list(): if not isvariable(element): try:", "s[i:], s) for i in range(len(s) + 1)] elif isvariable(atom1): s = atom12.name", "isvariable(atom): from ..core import deref chars = [deref(c).name for c in charlist.as_list()] return", "Sub_atom) is true iff atom Atom can be broken into three pieces, AtomL,", ": divide, '///2' : intdivide, 'mod/2' : module, 'rem/2' : module, 'floor/1' :", "and y < 0: from ..core import PrologEvaluationError raise PrologEvaluationError('undefined') return float(x **", "y): '''Redefined w.r.t. Python because we need to manage TypeError errors (e.g. x", "compound term then Sv is the union of the variable sets for each", "iff: * the predicate of Head is public, and * there is a", "and term.arity == 2: s.update(variable_set(term.value[1])) s.update(existential_variable_set(term.value[2])) return s return s def iterated_goal_term(term): '''The", "from replacing each variable X in the former by Xs.''' from ..core import", "a1.name.startswith('_'): a = bijection.get(a1) if a is not None and a2 != a:", "head) if not (isvariable(body) or iscallable(body)): self.throw_type_error('callable', body) self.clauses = [] procedure =", "import PrologEvaluationError raise PrologEvaluationError('undefined') return float(x ** y) def logarithm(x): '''Redefined w.r.t. 
Python", "are variants if there is a bijection s of the variables of the", "### Atomic term processing (ISO 8.16) ### class AtomLength_2(BuiltIn): '''atom_length(+atom, ?integer) atom_length(Atom, Length)", "t = Compound(name, *elements.as_list()[1:]) return self.unify(term, t) else: return False else: return False", "..parser import PrologParser, InvalidTermException s = ''.join([chr(code.value) for code in codelist.as_list()]) try: #", "False else: bijection[a1] = a2 else: if not isvariant(a1, a2): return False return", "Assertz_1, 'retract/1' : Retract_1, 'abolish/1' : Abolish_1, # All solutions (ISO 8.10) 'findall/3'", "AtomLength_2, 'atom_concat/3' : AtomConcat_3, 'sub_atom/5' : SubAtom_5, 'atom_chars/2' : AtomChars_2, 'atom_codes/2' : AtomCodes_2,", "if isvariable(term) and not isinstance(name, Atomic): self.throw_type_error('atomic', name) if isvariable(term) and not isatom(name)", "return True class CurrentPrologFlag_2(BuiltIn): '''current_prolog_flag(?flag, ?term) current_prolog_flag(Flag, Value) is true iff Flag is", "same state as if the procedure identified by Pred had never existed.''' def", "which could be output.''' def execute(self, number, charlist): if isvariable(number) and ispartiallist(charlist): self.throw_instantiation_error()", "Test the ordering of two terms. '=='(X, Y) is true iff X and", "if isvariable(element): self.throw_instantiation_error() if isatom(element) and len(element.name) != 1: self.throw_type_error('character', element) if isvariable(atom):", "isinstance(term, Atomic): return self.unify(term, name) and self.unify(arity, Atomic(0)) if isinstance(term, (Compound, List)): return", "to manage ValueError errors (e.g. 
for log(0)) on our own.''' if not x:", "fvs = free_variable_set(goal, template) self.witness = Compound('witness', *fvs) if fvs else Atomic('witness') g", "X are identical terms.\"\"\" def execute(self, x, y): # The Python __eq__ method", "isvariable(element): self.throw_instantiation_error() if not isvariable(number) and not isnumber(number): self.throw_type_error('number', number) if isvariable(number) and", "had never existed.''' def execute(self, pi): if isvariable(pi): self.throw_instantiation_error() if pi.name == '/'", "def convert_clause_to_term(head, body): return (convert_to_term(head), convert_to_term(body)) def convert_to_term(head): if head.arity == 0: return", "import core #t = core.renamed_copy(t1) t = t1._copy_term() # Can't directly use BuiltIn.unify", "import core for wwtt, t in zip(wt_list, t_list): ww = wwtt.value[1] #from copy", "..parser import Atomic, Variable, Compound, List from ..parser import isvariable, isatom, isnumber, islist,", "To Occur-check) then '\\\\='(X, Y) is true iff X and Y are not", "not isvariable(x) class Number_1(BuiltIn): '''number(@term) number(X) is true if X is a member", "atom, charlist): if not isvariable(atom) and not isatom(atom): self.throw_type_error('atom', atom) if isvariable(atom): if", "s of the variables of the former to the variables of the latter", "not self.s: return False return self.pick_bag(template, goal, instances) def reexecute(self, template, goal, instances):", "self.throw_instantiation_error() c = evaluate_expression(expression) return self.unify(result, Atomic(c)) def evaluate_expression(term): # TODO No overflow/underflow", "import search_builtin if search_builtin(head): pi = Compound('/', Atomic(head.name), Atomic(head.arity)) self.throw_permission_error('modify', 'static_procedure', pi) self.kb.assert_clause(clause,", "?list) setof/3 assembles as a list the solutions of a goal for each", "s = atom12.name if not s.endswith(atom2.name): return False else: i = s.index(atom2.name) self.data", 
"if (isvariable(x) and isvariable(y) and x.name == '_' and y.name == '_'): return", "else Atomic('witness') g = iterated_goal_term(goal) findall = Findall_3(self.kb) findall.execute(Compound('+', self.witness, template), g, Variable('S'))", "because in ISO Prolog float_fractional_part(x) is valid only when x is a float.'''", "as expressions the corresponding arithmetic value of E1 is less than the corresponding", "not core._FLAGS[flag.name]: self.throw_domain_error('prolog_flag', flag) self.flags = {f for f in core._FLAGS.values() if core.unify(flag,", "existing clauses of the procedure whose predicate is equal to the functor of", "x, y): # TODO prologlib crashes if you attempt to unify two STO", "if term.isfree(): return {term} else: term = deref(term) if isinstance(term, Atomic): return set()", "equivalent to intdiv(x, y). Also, we need to manage ZeroDivisionError errors on our", "an integer, and always returns a float. Also, we need to manage ZeroDivisionError", "* if T unifies with ^(_, Goal) then G is the iterated goal", ": Assertz_1, 'retract/1' : Retract_1, 'abolish/1' : Abolish_1, # All solutions (ISO 8.10)", "not occurring in Template or Goal would be instantiated by successive re-executions of", "# current_predicate/1 is implementation dependent i = self.indicators.pop() return self.unify(pi, i) ### ###", "head.arity == 0: return Atomic(head.name) from ..core import renamed_copy return renamed_copy(head) class CurrentPredicate_1(BuiltIn):", "of variables defined recursively as follows: * if T is a variable or", "PrologTypeError('integer', Atomic(x)) return ~x ### ### Arithmetic comparison (ISO 8.7) ### class ArithmeticEqual_2(BuiltIn):", "if term.arity == 0 and term._isnumber(): return term.value if isinstance(term, Compound): from ..core", "atom, before, length, after, subatom): s, p = self.data.pop(0) b = atom.name.index(s, p)", ": Nonvar_1, 'number/1' : Number_1, # Term comparison (ISO 8.4) '@=</2' : TermLessThanOrEqual_2,", "i def 
float_fractional_part(x): '''Redefined w.r.t. Python because in ISO Prolog float_fractional_part(x) is valid", "length, after, subatom) def reexecute(self, atom, before, length, after, subatom): self.reset_substitution() if not", "else: return ground(term.binding(), mgu) if isinstance(term, Compound): args = [] for arg in", "# Utility functions def free_variable_set(t, v): '''The free variable set FV of a", "state as if the procedure identified by Pred had never existed.''' def execute(self,", "None: if mgu: t2.apply(mgu) # Do not propagate renamed term variables bindings #", "not isvariable(atom) and not isatom(atom): self.throw_type_error('atom', atom) if isvariable(atom): if ispartiallist(codelist): self.throw_instantiation_error() if", "from the processor and returning to whatever system invoked Prolog, passing the value", "v2 ### ### Clause retrival and information (ISO 8.8) ### class Clause_2(BuiltIn): '''clause(+head,", "arg in term.value[1:]: args.append(ground(arg, mgu)) return Compound(term.name, *args) return term class Bagof_3(BuiltIn): '''bagof(?term,", "'<</2' : leftshift, '/\\\\/2' : bitand, '\\\\//2' : bitor, '\\\\/1' : bitnot} return", "class Float_1(BuiltIn): '''float(@term) float(X) is true iff X is a member of the", "evaluate_expression(e2) return v1 >= v2 ### ### Clause retrival and information (ISO 8.8)", "s = atom12.name self.data = [(s[:i], s[i:], s) for i in range(len(s) +", "t2 just in # case t2 were a renamed variable (e.g. coming #", "h, b = convert_clause_to_term(clause.head(), clause.body()) if (core.unify(h, head) is not None and core.unify(b,", "[(s[:i], s[i:], s) for i in range(len(s) + 1)] elif isvariable(atom1): s =", "variable (e.g. 
coming # from a clause renaming) temp = mgu[t2.name] mgu.reduce() mgu.update({t2.name", "is a member of the set I.''' def execute(self, x): return x.arity ==", "def execute(self, x): return isinstance(x, Atomic) class Compound_1(BuiltIn): '''compound(@term) compound(X) is true iff", "element is the character code for the corresponding character of the name.''' def", "should use write_canonical/1 lst = [Atomic(ord(c)) for c in chars] return self.unify(codelist, List.from_list(lst))", "c) def convert_clause_to_term(head, body): return (convert_to_term(head), convert_to_term(body)) def convert_to_term(head): if head.arity == 0:", "class Arg_3(BuiltIn): '''arg(+integer, +compound_term, ?term) arg(N, Term, Arg) is true iff the Nth", "Clause creation and destruction (ISO 8.9) 'asserta/1' : Asserta_1, 'assertz/1' : Assertz_1, 'retract/1'", "= wwtt.value[1] #from copy import deepcopy #subst = core.unify(ww, deepcopy(self.witness)) subst = core.unify(ww,", "with a term T which is a renamed copy of Term_1.''' def execute(self,", "T is a term defined recursively as follows: * if T unifies with", "args = [] for arg in term.value[1:]: args.append(ground(arg, mgu)) return Compound(term.name, *args) return", "functors (ISO 9.4) ### class Is_2(BuiltIn): \"\"\"is(?term, @evaluable) 'is'(Result, Expression) is true iff", "Head :- Body.''' def execute(self, head, body): if isvariable(head): self.throw_instantiation_error() if not iscallable(head):", "isinstance(before.value, int))): self.throw_type_error('integer', before) if (not isvariable(length) and not (isnumber(length) and isinstance(length.value, int))):", "variants if there is a bijection s of the variables of the former", "class AtomCodes_2(BuiltIn): '''atom_codes(+atom, ?character_code_list)\\natom_codes(-atom, +character_code_list) atom_codes(Atom, List) is true iff List is a", "(ISO 8.15) # FIXME \\+ does not work because of what is probably", "@term) Test the ordering of two terms. 
'@>'(X, Y) is true iff Y", "if not isvariable(atom) and not isatom(atom): self.throw_type_error('atom', atom) if not isvariable(subatom) and not", "s = set() if isinstance(term, Compound): for arg in term.value[1:]: s.update(variable_set(arg)) else: #", ": ArithmeticLessThan_2, '=</2' : ArithmeticLessThanOrEqual_2, '>/2' : ArithmeticGreaterThan_2, '>=/2' : ArithmeticGreaterThanOrEqual_2, # Clause", "return x | y def bitnot(x): '''Redefined w.r.t. Python because we need to", "return False else: i = len(atom1.name) self.data = [(s[:i], s[i:], s)] else: n1", "E2 as expressions the corresponding arithmetic value of E1 is greater than the", "list is found is undefined.''' def _create_solution_list(self, s): solutions = [] if s", "Univ_2(BuiltIn): \"\"\"'=..'(+nonvar, ?list) '=..'(-nonvar, +list) '=..'(Term, List) is true iff: * Term is", "'''integer(@term) integer(X) is true iff X is a member of the set I.'''", "execute(self, number, codelist): if isvariable(number) and ispartiallist(codelist): self.throw_instantiation_error() if isvariable(number): for element in", "== after.value] if isinstance(subatom, Atomic): self.data = [(d, p) for (d, p) in", "raise PrologEvaluationError('undefined') if not x and y < 0: from ..core import PrologEvaluationError", "= elements.head return self.unify(term, t) elif len(elements) > 1: name = elements.head.name t", "raise PrologTypeError('integer', Atomic(s)) return n << s def bitand(x, y): '''Redefined w.r.t. Python", "variable or an atomic term, then EV is the empty set * else", "power(x, y): '''Redefined w.r.t. Python because in ISO Prolog x ** y with", "'static_procedure', pi) self.kb.assert_clause(clause, append=True) return True class Retract_1(BuiltIn): '''retract(+clause) retract(Clause) is true iff", "use write_canonical/1 lst = [Atomic(ord(c)) for c in chars] return self.unify(codelist, List.from_list(lst)) ###", "and self.unify(value, Atomic(f.value)) class Halt_0(BuiltIn): '''halt halt neither succeeds nor fails. 
It has", "\"\"\"'=\\='(@evaluable, @evaluable) '=\\='(E1, E2) is true iff evaluating E1 and E2 as expressions", "# Implementation defined hooks (ISO 8.17) 'set_prolog_flag/2' : SetPrologFlag_2, 'current_prolog_flag/2' : CurrentPrologFlag_2, 'halt/0'", "for one of the user-defined procedures in the database.''' def execute(self, pi): if", "import modf f, i = modf(x) return f def power(x, y): '''Redefined w.r.t.", "else: chars = list(str(number.value)) # FIXME this should use write_canonical/1 lst = [Atomic(c)", "Float_1(BuiltIn): '''float(@term) float(X) is true iff X is a member of the set", "system invoked Prolog, passing the value of X as a message.''' def execute(self,", "not isvariable(pi) and not (pi.name == '/' and pi.arity == 2): self.throw_type_error('predicate_indicator', pi)", "'not/1' : Not_1, 'repeat/0' : Repeat_0, # Atomic term processing (ISO 8.16) 'atom_length/2'", "None and core.unify(b, body) is not None): self.clauses_to_unify.append(Compound('clause', h, b)) self.clauses_to_remove.append(clause) return self.pick_clause(head,", "2: s.update(variable_set(term.value[1])) s.update(existential_variable_set(term.value[2])) return s return s def iterated_goal_term(term): '''The iterated goal term", "from ..core import PrologTypeError raise PrologTypeError('integer', Atomic(y)) return x // y def module(x,", "isatom(atom2): self.throw_type_error('atom', atom2) if not isvariable(atom12) and not isatom(atom12): self.throw_type_error('atom', atom12) if isvariable(atom1)", "is true. repeat is re-executable. 
''' def execute(self): return True def reexecute(self): return", "self.data if n.index(d, p) == before.value] if isinstance(length, Atomic): self.data = [(d, p)", "on access permission to a user-defined # procedure is handled directly by the", "term defined recursively as follows: * if T unifies with ^(_, Goal) then", "self.unify(values, instances) def ground(term, mgu): if isinstance(term, Variable): if not term.value: value =", "= before.value if isinstance(before, Atomic) else 0 end = len(n) - (after.value if", "the former by Xs.''' from ..core import deref t = deref(t) v =", "return log(x) def squareroot(x): '''Redefined w.r.t. Python because we need to manage ValueError", "not isinstance(term, Compound): self.throw_type_error('compound', term) if n.value < 0: self.throw_domain_error('not_less_than_zero', n) if n.value", "different instantiation of the free variables in that goal. Each list is a", "{f for f in core._FLAGS.values() if core.unify(flag, Atomic(f.name)) is not None} if not", "with Head :- Body. It is used to remove those unifying clauses from", "def bitor(x, y): '''Redefined w.r.t. Python because we need to manage TypeError errors", "and decomposition (ISO 8.5) ### class Functor_3(BuiltIn): '''functor(-nonvar, +atomic, +integer) functor(+nonvar, ?atomic, ?integer)", "recursively as follows: * if T unifies with ^(_, Goal) then G is", "evaluating E1 and E2 as expressions the corresponding arithmetic value of E1 is", "if n.index(d, p) == before.value] if isinstance(length, Atomic): self.data = [(d, p) for", "true iff X is a member of the set F.''' def execute(self, x):", "different instantiation of the free variables in it. 
The elements of each list", "is 0.''' def execute(self, term, name, arity): if isvariable(term) and isvariable(name): self.throw_instantiation_error() if", "return self.unify(result, Atomic(c)) def evaluate_expression(term): # TODO No overflow/underflow errors # TODO No", "PrologTypeError('integer', Atomic(n)) if not isinstance(y, int): from ..core import PrologTypeError raise PrologTypeError('integer', Atomic(s))", "atom_codes(Atom, List) is true iff List is a list whose elements correspond to", "'cos/1' : math.cos, 'atan/1' : math.atan, 'exp/1' : math.exp, 'log/1' : logarithm, 'sqrt/1'", "Atomic(head.arity)) self.throw_permission_error('modify', 'static_procedure', pi) self.clauses_to_unify = [] self.clauses_to_remove = [] procedure = self.kb.search(head)", "True class Retract_1(BuiltIn): '''retract(+clause) retract(Clause) is true iff the database contains at least", "the procedure specified by the predicate indicator Pred and all its clauses, leaving", "int))): self.throw_type_error('integer', length) if isnumber(length) and length.value < 0: self.throw_domain_error('not_less_than_zero', length) size =", "is true iff X is not a member of the set V.''' def", "functor whose identifier is Name and arity Arity, or * Term is an", "### class AtomLength_2(BuiltIn): '''atom_length(+atom, ?integer) atom_length(Atom, Length) is true iff integer Length equals", "self.clauses_to_unify.append(Compound('clause', h, b)) self.clauses_to_remove.append(clause) return self.pick_clause(head, body) def reexecute(self, clause): self.reset_substitution() if clause.predicate_indicator()", "mgu.reduce() mgu.update({t2.name : temp}) else: mgu.reduce() self.substitution.update(mgu) return True return False ### ###", "if isinstance(term, Atomic): return set() s = set() if isinstance(term, Compound): for arg", "Python __eq__ method does not hold Prolog # semantics for anonymous variables if", "def intdivide(x, y): '''Redefined w.r.t. 
Python because in ISO Prolog x // y", ": Asserta_1, 'assertz/1' : Assertz_1, 'retract/1' : Retract_1, 'abolish/1' : Abolish_1, # All", "true iff X and Y are identical terms.\"\"\" def execute(self, x, y): #", "not isinstance(s, int): from ..core import PrologTypeError raise PrologTypeError('integer', Atomic(s)) return n <<", "isinstance(name, Atomic): self.throw_type_error('atomic', name) if isvariable(term) and not isatom(name) and arity.value > 0:", "Atomic(head.arity)) self.throw_permission_error('modify', 'static_procedure', pi) self.kb.assert_clause(clause, append=True) return True class Retract_1(BuiltIn): '''retract(+clause) retract(Clause) is", "self.unify(term, t) elif len(elements) > 1: name = elements.head.name t = Compound(name, *elements.as_list()[1:])", "if d == subatom.value] if not self.data: return False return self.pick_data(atom, before, length,", "Clause to the database after all existing clauses of the procedure whose predicate", "== '_'): return False return x == y class TermNotIdentical_2(BuiltIn): \"\"\"'\\=='(@term, @term) Test", "import PrologTypeError raise PrologTypeError('integer', Atomic(y)) return x % y def sign(x): '''Redefined w.r.t.", "if not islist(elements) and not ispartiallist(elements): self.throw_type_error('list', elements) if isvariable(term) and islist(elements) and", "the list whose head is the functor name of Term and whose tail", "wwtt, t in zip(wt_list, t_list): ww = wwtt.value[1] #from copy import deepcopy #subst", "Var_1(BuiltIn): '''var(@term) var(X) is true iff X is a member of the set", "t_list): ww = wwtt.value[1] #from copy import deepcopy #subst = core.unify(ww, deepcopy(self.witness)) subst", "False else: i = s.index(atom2.name) self.data = [(s[:i], s[i:], s)] elif isvariable(atom2): s", "expression is Result.\"\"\" def execute(self, result, expression): if isvariable(expression): self.throw_instantiation_error() c = evaluate_expression(expression)", "# Clause creation and destruction (ISO 8.9) 'asserta/1' : 
Asserta_1, 'assertz/1' : Assertz_1,", "f = core._FLAGS[flag.name] if len(f.allowed) == 1: self.throw_permission_error('modify', 'flag', flag) if value.name not", "execute(self, term): if isvariable(term): self.throw_instantiation_error() if isnumber(term): self.throw_type_error('callable', term) from .. import core", "atom Atom_12 are the result of concatenating the characters of the name of", "list whose only element is Term, or * Term is a compound term", "Not_1, 'not/1' : Not_1, 'repeat/0' : Repeat_0, # Atomic term processing (ISO 8.16)", "Python because in ISO Prolog float_integer_part(x) is valid only when x is a", "A or I or F and is false if X is a member", "divide, '///2' : intdivide, 'mod/2' : module, 'rem/2' : module, 'floor/1' : math.floor,", "clause.value[1] if clause.predicate_indicator() == ':-/2' else clause if isvariable(head): self.throw_instantiation_error() if isnumber(head): self.throw_type_error('callable',", "and self.unify(length, Atomic(l)) and self.unify(after, Atomic(a)) and self.unify(subatom, Atomic(s))) class AtomChars_2(BuiltIn): '''atom_chars(+atom, ?character_list)\\natom_chars(-atom,", "from .. 
import core #t = core.renamed_copy(t1) t = t1._copy_term() # Can't directly", "evaluate_expression(term): # TODO No overflow/underflow errors # TODO No undefined errors if isvariable(term):", "8.17) 'set_prolog_flag/2' : SetPrologFlag_2, 'current_prolog_flag/2' : CurrentPrologFlag_2, 'halt/0' : Halt_0, 'halt/1' : Halt_1", "execute(self, term, elements): if isvariable(term) and ispartiallist(elements): self.throw_instantiation_error() if not islist(elements) and not", "isvariable(number) and not isnumber(number): self.throw_type_error('number', number) if isvariable(number) and not islist(codelist) and not", "and not isatom(atom): self.throw_type_error('atom', atom) if isvariable(atom): if isvariable(charlist): self.throw_instantiation_error() if not islist(charlist)", "of Number which could be output.''' def execute(self, number, codelist): if isvariable(number) and", "term then Sv is the union of the variable sets for each of", "is a sorted list, but the order in which each list is found", "True return x != y class TermLessThan_2(BuiltIn): \"\"\"'@<'(@term, @term) Test the ordering of", "self.data = [(s[:i], s[i:], s)] elif isvariable(atom2): s = atom12.name if not s.startswith(atom1.name):", "the value of X as a message.''' def execute(self, x): if isvariable(x): self.throw_instantiation_error()", ".. 
import core caller = core.Caller() caller._kb = self.kb values = [] result", "expressions the corresponding arithmetic value of E1 is less than the corresponding arithmetic", "b)) return self.pick_clause(head, body) def reexecute(self, head, body): self.reset_substitution() return self.pick_clause(head, body) def", "# semantics for anonymous variables if (isvariable(x) and isvariable(y) and x.name == '_'", "0 if isinstance(x, int) else 0.0 from math import copysign s = copysign(1,", "= Compound(name.name, *t) from ..builtin import search_builtin if search_builtin(c): self.throw_permission_error('modify', 'static_procedure', pi) else:", "of the variable sets for each of the arguments of T.''' from ..core", "atom Atom.''' def execute(self, atom, charlist): if not isvariable(atom) and not isatom(atom): self.throw_type_error('atom',", "the database the procedure specified by the predicate indicator Pred and all its", "self.unify(Compound('clause', head, body), c) def convert_clause_to_term(head, body): return (convert_to_term(head), convert_to_term(body)) def convert_to_term(head): if", "= evaluate_expression(e2) return v1 <= v2 class ArithmeticGreaterThan_2(BuiltIn): \"\"\"'>'(@evaluable, @evaluable) '>'(E1, E2) is", "T unifies with ^(_, Goal) then G is the iterated goal term of", "atom12) if isvariable(atom1) and isvariable(atom2): s = atom12.name self.data = [(s[:i], s[i:], s)", "is true iff X preceeds Y or X and Y are identical terms.\"\"\"", "return self.unify(char, Atomic(c)) elif isvariable(code): c = ord(char.name) return self.unify(code, Atomic(c)) else: return", "y.name == '_'): return True return x != y class TermLessThan_2(BuiltIn): \"\"\"'@<'(@term, @term)", "and isvariable(elements.head): self.throw_instantiation_error() if islist(elements) and not isatom(elements.head) and len(elements) > 1: self.throw_type_error('atom',", "if not self.data: return False return self.pick_data(atom, before, length, after, subatom) def reexecute(self,", "iff X preceeds 
Y.\"\"\" def execute(self, x, y): return x < y class", "x) exit(x.value) # Utility functions def free_variable_set(t, v): '''The free variable set FV", "CharCode_2, 'number_chars/2' : NumberChars_2, 'number_codes/2' : NumberCodes_2, # Implementation defined hooks (ISO 8.17)", "### class Functor_3(BuiltIn): '''functor(-nonvar, +atomic, +integer) functor(+nonvar, ?atomic, ?integer) functor(Term, Name, Arity) is", "try: chr(code.value) except UnicodeDecodeError: self.throw_representation_error(code) if isvariable(char): c = chr(code.value) return self.unify(char, Atomic(c))", "if search_builtin(c): self.throw_permission_error('modify', 'static_procedure', pi) else: self.throw_type_error('predicate_indicator', pi) self.kb.abolish(pi) return True ### ###", "length) if isnumber(after) and after.value < 0: self.throw_domain_error('not_less_than_zero', after) n = atom.name start", "?character_code_list)\\nnumber_codes(-number, ?character_code_list) number_codes(Number, List) is true iff List is a list whose elements", "# Type testing (ISO 8.3) 'var/1' : Var_1, 'atom/1' : Atom_1, 'integer/1' :", "term V is a set of variables defined as the set difference of", "return True return x != y class TermLessThan_2(BuiltIn): \"\"\"'@<'(@term, @term) Test the ordering", "'float/1' : Float_1, 'atomic/1' : Atomic_1, 'compound/1' : Compound_1, 'nonvar/1' : Nonvar_1, 'number/1'", "isvariable(atom12): self.throw_instantiation_error() if not isvariable(atom1) and not isatom(atom1): self.throw_type_error('atom', atom1) if not isvariable(atom2)", "### Term creation and decomposition (ISO 8.5) ### class Functor_3(BuiltIn): '''functor(-nonvar, +atomic, +integer)", "class AtomConcat_3(BuiltIn): '''atom_concat(?atom, ?atom, +atom)\\natom_concat(+atom, +atom, -atom) atom_concat(Atom_1, Atom_2, Atom_12) is true iff", "the successive characters of the name of atom Atom, and the value of", "and Y are unifiable.\"\"\" def execute(self, x, y): # TODO prologlib crashes if", "systematic replacement of 
all variables in X by new variables.''' def execute(self, template,", "same type of number as its input.''' if not x: return 0 if", "return False return self.pick_flag(flag, value) def reexecute(self, flag, value): self.reset_substitution() if not self.flags:", "and len(element.name) != 1: self.throw_type_error('character', element) if isvariable(number) or islist(charlist): from ..parser import", "Prolog sign(x) must return the same type of number as its input.''' if", "return x.arity == 0 and isinstance(x.value, int) class Float_1(BuiltIn): '''float(@term) float(X) is true", "[e for e in self.s if e not in wt_list] from .. import", "true iff call(Term) is false.\"\"\" # \"\"\"'\\\\+'(@callable_term) # # '\\\\+'(Term) is true iff", "isinstance(after, Atomic) else 0) self.data = [] while start <= end: for i", "term T which is a renamed copy of Term_1.''' def execute(self, t1, t2):", "deref(v) if isinstance(t, Atomic) and isinstance(v, Atomic): return t == v if isvariable(t)", "found is undefined.''' def _create_solution_list(self, s): solutions = [] if s == List.EMPTY_LIST", "and self.unify(subatom, Atomic(s))) class AtomChars_2(BuiltIn): '''atom_chars(+atom, ?character_list)\\natom_chars(-atom, +character_list) atom_chars(Atom, List) is true iff", "a list whose elements correspond to the successive characters of the name of", "'=:='(E1, E2) is true iff evaluating E1 and E2 as expressions the corresponding", "Instances unifies with the list of values to which a variable X not", "for the corresponding character of the name.''' def execute(self, atom, codelist): if not", "(ISO 8.17) ### class SetPrologFlag_2(BuiltIn): '''set_prolog_flag(+flag, @nonvar) A goal set_prolog_flag(Flag, Value) enables the", "isvariable(term): self.throw_instantiation_error() if isnumber(term): self.throw_type_error('callable', term) from .. 
import core caller = core.Caller()", "def reexecute(self, pi): self.reset_substitution() return self.pick_indicator(pi) def pick_indicator(self, pi): if not self.indicators: return", "if isvariable(number) and not islist(codelist) and not ispartiallist(codelist): self.throw_type_error('list', codelist) if islist(codelist): for", "the variable set of V and the existential variable set of T.''' vst", "true iff call(Term) is false.\"\"\" def execute(self, term): if isvariable(term): self.throw_instantiation_error() if isnumber(term):", "code in codelist.as_list()] return atom.name == ''.join(chars) class CharCode_2(BuiltIn): '''char_code(+character, ?character_code)\\nchar_code(-character, +character_code) char_code(Char,", "execute(self, x): return isinstance(x, (Compound, List)) class Nonvar_1(BuiltIn): '''nonvar(@term) nonvar(X) is true iff", "'\\\\//2' : bitor, '\\\\/1' : bitnot} return d.get(name) def divide(x, y): '''Redefined w.r.t.", "Sv is the union of the variable sets for each of the arguments", "Bagof_3(BuiltIn): '''bagof(?term, +callable_term, ?list) bagof(Template, Goal, Instances) assembles as a list the solutions", "self.throw_domain_error('not_less_than_zero', length) size = Atomic(len(atom.name)) return self.unify(length, size) class AtomConcat_3(BuiltIn): '''atom_concat(?atom, ?atom, +atom)\\natom_concat(+atom,", "PrologEvaluationError('undefined') return float(x ** y) def logarithm(x): '''Redefined w.r.t. 
Python because we need", "Atomic(head.name), Atomic(head.arity)) self.throw_permission_error('modify', 'static_procedure', pi) self.clauses_to_unify = [] self.clauses_to_remove = [] procedure =", "x.name == '_' and y.name == '_'): return True return x <= y", "f = self.flags.pop() return self.unify(flag, Atomic(f.name)) and self.unify(value, Atomic(f.value)) class Halt_0(BuiltIn): '''halt halt", "execute(self, n, term, arg): if isvariable(n) or isvariable(term): self.throw_instantiation_error() if not isinstance(n.value, int):", "charlist.as_list()] return self.unify(atom, Atomic(''.join(chars))) elif isvariable(charlist) or islist(charlist) or ispartiallist(charlist): chars = [Atomic(c)", "Xs.''' from ..core import deref t = deref(t) v = deref(v) if isinstance(t,", "is defined only when y is an integer, and always returns a float.", "(core.unify(h, head) is not None and core.unify(b, body) is not None): self.clauses.append(Compound('clause', h,", "e1, e2): if isvariable(e1) or isvariable(e2): self.throw_instantiation_error() v1 = evaluate_expression(e1) v2 = evaluate_expression(e2)", "if not values else List.from_list(values) return self.unify(values, instances) def ground(term, mgu): if isinstance(term,", "identifier is Name and arity Arity, or * Term is an atomic term", "v.arity: return False bijection = {} for a1, a2 in zip(t.value[1:], v.value[1:]): if", "when y is an integer, and always returns a float. 
Also, we need", "import core # for flags if not isvariable(flag) and not isatom(flag): self.throw_type_error('atom', flag)", "8.4) '@=</2' : TermLessThanOrEqual_2, '==/2' : TermIdentical_2, '\\==/2' : TermNotIdentical_2, '@</2' : TermLessThan_2,", "not islist(charlist) and not ispartiallist(charlist): self.throw_type_error('list', charlist) if islist(charlist): for element in charlist.as_list():", "def reexecute(self, atom, before, length, after, subatom): self.reset_substitution() if not self.data: return False", "'''number_codes(+number, ?character_code_list)\\nnumber_codes(-number, ?character_code_list) number_codes(Number, List) is true iff List is a list whose", "isinstance(after.value, int))): self.throw_type_error('integer', after) if isnumber(before) and before.value < 0: self.throw_domain_error('not_less_than_zero', before) if", "Number which could be output.''' def execute(self, number, codelist): if isvariable(number) and ispartiallist(codelist):", "if isvariable(number) and ispartiallist(codelist): self.throw_instantiation_error() if isvariable(number): for element in codelist.as_list(): if isvariable(element):", "return False return self.pick_flag(flag, value) def pick_flag(self, flag, value): f = self.flags.pop() return", "procedure are handled directly by the database from ..builtin import search_builtin if search_builtin(head):", "whose predicate is equal to the functor of the head of Clause.''' def", "'''The iterated goal term G of a term T is a term defined", "of what is probably a parser # bug: the operator's \"scope\" is much", "= {'+/2' : operator.add, '*/2' : operator.mul, '-/2' : operator.sub, '-/1' : operator.neg,", "isinstance(term, Variable): if term.isfree(): return {term} else: term = deref(term) if isinstance(term, Atomic):", "The elements of each list are in order of solution, but the order", "and len(element.name) != 1: self.throw_type_error('character', element) if isvariable(atom): from ..core import deref chars", "# Do not 
propagate renamed term variables bindings # outside the context of", "clause in procedure.clauses(): h, b = convert_clause_to_term(clause.head(), clause.body()) if (core.unify(h, head) is not", "return self.unify(codelist, List.from_list(codes)) else: chars = [chr(code.value) for code in codelist.as_list()] return atom.name", "self.throw_type_error('list', codelist) if islist(codelist): for element in codelist.as_list(): if not isvariable(element): try: chr(element.value)", "and Arity is 0.''' def execute(self, term, name, arity): if isvariable(term) and isvariable(name):", "isinstance(arity.value, int): self.throw_type_error('integer', arity) if not isatom(name): self.throw_type_error('atom', name) # TODO Missing max_arity", "def existential_variable_set(term): '''The existential variables set EV of a term T is a", "two terms. '@=<'(X, Y) is true iff X preceeds Y or X and", "= self.kb values = [] result = caller.solve(goal) while result: from copy import", "atom Atom_2 to the characters of the name of the atom Atom_1.''' def", "= evaluate_expression(e1) v2 = evaluate_expression(e2) return v1 >= v2 ### ### Clause retrival", "[c.name for c in charlist.as_list()] return atom.name == ''.join(chars) class AtomCodes_2(BuiltIn): '''atom_codes(+atom, ?character_code_list)\\natom_codes(-atom,", "and isinstance(length.value, int))): self.throw_type_error('integer', length) if (not isvariable(after) and not (isnumber(after) and isinstance(after.value,", "if not isinstance(n, int): from ..core import PrologTypeError raise PrologTypeError('integer', Atomic(n)) if not", "s.index(atom2.name) self.data = [(s[:i], s[i:], s)] elif isvariable(atom2): s = atom12.name if not", "isinstance(x, float): from ..core import PrologTypeError raise PrologTypeError('float', Atomic(x)) from math import modf", "return False return self.unify(arg, term.value[n.value]) class Univ_2(BuiltIn): \"\"\"'=..'(+nonvar, ?list) '=..'(-nonvar, +list) '=..'(Term, List)", "import PrologTypeError raise 
PrologTypeError('integer', Atomic(s)) return x | y def bitnot(x): '''Redefined w.r.t.", "len(f.allowed) == 1: self.throw_permission_error('modify', 'flag', flag) if value.name not in f.allowed: culprit =", "Compound): for arg in term.value[1:]: s.update(variable_set(arg)) else: # a list for e in", "Y) is true iff X and Y are not identical terms.\"\"\" def execute(self,", "y is an integer, and always returns a float. Also, we need to", "the set difference of the variable set of T and BV where BV", "return v1 <= v2 class ArithmeticGreaterThan_2(BuiltIn): \"\"\"'>'(@evaluable, @evaluable) '>'(E1, E2) is true iff", "def execute(self): exit(0) class Halt_1(BuiltIn): '''halt(+integer) halt(X) neither succeeds nor fails. It has", "terms are variants if there is a bijection s of the variables of", "value of E2.\"\"\" def execute(self, e1, e2): if isvariable(e1) or isvariable(e2): self.throw_instantiation_error() v1", "and information (ISO 8.8) ### class Clause_2(BuiltIn): '''clause(+head, ?callable_term) clause(Head, Body) is true", "and not isinstance(x.value, int): self.throw_type_error('integer', x) exit(x.value) # Utility functions def free_variable_set(t, v):", "Compound('/', Atomic(n), Atomic(int(a))) from .. import core if core.unify(pi, indicator) is not None:", "Atomic(s)) return n << s def bitand(x, y): '''Redefined w.r.t. 
Python because we", "subatom): s, p = self.data.pop(0) b = atom.name.index(s, p) l = len(s) a", "variables defined recursively as follows: * if T is an atomic term, then", "+character_list) atom_chars(Atom, List) is true iff List is a list whose elements are", "search_builtin(head): pi = Compound('/', Atomic(head.name), Atomic(head.arity)) self.throw_permission_error('modify', 'static_procedure', pi) self.kb.assert_clause(clause, append=False) return True", "undefined errors if isvariable(term): from ..core import PrologInstantiationError raise PrologInstantiationError() if term.arity ==", "head = clause body = Atomic.TRUE return self.pick_clause(head, body) def pick_clause(self, head, body):", "arithmetic value of E1 is less than or equal to the corresponding arithmetic", "AtomConcat_3(BuiltIn): '''atom_concat(?atom, ?atom, +atom)\\natom_concat(+atom, +atom, -atom) atom_concat(Atom_1, Atom_2, Atom_12) is true iff characters", "### All solutions (ISO 8.10) ### class Findall_3(BuiltIn): '''findall(?term, +callable_term, ?list) findall(Template, Goal,", "characters of the name of AtomR.''' def execute(self, atom, before, length, after, subatom):", "core.unify(b, body) is not None): self.clauses_to_unify.append(Compound('clause', h, b)) self.clauses_to_remove.append(clause) return self.pick_clause(head, body) def", "v1 = evaluate_expression(e1) v2 = evaluate_expression(e2) return v1 == v2 class ArithmeticNotEqual_2(BuiltIn): \"\"\"'=\\='(@evaluable,", "follows: * if T unifies with ^(_, Goal) then G is the iterated", "elements correspond to the successive characters of the name of atom Atom, and", "len(n) - (after.value if isinstance(after, Atomic) else 0) self.data = [] while start", "'truncate/1' : math.trunc, 'float/1' : float, 'abs/1' : operator.abs, 'sign/1' : sign, 'float_integer_part/1'", "those unifying clauses from the database.''' def execute(self, clause): if clause.predicate_indicator() == ':-/2':", "raise PrologTypeError('float', Atomic(x)) from math 
import modf f, i = modf(x) return i", ": operator.abs, 'sign/1' : sign, 'float_integer_part/1' : float_integer_part, 'float_fractional_part/1' : float_fractional_part, '**/2' :", "'_' and y.name == '_'): return True return x <= y class TermIdentical_2(BuiltIn):", "arguments of Term.\"\"\" def execute(self, term, elements): if isvariable(term) and ispartiallist(elements): self.throw_instantiation_error() if", "variable set FV of a term T with respect to a term V", "is a member of the set I or F and is false if", "the corresponding arithmetic value of E1 is greater than or equal to the", "set V or CT.''' def execute(self, x): return isinstance(x, Atomic) class Compound_1(BuiltIn): '''compound(@term)", "return core.unify(x, y) is None ### ### Type testing (ISO 8.3) ### class", "list whose elements are the character codes corresponding to a character sequence of", "'atom/1' : Atom_1, 'integer/1' : Integer_1, 'float/1' : Float_1, 'atomic/1' : Atomic_1, 'compound/1'", "1: self.throw_type_error('character', char) if not isvariable(code) and not isinstance(code.value, int): self.throw_type_error('integer', code) if", "(ISO 8.9) 'asserta/1' : Asserta_1, 'assertz/1' : Assertz_1, 'retract/1' : Retract_1, 'abolish/1' :", "return x <= y class TermIdentical_2(BuiltIn): \"\"\"'=='(@term, @term) Test the ordering of two", "in # case t2 were a renamed variable (e.g. coming # from a", ": CharCode_2, 'number_chars/2' : NumberChars_2, 'number_codes/2' : NumberCodes_2, # Implementation defined hooks (ISO", "variables defined as the union of the variable set of V and the", "raise PrologTypeError('integer', Atomic(x)) return ~x ### ### Arithmetic comparison (ISO 8.7) ### class", "Length is the number of characters of the name of Sub_atom, and After", "=/2 # instead of using the proper unify_with_occur_check/2 predicate. 
return self.unify(x, y) class", "are the successive characters of the name of atom Atom.''' def execute(self, atom,", "to the characters of the name of the atom Atom_1.''' def execute(self, atom1,", "int): self.throw_type_error('integer', code) if not isvariable(code): try: chr(code.value) except UnicodeDecodeError: self.throw_representation_error(code) if isvariable(char):", "the one-char atoms whose names are the successive characters of the name of", "def execute(self, x): return isatom(x) class Integer_1(BuiltIn): '''integer(@term) integer(X) is true iff X", "the empty set * else if T is a variable then Sv is", "and not isnumber(x) and not isinstance(x.value, int): self.throw_type_error('integer', x) exit(x.value) # Utility functions", "isinstance(term, Compound): l = List.from_list([Atomic(term.name)] + list(term.value[1:])) return self.unify(elements, l) if isinstance(term, Variable):", "s if term.name == '^' and term.arity == 2: s.update(variable_set(term.value[1])) s.update(existential_variable_set(term.value[2])) return s", "not in core._FLAGS: self.throw_domain_error('prolog_flag', flag) f = core._FLAGS[flag.name] if len(f.allowed) == 1: self.throw_permission_error('modify',", "a = len(atom.name) - (b + l) return (self.unify(before, Atomic(b)) and self.unify(length, Atomic(l))", "term.value[n.value]) class Univ_2(BuiltIn): \"\"\"'=..'(+nonvar, ?list) '=..'(-nonvar, +list) '=..'(Term, List) is true iff: *", "effect of exiting from the processor and returning to whatever system invoked Prolog.'''", "Variable('S')) s = findall.substitution['S'] self.s = self._create_solution_list(s) if not self.s: return False return", "and not (isnumber(length) and isinstance(length.value, int))): self.throw_type_error('integer', length) if (not isvariable(after) and not", "is true. It is used to add Clause to the database before all", "float(x ** y) def logarithm(x): '''Redefined w.r.t. 
Python because we need to manage", "isvariable(atom): chars = [chr(code.value) for code in codelist.as_list()] return self.unify(atom, Atomic(''.join(chars))) elif isvariable(codelist)", "temp = mgu[t2.name] mgu.reduce() mgu.update({t2.name : temp}) else: mgu.reduce() self.substitution.update(mgu) return True return", "by =/2 # instead of using the proper unify_with_occur_check/2 predicate. return self.unify(x, y)", "'''current_prolog_flag(?flag, ?term) current_prolog_flag(Flag, Value) is true iff Flag is a flag supported by", "user-defined # procedure is handled directly by the database from ..builtin import search_builtin", "self.substitution.update(mgu) return True return False ### ### Arithmetic evaluation (ISO 8.6) ### Simple", "i.split('/') indicator = Compound('/', Atomic(n), Atomic(int(a))) from .. import core if core.unify(pi, indicator)", "Prolog float_integer_part(x) is valid only when x is a float.''' if not isinstance(x,", "case t2 were a renamed variable (e.g. coming # from a clause renaming)", "Asserta_1, 'assertz/1' : Assertz_1, 'retract/1' : Retract_1, 'abolish/1' : Abolish_1, # All solutions", "== '_'): return False return x >= y ### ### Term creation and", "each different instantiation of the free variables in that goal. Each list is", "is true iff Y preceeds X or Y and X are identical terms.\"\"\"", "deepcopy(self.witness)) subst = core.unify(ww, self.witness) ww.apply(subst) t.apply(subst) self.substitution.update(subst) t_list = List.from_list(t_list) self.s =", "to add Clause to the database before all existing clauses of the procedure", "elements of each list are in order of solution, but the order in", "isvariable(e1) or isvariable(e2): self.throw_instantiation_error() v1 = evaluate_expression(e1) v2 = evaluate_expression(e2) return v1 >=", "d = {'+/2' : operator.add, '*/2' : operator.mul, '-/2' : operator.sub, '-/1' :", "iff List is a list whose elements are the one-char atoms whose names", "the ordering of two terms. 
'=='(X, Y) is true iff X and Y", "X and Y are NSTO (Not Subject To Occur-check) then '='(X, Y) is", "@evaluable) '>'(E1, E2) is true iff evaluating E1 and E2 as expressions the", "errors (e.g. x or y as float) on our own.''' if not isinstance(x,", "math import sqrt return sqrt(x) def rightshift(n, s): '''Redefined w.r.t. Python because we", "if not isinstance(x, int): from ..core import PrologTypeError raise PrologTypeError('integer', Atomic(n)) if not", "isvariable(e1) or isvariable(e2): self.throw_instantiation_error() v1 = evaluate_expression(e1) v2 = evaluate_expression(e2) return v1 <=", "is not None and a2 != a: return False else: bijection[a1] = a2", "and not ispartiallist(instances))): self.throw_type_error('list', instances) from .. import core caller = core.Caller() caller._kb", "isnumber(after) and after.value < 0: self.throw_domain_error('not_less_than_zero', after) n = atom.name start = before.value", "self.kb.search(head) if not procedure: return False if not procedure._public: pi = Compound('/', Atomic(head.name),", "isvariable(e2): self.throw_instantiation_error() v1 = evaluate_expression(e1) v2 = evaluate_expression(e2) return v1 >= v2 ###", "in the database.''' def execute(self, pi): if not isvariable(pi) and not (pi.name ==", "?character_code_list)\\natom_codes(-atom, +character_code_list) atom_codes(Atom, List) is true iff List is a list whose elements", "onto the Term hierarchy classes def variable_set(term): '''The variable set Sv of a", "from ..core import PrologTypeError raise PrologTypeError('integer', Atomic(y)) return x % y def sign(x):", "self.unify(Compound('clause', head, body), c) class Abolish_1(BuiltIn): '''abolish(@predicate_indicator) abolish(Pred) is true. It is used", "'''Redefined w.r.t. Python because we need to manage TypeError errors (e.g. 
n as", "parser # bug: the operator's \"scope\" is much wider than the single #", "in ISO Prolog div(x, y) with x and y integers is equivalent to", "and self.unify(atom2, Atomic(c[1])) and self.unify(atom12, Atomic(c[2]))) class SubAtom_5(BuiltIn): '''sub_atom(+atom, ?integer, ?integer, ?integer, ?atom)", "of E1 is greater than the corresponding arithmetic value of E2.\"\"\" def execute(self,", "in mgu: # Still preserve the binding for t2 just in # case", "isinstance(arity.value, int): self.throw_type_error('integer', arity) # TODO Missing max_arity related error if isvariable(term) and", "TODO prologlib crashes if you attempt to unify two STO terms by =/2", "### ### Arithmetic evaluation (ISO 8.6) ### Simple arithmetic functors (ISO 9.1) ###", "execute(self, x): return isinstance(x, Atomic) class Compound_1(BuiltIn): '''compound(@term) compound(X) is true iff X", "modf(x) return f def power(x, y): '''Redefined w.r.t. Python because in ISO Prolog", "the atom Atom.''' def execute(self, atom, length): if isvariable(atom): self.throw_instantiation_error() if not isatom(atom):", "mgu)) return Compound(term.name, *args) return term class Bagof_3(BuiltIn): '''bagof(?term, +callable_term, ?list) bagof(Template, Goal,", "### Arithmetic comparison (ISO 8.7) ### class ArithmeticEqual_2(BuiltIn): \"\"\"'=:='(@evaluable, @evaluable) '=:='(E1, E2) is", "if isvariable(number) and ispartiallist(charlist): self.throw_instantiation_error() if isvariable(number): for element in charlist.as_list(): if isvariable(element):", "input.''' if not x: return 0 if isinstance(x, int) else 0.0 from math", "and self.unify(arity, Atomic(0)) if isinstance(term, (Compound, List)): return (self.unify(Atomic(term.name), name) and self.unify(Atomic(term.arity), arity))", "if t.name != v.name or t.arity != v.arity: return False bijection = {}", "ArithmeticEqual_2(BuiltIn): \"\"\"'=:='(@evaluable, @evaluable) '=:='(E1, E2) is true iff evaluating E1 and E2 as", "set.''' s = set() if isinstance(term, 
Atomic) or isvariable(term): return s if term.name", "== '.' and elements.arity == 2: if len(elements) == 1: t = elements.head", "term.arity == 2: return iterated_goal_term(term.value[2]) return term def isvariant(t, v): '''Two terms are", "be output.''' def execute(self, number, charlist): if isvariable(number) and ispartiallist(charlist): self.throw_instantiation_error() if isvariable(number):", "t) elif len(elements) > 1: name = elements.head.name t = Compound(name, *elements.as_list()[1:]) return", "while result: from copy import deepcopy v = ground(deepcopy(template), caller.currsubst()) #values.append(core.renamed_copy(v)) values.append(v._copy_term()) result", "self.throw_type_error('compound', term) if n.value < 0: self.throw_domain_error('not_less_than_zero', n) if n.value >= len(term.value): return", "only when y is an integer, and always returns a float. Also, we", "because the bindings # between the renamed copy of t1 and t2 retain", "is true iff X and Y are not unifiable.\"\"\" def execute(self, x, y):", "Not_1(BuiltIn): \"\"\"not(@callable_term) not(Term) is true iff call(Term) is false.\"\"\" # \"\"\"'\\\\+'(@callable_term) # #", "neither succeeds nor fails. 
It has the side effect of exiting from the", "our own.''' if x < 0: from ..core import PrologEvaluationError raise PrologEvaluationError('undefined') from", "return True if isinstance(t, Compound) and isinstance(v, Compound): if t.name != v.name or", "of the name of the atom Atom_1.''' def execute(self, atom1, atom2, atom12): if", "### class Not_1(BuiltIn): \"\"\"not(@callable_term) not(Term) is true iff call(Term) is false.\"\"\" # \"\"\"'\\\\+'(@callable_term)", "+ n2)] return self.pick_data(atom1, atom2, atom12) def reexecute(self, atom1, atom2, atom12): self.reset_substitution() if", "Bagof_3, 'setof/3' : Setof_3, # Logic and control (ISO 8.15) # FIXME \\+", "the set F.''' def execute(self, x): return x.arity == 0 and isinstance(x.value, float)", "def float_integer_part(x): '''Redefined w.r.t. Python because in ISO Prolog float_integer_part(x) is valid only", "template, goal, instances): wt = self.s[0] wt_list = [e for e in self.s", "# case t2 were a renamed variable (e.g. coming # from a clause", "the character codes corresponding to a character sequence of Number which could be", "then EV is the union of the variable set of V and the", "@term) Test the ordering of two terms. 
'\\=='(X, Y) is true iff X", "if isinstance(before, Atomic) else 0 end = len(n) - (after.value if isinstance(after, Atomic)", "which unifies with Head :- Body.''' def execute(self, head, body): if isvariable(head): self.throw_instantiation_error()", "of E1 is less than or equal to the corresponding arithmetic value of", "'''copy_term(?term, ?term) copy_term(Term_1, Term_2) is true iff Term_2 unifies with a term T", "isatom(atom): self.throw_type_error('atom', atom) if not isvariable(subatom) and not isatom(subatom): self.throw_type_error('atom', subatom) if (not", "### Logic and control (ISO 8.15) ### class Not_1(BuiltIn): \"\"\"not(@callable_term) not(Term) is true", "elif isvariable(atom1): s = atom12.name if not s.endswith(atom2.name): return False else: i =", "of Clause.''' def execute(self, clause): head = clause.value[1] if clause.predicate_indicator() == ':-/2' else", "No overflow/underflow errors # TODO No undefined errors if isvariable(term): from ..core import", "term): if isvariable(term): self.throw_instantiation_error() if isnumber(term): self.throw_type_error('callable', term) from .. import core caller", "is a member of the set F.''' def execute(self, x): return x.arity ==", "return x // y def module(x, y): '''Redefined w.r.t. 
Python because in ISO", "atom Atom_1.''' def execute(self, atom1, atom2, atom12): if isvariable(atom1) and isvariable(atom12): self.throw_instantiation_error() if", "the name of atom Atom.''' def execute(self, atom, charlist): if not isvariable(atom) and", "y): # TODO prologlib crashes if you attempt to unify two STO terms", "own.''' if x < 0: from ..core import PrologEvaluationError raise PrologEvaluationError('undefined') from math", "Y are not identical terms.\"\"\" def execute(self, x, y): # The Python __ne__", "y): return x > y class TermGreaterThanOrEqual_2(BuiltIn): \"\"\"'@>=(@term, @term) Test the ordering of", "search_builtin if search_builtin(c): self.throw_permission_error('modify', 'static_procedure', pi) else: self.throw_type_error('predicate_indicator', pi) self.kb.abolish(pi) return True ###", "'>/2' : ArithmeticGreaterThan_2, '>=/2' : ArithmeticGreaterThanOrEqual_2, # Clause retrival and information (ISO 8.8)", "self.throw_permission_error('modify', 'static_procedure', pi) self.kb.assert_clause(clause, append=True) return True class Retract_1(BuiltIn): '''retract(+clause) retract(Clause) is true", "y return x / y def intdivide(x, y): '''Redefined w.r.t. 
Python because in", "self.throw_instantiation_error() v1 = evaluate_expression(e1) v2 = evaluate_expression(e2) return v1 >= v2 ### ###", "specified by the predicate indicator Pred and all its clauses, leaving the database", "'=..'(-nonvar, +list) '=..'(Term, List) is true iff: * Term is an atomic term", "math.atan, 'exp/1' : math.exp, 'log/1' : logarithm, 'sqrt/1' : squareroot, '>>/2' : rightshift,", "_create_solution_list(self, s): solutions = [] if s == List.EMPTY_LIST else s.as_list() solutions =", "return False else: return False class CopyTerm_2(BuiltIn): '''copy_term(?term, ?term) copy_term(Term_1, Term_2) is true", ": math.sin, 'cos/1' : math.cos, 'atan/1' : math.atan, 'exp/1' : math.exp, 'log/1' :", ": AtomLength_2, 'atom_concat/3' : AtomConcat_3, 'sub_atom/5' : SubAtom_5, 'atom_chars/2' : AtomChars_2, 'atom_codes/2' :", "result class Repeat_0(BuiltIn): '''repeat repeat is true. repeat is re-executable. ''' def execute(self):", "isvariable(e1) or isvariable(e2): self.throw_instantiation_error() v1 = evaluate_expression(e1) v2 = evaluate_expression(e2) return v1 !=", "and not islist(codelist) and not ispartiallist(codelist): self.throw_type_error('list', codelist) if islist(codelist): for element in", "= clause.value[1] body = clause.value[2] else: head = clause body = Atomic.TRUE if", "+integer) functor(+nonvar, ?atomic, ?integer) functor(Term, Name, Arity) is true iff: * Term is", "> 0: self.throw_type_error('atom', name) if isinstance(term, Atomic): return self.unify(term, name) and self.unify(arity, Atomic(0))", "if isvariable(flag) or isvariable(value): self.throw_instantiation_error() if not isvariable(flag) and not isatom(flag): self.throw_type_error('atom', flag)", "isvariant(wt.value[1], e.value[1])] t_list = [e.value[2] for e in wt_list] s_next = [e for", "a parser # bug: the operator's \"scope\" is much wider than the single", "= mgu[t2.name] mgu.reduce() mgu.update({t2.name : temp}) else: mgu.reduce() self.substitution.update(mgu) return True 
return False", "isvariable(number) and not isnumber(number): self.throw_type_error('number', number) if isvariable(number) and not islist(charlist) and not", "from the database.''' def execute(self, clause): if clause.predicate_indicator() == ':-/2': head = clause.value[1]", "preceeds X.\"\"\" def execute(self, x, y): return x > y class TermGreaterThanOrEqual_2(BuiltIn): \"\"\"'@>=(@term,", "list(set(solutions)) solutions.sort() return solutions ### ### Logic and control (ISO 8.15) ### class", "for wwtt, t in zip(wt_list, t_list): ww = wwtt.value[1] #from copy import deepcopy", "= PrologParser(s + '.').read_term() return self.unify(number, n) except InvalidTermException as e: self.throw_syntax_error(Atomic(s)) else:", "if term.name == '^' and term.arity == 2: s.update(variable_set(term.value[1])) s.update(existential_variable_set(term.value[2])) return s return", "is the number of characters of the name of AtomR.''' def execute(self, atom,", "Atomic(b)) and self.unify(length, Atomic(l)) and self.unify(after, Atomic(a)) and self.unify(subatom, Atomic(s))) class AtomChars_2(BuiltIn): '''atom_chars(+atom,", "import PrologTypeError raise PrologTypeError('integer', Atomic(s)) return n >> s def leftshift(n, s): '''Redefined", "return not isvariable(x) class Number_1(BuiltIn): '''number(@term) number(X) is true if X is a", "core.unify(flag, Atomic(f.name)) is not None} if not self.flags: return False return self.pick_flag(flag, value)", "return the same type of number as its input.''' if not x: return", "isinstance(name, Atomic) and arity.value == 0: return self.unify(term, name) if isatom(name) and arity.value", ": math.ceil, 'truncate/1' : math.trunc, 'float/1' : float, 'abs/1' : operator.abs, 'sign/1' :", "not isvariable(element): try: chr(element.value) except UnicodeDecodeError: self.throw_representation_error(element) if isvariable(number) or islist(codelist): from ..parser", "name of AtomL, Length is the number of characters of the name of", "atom12): if 
isvariable(atom1) and isvariable(atom12): self.throw_instantiation_error() if isvariable(atom2) and isvariable(atom12): self.throw_instantiation_error() if not", "ww = wwtt.value[1] #from copy import deepcopy #subst = core.unify(ww, deepcopy(self.witness)) subst =", "if not y: from ..core import PrologEvaluationError raise PrologEvaluationError('zero_divisor') if isinstance(x, int) and", "a2): return False return True return False PREDICATES = { # Term unification", "iff the character code for the one-char atom Char is Code.''' def execute(self,", "need to manage ZeroDivisionError errors on our own.''' if not y: from ..core", "for element in charlist.as_list(): if isvariable(element): self.throw_instantiation_error() if isatom(element) and len(element.name) != 1:", "set of variables defined as the set difference of the variable set of", "wider than the single # goal, even when using parentheses! '\\+/1' : Not_1,", "element in charlist.as_list(): if isvariable(element): self.throw_instantiation_error() if isatom(element) and len(element.name) != 1: self.throw_type_error('character',", ": operator.neg, '//2' : divide, '///2' : intdivide, 'mod/2' : module, 'rem/2' :", "8.16) ### class AtomLength_2(BuiltIn): '''atom_length(+atom, ?integer) atom_length(Atom, Length) is true iff integer Length", "execute(self, atom, charlist): if not isvariable(atom) and not isatom(atom): self.throw_type_error('atom', atom) if isvariable(atom):", "head, body): if not self.clauses: return False c = self.clauses.pop(0) return self.unify(Compound('clause', head,", "if X is a member of the set I or F and is", "preceeds Y or X and Y are identical terms.\"\"\" def execute(self, x, y):", "range(len(s) + 1)] elif isvariable(atom1): s = atom12.name if not s.endswith(atom2.name): return False", "the arguments of T.''' from ..core import deref if isinstance(term, Variable): if term.isfree():", "x ** y with x < 0 is defined only when y is", "variable set of V and the existential variables set of the 
term G", "isvariable(number): for element in charlist.as_list(): if isvariable(element): self.throw_instantiation_error() if not isvariable(number) and not", "the ordering of two terms. '@<'(X, Y) is true iff X preceeds Y.\"\"\"", "set() if isinstance(term, Atomic) or isvariable(term): return s if term.name == '^' and", "because of what is probably a parser # bug: the operator's \"scope\" is", "template, goal, instances): fvs = free_variable_set(goal, template) self.witness = Compound('witness', *fvs) if fvs", "_create_solution_list(self, s): return [] if s == List.EMPTY_LIST else s.as_list() class Setof_3(Bagof_3): '''setof(?term,", "'round/1' : round, 'ceiling/1' : math.ceil, 'truncate/1' : math.trunc, 'float/1' : float, 'abs/1'", "then G is the iterated goal term of Goal * else G is", "is the empty set * else if T unifies with ^(V, G) then", "if not isvariable(atom2) and not isatom(atom2): self.throw_type_error('atom', atom2) if not isvariable(atom12) and not", "'atan/1' : math.atan, 'exp/1' : math.exp, 'log/1' : logarithm, 'sqrt/1' : squareroot, '>>/2'", "= pi.value[1:] if isvariable(name) or isvariable(arity): self.throw_instantiation_error() if not isinstance(arity.value, int): self.throw_type_error('integer', arity)", "access permission to a user-defined # procedure are handled directly by the database", "0 end = len(n) - (after.value if isinstance(after, Atomic) else 0) self.data =", "if arity.value < 0: self.throw_domain_error('not_less_than_zero', arity) # error on access permission to a", "NumberChars_2, 'number_codes/2' : NumberCodes_2, # Implementation defined hooks (ISO 8.17) 'set_prolog_flag/2' : SetPrologFlag_2,", "if not isinstance(term, Compound): self.throw_type_error('compound', term) if n.value < 0: self.throw_domain_error('not_less_than_zero', n) if", "clause in the database which corresponds to a term H :- B which", "of the latter such that the latter term results from replacing each variable", "def execute(self, e1, e2): if isvariable(e1) or 
isvariable(e2): self.throw_instantiation_error() v1 = evaluate_expression(e1) v2", "isinstance(term, Atomic) or isvariable(term): return s if term.name == '^' and term.arity ==", "is true iff evaluating E1 and E2 as expressions the corresponding arithmetic value", "return self.unify(term, t) elif len(elements) > 1: name = elements.head.name t = Compound(name,", "* Term is an atomic term equal to Name and Arity is 0.'''", "of Goal for each different instantiation of the free variables in it. The", "return self.unify(code, Atomic(c)) else: return ord(char.name) == code.value class NumberChars_2(BuiltIn): '''number_chars(+number, ?character_list)\\nnumber_chars(-number, +character_list)", "classes def variable_set(term): '''The variable set Sv of a term T is a", "== 2: name, arity = pi.value[1:] if isvariable(name) or isvariable(arity): self.throw_instantiation_error() if not", "or isvariable(e2): self.throw_instantiation_error() v1 = evaluate_expression(e1) v2 = evaluate_expression(e2) return v1 == v2", "information (ISO 8.8) ### class Clause_2(BuiltIn): '''clause(+head, ?callable_term) clause(Head, Body) is true iff:", "pi) self.clauses_to_unify = [] self.clauses_to_remove = [] procedure = self.kb.search(head) if not procedure:", "a term V is a set of variables defined as the set difference", "(after.value if isinstance(after, Atomic) else 0) self.data = [] while start <= end:", "V.''' def execute(self, x): return not isvariable(x) class Number_1(BuiltIn): '''number(@term) number(X) is true", "PrologEvaluationError('undefined') if not x and y < 0: from ..core import PrologEvaluationError raise", "using the proper unify_with_occur_check/2 predicate. 
return self.unify(x, y) class NotUnifiable_2(BuiltIn): \"\"\"'\\\\='(@term, @term) If", "8.15) ### class Not_1(BuiltIn): \"\"\"not(@callable_term) not(Term) is true iff call(Term) is false.\"\"\" #", "'''Two terms are variants if there is a bijection s of the variables", "t == v if isvariable(t) and isvariable(v): return True if isinstance(t, Compound) and", "class Retract_1(BuiltIn): '''retract(+clause) retract(Clause) is true iff the database contains at least one", "arity)) if isinstance(term, Variable): if isinstance(name, Atomic) and arity.value == 0: return self.unify(term,", "if not s.startswith(atom1.name): return False else: i = len(atom1.name) self.data = [(s[:i], s[i:],", "return f def power(x, y): '''Redefined w.r.t. Python because in ISO Prolog x", "not isnumber(number): self.throw_type_error('number', number) if isvariable(number) and not islist(charlist) and not ispartiallist(charlist): self.throw_type_error('list',", "+callable_term, ?list) bagof(Template, Goal, Instances) assembles as a list the solutions of Goal", "x.name == '_' and y.name == '_'): return False return x == y", "'log/1' : logarithm, 'sqrt/1' : squareroot, '>>/2' : rightshift, '<</2' : leftshift, '/\\\\/2'", "= Compound(name.name, *t) return self.unify(term, c) return False class Arg_3(BuiltIn): '''arg(+integer, +compound_term, ?term)", "'\\\\='(X, Y) is true iff X and Y are not unifiable.\"\"\" def execute(self,", "copysign(1, x) return int(s) if isinstance(x, int) else s def float_integer_part(x): '''Redefined w.r.t.", "class Abolish_1(BuiltIn): '''abolish(@predicate_indicator) abolish(Pred) is true. 
It is used to remove from the", "= [] for arg in term.value[1:]: args.append(ground(arg, mgu)) return Compound(term.name, *args) return term", "islist(codelist): for element in codelist.as_list(): if not isvariable(element): try: chr(element.value) except UnicodeDecodeError: self.throw_representation_error(element)", "a user-defined # procedure is handled directly by the database t = tuple(Variable('_')", "n2, n1 + n2)] return self.pick_data(atom1, atom2, atom12) def reexecute(self, atom1, atom2, atom12):", "Term unification (ISO 8.2) ### class Unify_2(BuiltIn): \"\"\"'='(?term, ?term) If X and Y", "value currently associated with it.''' def execute(self, flag, value): from .. import core", "False return x >= y ### ### Term creation and decomposition (ISO 8.5)", "'\\=/2' : NotUnifiable_2, # Type testing (ISO 8.3) 'var/1' : Var_1, 'atom/1' :", "isatom(x) class Integer_1(BuiltIn): '''integer(@term) integer(X) is true iff X is a member of", "8.9) ### class Asserta_1(BuiltIn): '''asserta(@clause) asserta(Clause) is true. It is used to add", "length.value < 0: self.throw_domain_error('not_less_than_zero', length) if isnumber(after) and after.value < 0: self.throw_domain_error('not_less_than_zero', after)", "if t2.name in mgu: # Still preserve the binding for t2 just in", "all its clauses, leaving the database in the same state as if the", "if isinstance(term, Atomic): return self.unify(term, name) and self.unify(arity, Atomic(0)) if isinstance(term, (Compound, List)):", "unifiable.\"\"\" def execute(self, x, y): # TODO prologlib crashes if you attempt to", "mgu[t2.name] mgu.reduce() mgu.update({t2.name : temp}) else: mgu.reduce() self.substitution.update(mgu) return True return False ###", "= deref(t) v = deref(v) if isinstance(t, Atomic) and isinstance(v, Atomic): return t", "x // y is valid only when x and y are integers. 
Also,", "if T is a compound term then Sv is the union of the", "in term.as_list(): s.update(variable_set(e)) return s def existential_variable_set(term): '''The existential variables set EV of", "### Implementation defined hooks (ISO 8.17) ### class SetPrologFlag_2(BuiltIn): '''set_prolog_flag(+flag, @nonvar) A goal", "= ''.join([chr(code.value) for code in codelist.as_list()]) try: # the parser needs an End", "is used to add Clause to the database before all existing clauses of", "t = elements.head return self.unify(term, t) elif len(elements) > 1: name = elements.head.name", "if isvariable(number): for element in codelist.as_list(): if isvariable(element): self.throw_instantiation_error() if not isvariable(number) and", "by the predicate indicator Pred and all its clauses, leaving the database in", "errors if arity.value < 0: self.throw_domain_error('not_less_than_zero', arity) # error on access permission to", "pi) self.kb.assert_clause(clause, append=True) return True class Retract_1(BuiltIn): '''retract(+clause) retract(Clause) is true iff the", "value) def pick_flag(self, flag, value): f = self.flags.pop() return self.unify(flag, Atomic(f.name)) and self.unify(value,", "len(elements) > 1: name = elements.head.name t = Compound(name, *elements.as_list()[1:]) return self.unify(term, t)", "Python because in ISO Prolog x // y is valid only when x", "return x / y def intdivide(x, y): '''Redefined w.r.t. 
Python because in ISO", "None): self.clauses.append(Compound('clause', h, b)) return self.pick_clause(head, body) def reexecute(self, head, body): self.reset_substitution() return", "+ '.').read_term() return self.unify(number, n) except InvalidTermException as e: self.throw_syntax_error(Atomic(s)) else: chars =", "'compound/1' : Compound_1, 'nonvar/1' : Nonvar_1, 'number/1' : Number_1, # Term comparison (ISO", "core if core.unify(pi, indicator) is not None: self.indicators.append(indicator) return self.pick_indicator(pi) def reexecute(self, pi):", "def module(x, y): '''Redefined w.r.t. Python because in ISO Prolog mod(x, y) is", "(ISO 9.4) ### class Is_2(BuiltIn): \"\"\"is(?term, @evaluable) 'is'(Result, Expression) is true iff the", "### Term unification (ISO 8.2) ### class Unify_2(BuiltIn): \"\"\"'='(?term, ?term) If X and", "List) is true iff List is a list whose elements are the one-char", "isvariable(elements.head): self.throw_instantiation_error() if islist(elements) and not isatom(elements.head) and len(elements) > 1: self.throw_type_error('atom', elements.head)", "name = elements.head.name t = Compound(name, *elements.as_list()[1:]) return self.unify(term, t) else: return False", "if isinstance(term, Compound): l = List.from_list([Atomic(term.name)] + list(term.value[1:])) return self.unify(elements, l) if isinstance(term,", "h, b)) return self.pick_clause(head, body) def reexecute(self, head, body): self.reset_substitution() return self.pick_clause(head, body)", "def execute(self, x): return x.arity == 0 and isinstance(x.value, float) class Atomic_1(BuiltIn): '''atomic(@term)", "self.reset_substitution() return self.pick_clause(head, body) def pick_clause(self, head, body): if not self.clauses: return False", "Python because in ISO Prolog div(x, y) with x and y integers is", "not self.flags: return False return self.pick_flag(flag, value) def reexecute(self, flag, value): self.reset_substitution() if", "in self.s if e not in wt_list] from .. 
import core for wwtt,", "class Compound_1(BuiltIn): '''compound(@term) compound(X) is true iff X is a member of the", "findall = Findall_3(self.kb) findall.execute(Compound('+', self.witness, template), g, Variable('S')) s = findall.substitution['S'] self.s =", "0.''' def execute(self, term, name, arity): if isvariable(term) and isvariable(name): self.throw_instantiation_error() if isvariable(term)", "abolish(Pred) is true. It is used to remove from the database the procedure", "y is valid only when x and y are integers. Also, we need", "retrival and information (ISO 8.8) 'clause/2' : Clause_2, 'current_predicate/1' : CurrentPredicate_1, # Clause", ": TermGreaterThan_2, '@>=/2' : TermGreaterThanOrEqual_2, # Term creation and decomposition (ISO 8.5) 'functor/3'", "for c in charlist.as_list()] return atom.name == ''.join(chars) class AtomCodes_2(BuiltIn): '''atom_codes(+atom, ?character_code_list)\\natom_codes(-atom, +character_code_list)", "= [deref(c).name for c in charlist.as_list()] return self.unify(atom, Atomic(''.join(chars))) elif isvariable(charlist) or islist(charlist)", "self.throw_instantiation_error() if isnumber(head): self.throw_type_error('callable', head) # errors on the conversion of the clause", "and whose tail is a list of the arguments of Term.\"\"\" def execute(self,", "deref(term) if isinstance(term, Atomic): return set() s = set() if isinstance(term, Compound): for", "assembles as a list the solutions of a goal for each different instantiation", "return ord(char.name) == code.value class NumberChars_2(BuiltIn): '''number_chars(+number, ?character_list)\\nnumber_chars(-number, +character_list) number_chars(Number, List) is true", "self.indicators.pop() return self.unify(pi, i) ### ### Clause creation and destruction (ISO 8.9) ###", "procedure specified by the predicate indicator Pred and all its clauses, leaving the", "pi.name == '/' and pi.arity == 2: name, arity = pi.value[1:] if isvariable(name)", "ispartiallist(elements): 
self.throw_instantiation_error() if not islist(elements) and not ispartiallist(elements): self.throw_type_error('list', elements) if isvariable(term) and", "= term.predicate_indicator() functor = search_evaluable_functor(pi) if not functor: from ..core import PrologTypeError raise", "# TODO No undefined errors if isvariable(term): from ..core import PrologInstantiationError raise PrologInstantiationError()", "return s def existential_variable_set(term): '''The existential variables set EV of a term T", "are handled directly by the database from ..builtin import search_builtin if search_builtin(head): pi", "handled directly by the database t = tuple(Variable('_') for i in range(arity.value)) c", "and t2 retain validity # only in the context of the copy_term/2 built-in", "(not isvariable(instances) and (not islist(instances) and not ispartiallist(instances))): self.throw_type_error('list', instances) from .. import", "variable sets for each of the arguments of T.''' from ..core import deref", "else: head = clause body = Atomic.TRUE return self.pick_clause(head, body) def pick_clause(self, head,", "s_next = [e for e in self.s if e not in wt_list] from", "which is a renamed copy of Term_1.''' def execute(self, t1, t2): from ..", "deref(t) v = deref(v) if isinstance(t, Atomic) and isinstance(v, Atomic): return t ==", ": float_integer_part, 'float_fractional_part/1' : float_fractional_part, '**/2' : power, 'sin/1' : math.sin, 'cos/1' :", "'''Redefined w.r.t. 
Python because in ISO Prolog x ** y with x <", "iff evaluating E1 and E2 as expressions the corresponding arithmetic values are not", "identical terms.\"\"\" def execute(self, x, y): # The Python __eq__ method does not", "use write_canonical/1 lst = [Atomic(c) for c in chars] return self.unify(charlist, List.from_list(lst)) class", "def convert_to_term(head): if head.arity == 0: return Atomic(head.name) from ..core import renamed_copy return", "are not identical terms.\"\"\" def execute(self, x, y): # The Python __ne__ method", "'\\+/1' : Not_1, 'not/1' : Not_1, 'repeat/0' : Repeat_0, # Atomic term processing", "[e.value[2] for e in wt_list] s_next = [e for e in self.s if", "deref t = deref(t) v = deref(v) if isinstance(t, Atomic) and isinstance(v, Atomic):", "isvariable(atom1) and isvariable(atom2): s = atom12.name self.data = [(s[:i], s[i:], s) for i", "pick_bag(self, template, goal, instances): wt = self.s[0] wt_list = [e for e in", "false if X is a member of the set V, A, or CT.'''", "the database from ..builtin import search_builtin if search_builtin(head): pi = Compound('/', Atomic(head.name), Atomic(head.arity))", "but the order in which each list is found is undefined.''' def _create_solution_list(self,", "or isvariable(value): self.throw_instantiation_error() if not isvariable(flag) and not isatom(flag): self.throw_type_error('atom', flag) from ..", "import search_builtin if search_builtin(c): self.throw_permission_error('modify', 'static_procedure', pi) else: self.throw_type_error('predicate_indicator', pi) self.kb.abolish(pi) return True", "of the free variables in that goal. 
Each list is a sorted list,", "Setof_3(Bagof_3): '''setof(?term, +callable_term, ?list) setof/3 assembles as a list the solutions of a", "term.value[1:]) pi = term.predicate_indicator() functor = search_evaluable_functor(pi) if not functor: from ..core import", "\"\"\"'>='(@evaluable, @evaluable) '>='(E1, E2) is true iff evaluating E1 and E2 as expressions", "(Variable('_') for i in range(arity.value)) c = Compound(name.name, *t) return self.unify(term, c) return", "= [Atomic(c) for c in chars] return self.unify(charlist, List.from_list(lst)) class NumberCodes_2(BuiltIn): '''number_codes(+number, ?character_code_list)\\nnumber_codes(-number,", "T which is a renamed copy of Term_1.''' def execute(self, t1, t2): from", "iff X preceeds Y or X and Y are identical terms.\"\"\" def execute(self,", "w.r.t. Python because in ISO Prolog div(x, y) with x and y integers", "Arithmetic evaluation (ISO 8.6) ### Simple arithmetic functors (ISO 9.1) ### Other arithmetic", "because in ISO Prolog mod(x, y) is valid only when x and y", "database before all existing clauses of the procedure whose predicate is equal to", "to be altered.''' def execute(self, flag, value): if isvariable(flag) or isvariable(value): self.throw_instantiation_error() if", ": operator.add, '*/2' : operator.mul, '-/2' : operator.sub, '-/1' : operator.neg, '//2' :", "isatom(element) and len(element.name) != 1: self.throw_type_error('character', element) if isvariable(atom): from ..core import deref", "Test the ordering of two terms. '@>='(X, Y) is true iff Y preceeds", "x / y def intdivide(x, y): '''Redefined w.r.t. Python because in ISO Prolog", "are unifiable.\"\"\" def execute(self, x, y): # TODO prologlib crashes if you attempt", "isnumber(term): self.throw_type_error('callable', term) from .. 
import core caller = core.Caller() caller._kb = self.kb", "isatom(subatom): self.throw_type_error('atom', subatom) if (not isvariable(before) and not (isnumber(before) and isinstance(before.value, int))): self.throw_type_error('integer',", "undefined.''' def _create_solution_list(self, s): solutions = [] if s == List.EMPTY_LIST else s.as_list()", "return x != y class TermLessThan_2(BuiltIn): \"\"\"'@<'(@term, @term) Test the ordering of two", "database the procedure specified by the predicate indicator Pred and all its clauses,", "not s.endswith(atom2.name): return False else: i = s.index(atom2.name) self.data = [(s[:i], s[i:], s)]", "deref chars = [deref(c).name for c in charlist.as_list()] return self.unify(atom, Atomic(''.join(chars))) elif isvariable(charlist)", "Compound): l = List.from_list([Atomic(term.name)] + list(term.value[1:])) return self.unify(elements, l) if isinstance(term, Variable): #", "else: chars = [c.name for c in charlist.as_list()] return atom.name == ''.join(chars) class", "[(d, p) for (d, p) in self.data if len(n) - n.index(d, p) -", "isnumber(before) and before.value < 0: self.throw_domain_error('not_less_than_zero', before) if isnumber(length) and length.value < 0:", "iff X is a member of the set A.''' def execute(self, x): return", "is true iff the database contains at least one dynamic procedure with a", "self.throw_type_error('character', element) if isvariable(number) or islist(charlist): from ..parser import PrologParser, InvalidTermException s =", "if not isvariant(a1, a2): return False return True return False PREDICATES = {", "### class Clause_2(BuiltIn): '''clause(+head, ?callable_term) clause(Head, Body) is true iff: * the predicate", "template), g, Variable('S')) s = findall.substitution['S'] self.s = self._create_solution_list(s) if not self.s: return", "self.throw_instantiation_error() if not isvariable(char) and len(char.name) != 1: self.throw_type_error('character', char) if not isvariable(code)", "if elements.name == '.' 
and elements.arity == 2: if len(elements) == 1: t", "def execute(self, x, y): from .. import core return core.unify(x, y) is None", "an End Token n = PrologParser(s + '.').read_term() return self.unify(number, n) except InvalidTermException", "execute(self): exit(0) class Halt_1(BuiltIn): '''halt(+integer) halt(X) neither succeeds nor fails. It has the", "@term) Test the ordering of two terms. '@<'(X, Y) is true iff X", "execute(self, clause): if clause.predicate_indicator() == ':-/2': head = clause.value[1] body = clause.value[2] else:", "True class Assertz_1(BuiltIn): '''assertz(@clause) assertz(Clause) is true. It is used to add Clause", "term processing (ISO 8.16) ### class AtomLength_2(BuiltIn): '''atom_length(+atom, ?integer) atom_length(Atom, Length) is true", "return self.unify(charlist, List.from_list(lst)) class NumberCodes_2(BuiltIn): '''number_codes(+number, ?character_code_list)\\nnumber_codes(-number, ?character_code_list) number_codes(Number, List) is true iff", "x and y are integers. Also, we need to manage ZeroDivisionError errors on", "x): return isinstance(x, (Compound, List)) class Nonvar_1(BuiltIn): '''nonvar(@term) nonvar(X) is true iff X", "= Compound('/', Atomic(head.name), Atomic(head.arity)) self.throw_permission_error('modify', 'static_procedure', pi) self.clauses_to_unify = [] self.clauses_to_remove = []", "the set V.''' def execute(self, x): return not isvariable(x) class Number_1(BuiltIn): '''number(@term) number(X)", "< 0: self.throw_domain_error('not_less_than_zero', length) if isnumber(after) and after.value < 0: self.throw_domain_error('not_less_than_zero', after) n", "and destruction (ISO 8.9) ### class Asserta_1(BuiltIn): '''asserta(@clause) asserta(Clause) is true. 
It is", "creation and destruction (ISO 8.9) 'asserta/1' : Asserta_1, 'assertz/1' : Assertz_1, 'retract/1' :", "### Type testing (ISO 8.3) ### class Var_1(BuiltIn): '''var(@term) var(X) is true iff", "as follows: * if T is an atomic term, then Sv is the", "'''number(@term) number(X) is true if X is a member of the set I", "the Term hierarchy classes def variable_set(term): '''The variable set Sv of a term", "False self.kb.retract(self.clauses_to_remove.pop(0)) c = self.clauses_to_unify.pop(0) return self.unify(Compound('clause', head, body), c) class Abolish_1(BuiltIn): '''abolish(@predicate_indicator)", "atom1) if not isvariable(atom2) and not isatom(atom2): self.throw_type_error('atom', atom2) if not isvariable(atom12) and", "from ..core import BuiltIn ### ### Term unification (ISO 8.2) ### class Unify_2(BuiltIn):", "are not unifiable.\"\"\" def execute(self, x, y): from .. import core return core.unify(x,", "the set A.''' def execute(self, x): return isatom(x) class Integer_1(BuiltIn): '''integer(@term) integer(X) is", "p) for (d, p) in self.data if len(n) - n.index(d, p) - len(d)", "False ### ### Arithmetic evaluation (ISO 8.6) ### Simple arithmetic functors (ISO 9.1)", "self._create_solution_list(s) if not self.s: return False return self.pick_bag(template, goal, instances) def reexecute(self, template,", "execute(self, pi): if not isvariable(pi) and not (pi.name == '/' and pi.arity ==", "self.throw_type_error('atom', elements.head) if islist(elements) and isinstance(elements.head, Compound) and len(elements) > 1: self.throw_type_error('atomic', elements.head)", ":- Body. 
It is used to remove those unifying clauses from the database.'''", "isnumber(length) and length.value < 0: self.throw_domain_error('not_less_than_zero', length) size = Atomic(len(atom.name)) return self.unify(length, size)", "true iff X and Y are not identical terms.\"\"\" def execute(self, x, y):", "E1 and E2 as expressions the corresponding arithmetic values are not equal.\"\"\" def", "len(d) == length.value] if isinstance(after, Atomic): self.data = [(d, p) for (d, p)", "number of characters of the name of the atom Atom.''' def execute(self, atom,", "the name of AtomR.''' def execute(self, atom, before, length, after, subatom): if isvariable(atom):", "f def power(x, y): '''Redefined w.r.t. Python because in ISO Prolog x **", "if s == List.EMPTY_LIST else s.as_list() solutions = list(set(solutions)) solutions.sort() return solutions ###", "if (not isvariable(length) and not (isnumber(length) and isinstance(length.value, int))): self.throw_type_error('integer', length) if (not", "[e for e in self.s if isvariant(wt.value[1], e.value[1])] t_list = [e.value[2] for e", "control (ISO 8.15) ### class Not_1(BuiltIn): \"\"\"not(@callable_term) not(Term) is true iff call(Term) is", "mgu) if isinstance(term, Compound): args = [] for arg in term.value[1:]: args.append(ground(arg, mgu))", "class TermGreaterThanOrEqual_2(BuiltIn): \"\"\"'@>=(@term, @term) Test the ordering of two terms. 
'@>='(X, Y) is", "if isvariable(name) or isvariable(arity): self.throw_instantiation_error() if not isinstance(arity.value, int): self.throw_type_error('integer', arity) if not", "raise PrologTypeError('number', term) def search_evaluable_functor(name): import math import operator d = {'+/2' :", "self.s if isvariant(wt.value[1], e.value[1])] t_list = [e.value[2] for e in wt_list] s_next =", "y < 0: from ..core import PrologEvaluationError raise PrologEvaluationError('undefined') return float(x ** y)", "isinstance(x, Atomic) class Compound_1(BuiltIn): '''compound(@term) compound(X) is true iff X is a member", "self.unify(term, name) and self.unify(arity, Atomic(0)) if isinstance(term, (Compound, List)): return (self.unify(Atomic(term.name), name) and", "list(term.value[1:])) return self.unify(elements, l) if isinstance(term, Variable): # elements is a list if", "ispartiallist(codelist): self.throw_type_error('list', codelist) for element in codelist.as_list(): if isvariable(element): self.throw_instantiation_error() if not isvariable(element):", "the list whose only element is Term, or * Term is a compound", "'''atom_length(+atom, ?integer) atom_length(Atom, Length) is true iff integer Length equals the number of", "unifies with ^(V, G) then EV is the union of the variable set", "PrologTypeError raise PrologTypeError('integer', Atomic(s)) return x & y def bitor(x, y): '''Redefined w.r.t.", "from ..core import PrologEvaluationError raise PrologEvaluationError('undefined') from math import log return log(x) def", "iff characters of the name of the atom Atom_12 are the result of", "charlist.as_list(): if isvariable(element): self.throw_instantiation_error() if not isvariable(number) and not isnumber(number): self.throw_type_error('number', number) if", "### class ArithmeticEqual_2(BuiltIn): \"\"\"'=:='(@evaluable, @evaluable) '=:='(E1, E2) is true iff evaluating E1 and", "true iff X and Y are unifiable.\"\"\" def execute(self, x, y): # TODO", "return self.unify(atom, 
Atomic(''.join(chars))) elif isvariable(charlist) or islist(charlist) or ispartiallist(charlist): chars = [Atomic(c) for", "Atomic(pi)) return functor(*args) from ..core import PrologTypeError raise PrologTypeError('number', term) def search_evaluable_functor(name): import", "codelist.as_list(): if not isvariable(element): try: chr(element.value) except UnicodeDecodeError: self.throw_representation_error(element) if isvariable(number) or islist(codelist):", "0: return Atomic(head.name) from ..core import renamed_copy return renamed_copy(head) class CurrentPredicate_1(BuiltIn): '''current_predicate(?predicate_indicator) current_predicate(PI)", "if not isvariable(element): try: chr(element.value) except UnicodeDecodeError: self.throw_representation_error(element) if isvariable(atom): chars = [chr(code.value)", "'atom_codes/2' : AtomCodes_2, 'char_code/2' : CharCode_2, 'number_chars/2' : NumberChars_2, 'number_codes/2' : NumberCodes_2, #", "sorted list, but the order in which each list is found is undefined.'''", "empty set * else if T unifies with ^(V, G) then EV is", "..core import PrologTypeError raise PrologTypeError('integer', Atomic(s)) return x | y def bitnot(x): '''Redefined", "name) if isinstance(term, Atomic): return self.unify(term, name) and self.unify(arity, Atomic(0)) if isinstance(term, (Compound,", "y) class NotUnifiable_2(BuiltIn): \"\"\"'\\\\='(@term, @term) If X and Y are NSTO (Not Subject", "l) if isinstance(term, Variable): # elements is a list if elements.name == '.'", "0: t = (Variable('_') for i in range(arity.value)) c = Compound(name.name, *t) return", "PrologTypeError('float', Atomic(x)) from math import modf f, i = modf(x) return i def", "== subatom.value] if not self.data: return False return self.pick_data(atom, before, length, after, subatom)", "our own.''' if not isinstance(n, int): from ..core import PrologTypeError raise PrologTypeError('integer', Atomic(n))", "if a is not None and a2 != a: return False else: bijection[a1]", "not 
isvariable(atom12) and not isatom(atom12): self.throw_type_error('atom', atom12) if isvariable(atom1) and isvariable(atom2): s =", "atom.name] return self.unify(codelist, List.from_list(codes)) else: chars = [chr(code.value) for code in codelist.as_list()] return", "self.pick_indicator(pi) def reexecute(self, pi): self.reset_substitution() return self.pick_indicator(pi) def pick_indicator(self, pi): if not self.indicators:", "term class Bagof_3(BuiltIn): '''bagof(?term, +callable_term, ?list) bagof(Template, Goal, Instances) assembles as a list", "** y) def logarithm(x): '''Redefined w.r.t. Python because we need to manage ValueError", "because in ISO Prolog float_integer_part(x) is valid only when x is a float.'''", "= evaluate_expression(e1) v2 = evaluate_expression(e2) return v1 > v2 class ArithmeticGreaterThanOrEqual_2(BuiltIn): \"\"\"'>='(@evaluable, @evaluable)", "Atomic(c[1])) and self.unify(atom12, Atomic(c[2]))) class SubAtom_5(BuiltIn): '''sub_atom(+atom, ?integer, ?integer, ?integer, ?atom) sub_atom(Atom, Before,", "variable_set(term): '''The variable set Sv of a term T is a set of", "correspond to the successive characters of the name of atom Atom, and the", "expressions the corresponding arithmetic value of E1 is greater than the corresponding arithmetic", "Atomic(f.value)) class Halt_0(BuiltIn): '''halt halt neither succeeds nor fails. It has the side", "renamed copy of Term_1.''' def execute(self, t1, t2): from .. 
import core #t", "< 0: self.throw_domain_error('not_less_than_zero', n) if n.value >= len(term.value): return False return self.unify(arg, term.value[n.value])", "to a user-defined # procedure is handled directly by the database t =", "if isvariable(term) and not isatom(name) and arity.value > 0: self.throw_type_error('atom', name) if isinstance(term,", "goal set_prolog_flag(Flag, Value) enables the value associated with a Prolog flag to be", "'''setof(?term, +callable_term, ?list) setof/3 assembles as a list the solutions of a goal", "import core if core.unify(pi, indicator) is not None: self.indicators.append(indicator) return self.pick_indicator(pi) def reexecute(self,", "term, arg): if isvariable(n) or isvariable(term): self.throw_instantiation_error() if not isinstance(n.value, int): self.throw_type_error('integer', n)", "in self.data if len(n) - n.index(d, p) - len(d) == after.value] if isinstance(subatom,", "unification (ISO 8.2) ### class Unify_2(BuiltIn): \"\"\"'='(?term, ?term) If X and Y are", "arithmetic values are not equal.\"\"\" def execute(self, e1, e2): if isvariable(e1) or isvariable(e2):", "is Result.\"\"\" def execute(self, result, expression): if isvariable(expression): self.throw_instantiation_error() c = evaluate_expression(expression) return", "Findall_3(self.kb) findall.execute(Compound('+', self.witness, template), g, Variable('S')) s = findall.substitution['S'] self.s = self._create_solution_list(s) if", "s = ''.join([chr(code.value) for code in codelist.as_list()]) try: # the parser needs an", "character of the name.''' def execute(self, atom, codelist): if not isvariable(atom) and not", "self.throw_instantiation_error() if not isvariable(atom) and not isatom(atom): self.throw_type_error('atom', atom) if not isvariable(subatom) and", "as expressions the corresponding arithmetic values are equal.\"\"\" def execute(self, e1, e2): if", "the context of the copy_term/2 built-in mgu = core.unify(t2, t) if mgu is", "Y are not unifiable.\"\"\" 
def execute(self, x, y): from .. import core return", "class Setof_3(Bagof_3): '''setof(?term, +callable_term, ?list) setof/3 assembles as a list the solutions of", "'''sub_atom(+atom, ?integer, ?integer, ?integer, ?atom) sub_atom(Atom, Before, Length, After, Sub_atom) is true iff", "head) is not None and core.unify(b, body) is not None): self.clauses_to_unify.append(Compound('clause', h, b))", "to manage TypeError errors (e.g. n as float) on our own.''' if not", "chars = [chr(code.value) for code in codelist.as_list()] return atom.name == ''.join(chars) class CharCode_2(BuiltIn):", ": Compound_1, 'nonvar/1' : Nonvar_1, 'number/1' : Number_1, # Term comparison (ISO 8.4)", "# outside the context of the copy_term/2 built-in if t2.name in mgu: #", "8.2) '=/2' : Unify_2, '\\=/2' : NotUnifiable_2, # Type testing (ISO 8.3) 'var/1'", "else: return ord(char.name) == code.value class NumberChars_2(BuiltIn): '''number_chars(+number, ?character_list)\\nnumber_chars(-number, +character_list) number_chars(Number, List) is", "be altered.''' def execute(self, flag, value): if isvariable(flag) or isvariable(value): self.throw_instantiation_error() if not", "math.sin, 'cos/1' : math.cos, 'atan/1' : math.atan, 'exp/1' : math.exp, 'log/1' : logarithm,", "islist(instances) and not ispartiallist(instances))): self.throw_type_error('list', instances) from .. 
import core caller = core.Caller()", "then Sv is the union of the variable sets for each of the", "copy_term/2 built-in mgu = core.unify(t2, t) if mgu is not None: if mgu:", "'='(X, Y) is true iff X and Y are unifiable.\"\"\" def execute(self, x,", "if (not isvariable(length) and not (isnumber(length) and isinstance(length.value, int))): self.throw_type_error('integer', length) if isnumber(length)", "[] for arg in term.value[1:]: args.append(ground(arg, mgu)) return Compound(term.name, *args) return term class", "return isinstance(x, Atomic) class Compound_1(BuiltIn): '''compound(@term) compound(X) is true iff X is a", "s def leftshift(n, s): '''Redefined w.r.t. Python because we need to manage TypeError", "== 2): self.throw_type_error('predicate_indicator', pi) self.indicators = [] for i in self.kb: n, a", "i = s.index(atom2.name) self.data = [(s[:i], s[i:], s)] elif isvariable(atom2): s = atom12.name", "!= v.arity: return False bijection = {} for a1, a2 in zip(t.value[1:], v.value[1:]):", "NumberCodes_2, # Implementation defined hooks (ISO 8.17) 'set_prolog_flag/2' : SetPrologFlag_2, 'current_prolog_flag/2' : CurrentPrologFlag_2,", "the iterated goal term of Goal * else G is T.''' if term.name", "if isvariable(term): from ..core import PrologInstantiationError raise PrologInstantiationError() if term.arity == 0 and", "except UnicodeDecodeError: self.throw_representation_error(element) if isvariable(atom): chars = [chr(code.value) for code in codelist.as_list()] return", "term variables bindings # outside the context of the copy_term/2 built-in if t2.name", "own.''' if not isinstance(n, int): from ..core import PrologTypeError raise PrologTypeError('integer', Atomic(n)) if", "if not isvariable(element): try: chr(element.value) except UnicodeDecodeError: self.throw_representation_error(element) if isvariable(number) or islist(codelist): from", "True ### ### Atomic term processing (ISO 8.16) ### class AtomLength_2(BuiltIn): '''atom_length(+atom, ?integer)", "remove 
those unifying clauses from the database.''' def execute(self, clause): if clause.predicate_indicator() ==", "raise PrologEvaluationError('zero_divisor') if isinstance(x, int) and isinstance(y, int): return x // y return", "list, but the order in which each list is found is undefined.''' def", "if isvariable(atom): self.throw_instantiation_error() if not isatom(atom): self.throw_type_error('atom', atom) if (not isvariable(length) and not", "if not isinstance(s, int): from ..core import PrologTypeError raise PrologTypeError('integer', Atomic(s)) return n", "execute(self, x): if isvariable(x): self.throw_instantiation_error() if not isvariable(x) and not isnumber(x) and not", "x, y): # The Python __eq__ method does not hold Prolog # semantics", "core.unify(x, y) is None ### ### Type testing (ISO 8.3) ### class Var_1(BuiltIn):", "or ispartiallist(charlist): chars = [Atomic(c) for c in atom.name] return self.unify(charlist, List.from_list(chars)) else:", "arity) if isvariable(term) and not isinstance(name, Atomic): self.throw_type_error('atomic', name) if isvariable(term) and not", "not None and core.unify(b, body) is not None): self.clauses.append(Compound('clause', h, b)) return self.pick_clause(head,", "for element in charlist.as_list(): if isatom(element) and len(element.name) != 1: self.throw_type_error('character', element) if", "= [] procedure = self.kb.search(head) if not procedure: return False from .. 
import", "if isnumber(length) and length.value < 0: self.throw_domain_error('not_less_than_zero', length) size = Atomic(len(atom.name)) return self.unify(length,", "the number of characters of the name of AtomL, Length is the number", "if isvariable(atom1) and isvariable(atom2): s = atom12.name self.data = [(s[:i], s[i:], s) for", "return self.unify(elements, l) if isinstance(term, Compound): l = List.from_list([Atomic(term.name)] + list(term.value[1:])) return self.unify(elements,", "class Bagof_3(BuiltIn): '''bagof(?term, +callable_term, ?list) bagof(Template, Goal, Instances) assembles as a list the", "a is not None and a2 != a: return False else: bijection[a1] =", "is not None and core.unify(b, body) is not None): self.clauses_to_unify.append(Compound('clause', h, b)) self.clauses_to_remove.append(clause)", "flag, value): f = self.flags.pop() return self.unify(flag, Atomic(f.name)) and self.unify(value, Atomic(f.value)) class Halt_0(BuiltIn):", "v.name or t.arity != v.arity: return False bijection = {} for a1, a2", "the value currently associated with it.''' def execute(self, flag, value): from .. 
import", "is true iff call(Term) is false.\"\"\" def execute(self, term): if isvariable(term): self.throw_instantiation_error() if", "a bijection s of the variables of the former to the variables of", "each element is the character code for the corresponding character of the name.'''", "to remove those unifying clauses from the database.''' def execute(self, clause): if clause.predicate_indicator()", "and isvariable(atom12): self.throw_instantiation_error() if isvariable(atom2) and isvariable(atom12): self.throw_instantiation_error() if not isvariable(atom1) and not", "False # the order in which predicate indicators are found by # current_predicate/1", "Atom_2 to the characters of the name of the atom Atom_1.''' def execute(self,", "= [] for i in self.kb: n, a = i.split('/') indicator = Compound('/',", "n.value < 0: self.throw_domain_error('not_less_than_zero', n) if n.value >= len(term.value): return False return self.unify(arg,", "int) and isinstance(y, int): return x // y return x / y def", "Compound): args = [] for arg in term.value[1:]: args.append(ground(arg, mgu)) return Compound(term.name, *args)", "of each element is the character code for the corresponding character of the", "is a term defined recursively as follows: * if T unifies with ^(_,", "not x: return 0 if isinstance(x, int) else 0.0 from math import copysign", "indicator) is not None: self.indicators.append(indicator) return self.pick_indicator(pi) def reexecute(self, pi): self.reset_substitution() return self.pick_indicator(pi)", "and List is the list whose only element is Term, or * Term", "t = t1._copy_term() # Can't directly use BuiltIn.unify because the bindings # between", "when x is a float.''' if not isinstance(x, float): from ..core import PrologTypeError", "and self.unify(after, Atomic(a)) and self.unify(subatom, Atomic(s))) class AtomChars_2(BuiltIn): '''atom_chars(+atom, ?character_list)\\natom_chars(-atom, +character_list) atom_chars(Atom, List)", "v2 class 
ArithmeticGreaterThanOrEqual_2(BuiltIn): \"\"\"'>='(@evaluable, @evaluable) '>='(E1, E2) is true iff evaluating E1 and", "Arithmetic comparison (ISO 8.7) '=:=/2' : ArithmeticEqual_2, '=\\=/2' : ArithmeticNotEqual_2, '</2' : ArithmeticLessThan_2,", "module, 'rem/2' : module, 'floor/1' : math.floor, 'round/1' : round, 'ceiling/1' : math.ceil,", "true iff characters of the name of the atom Atom_12 are the result", "clauses from the database.''' def execute(self, clause): if clause.predicate_indicator() == ':-/2': head =", "Clause.''' def execute(self, clause): head = clause.value[1] if clause.predicate_indicator() == ':-/2' else clause", "Term is an atomic term equal to Name and Arity is 0.''' def", ": logarithm, 'sqrt/1' : squareroot, '>>/2' : rightshift, '<</2' : leftshift, '/\\\\/2' :", "return self.pick_clause(head, body) def pick_clause(self, head, body): if not self.clauses_to_remove: return False self.kb.retract(self.clauses_to_remove.pop(0))", "Atom_1.''' def execute(self, atom1, atom2, atom12): if isvariable(atom1) and isvariable(atom12): self.throw_instantiation_error() if isvariable(atom2)", "import deref if isinstance(term, Variable): if term.isfree(): return {term} else: term = deref(term)", "is true iff call(Term) is false.\"\"\" # \"\"\"'\\\\+'(@callable_term) # # '\\\\+'(Term) is true", "the ordering of two terms. '\\=='(X, Y) is true iff X and Y", "is a member of the set V, A, or CT.''' def execute(self, x):", "empty set * else if T is a variable then Sv is {T}", "> 1: self.throw_type_error('atom', elements.head) if islist(elements) and isinstance(elements.head, Compound) and len(elements) > 1:", "free_variable_set(goal, template) self.witness = Compound('witness', *fvs) if fvs else Atomic('witness') g = iterated_goal_term(goal)", "reexecute(self, atom1, atom2, atom12): self.reset_substitution() if not self.data: return False return self.pick_data(atom1, atom2,", "'''repeat repeat is true. repeat is re-executable. 
''' def execute(self): return True def", "is a set of variables defined as the union of the variable set", "AtomLength_2(BuiltIn): '''atom_length(+atom, ?integer) atom_length(Atom, Length) is true iff integer Length equals the number", "if isvariable(expression): self.throw_instantiation_error() c = evaluate_expression(expression) return self.unify(result, Atomic(c)) def evaluate_expression(term): # TODO", "bitor(x, y): '''Redefined w.r.t. Python because we need to manage TypeError errors (e.g.", "and isinstance(y, float): from ..core import PrologEvaluationError raise PrologEvaluationError('undefined') if not x and", "variable X in the former by Xs.''' from ..core import deref t =", "head is the functor name of Term and whose tail is a list", "atom, before, length, after, subatom): if isvariable(atom): self.throw_instantiation_error() if not isvariable(atom) and not", "true if X is a member of the set I or F and", "if value: return value else: return ground(term.binding(), mgu) if isinstance(term, Compound): args =", "class Not_1(BuiltIn): \"\"\"not(@callable_term) not(Term) is true iff call(Term) is false.\"\"\" # \"\"\"'\\\\+'(@callable_term) #", "= clause body = Atomic.TRUE if isvariable(head): self.throw_instantiation_error() if isnumber(head): self.throw_type_error('callable', head) #", "t1, t2): from .. import core #t = core.renamed_copy(t1) t = t1._copy_term() #", "1: self.throw_type_error('character', element) if isvariable(atom): from ..core import deref chars = [deref(c).name for", "body), c) def convert_clause_to_term(head, body): return (convert_to_term(head), convert_to_term(body)) def convert_to_term(head): if head.arity ==", "a variable X not occurring in Template or Goal would be instantiated by", "and x.name == '_' and y.name == '_'): return False return x >=", "the same state as if the procedure identified by Pred had never existed.'''", "self.throw_type_error('list', instances) from .. 
import core caller = core.Caller() caller._kb = self.kb values", "0 and isinstance(x.value, int) class Float_1(BuiltIn): '''float(@term) float(X) is true iff X is", "from .. import core return core.unify(x, y) is None ### ### Type testing", "from math import sqrt return sqrt(x) def rightshift(n, s): '''Redefined w.r.t. Python because", "List is the list whose head is the functor name of Term and", "max_arity related error if isinstance(term, Atomic): l = List(term) return self.unify(elements, l) if", "of variables defined as the union of the variable set of V and", "iff Flag is a flag supported by the processor, and Value is the", "8.5) 'functor/3' : Functor_3, 'arg/3' : Arg_3, '=../2' : Univ_2, 'copy_term/2' : CopyTerm_2,", "self.throw_syntax_error(Atomic(s)) else: chars = list(str(number.value)) # FIXME this should use write_canonical/1 lst =", "if not procedure._public: pi = Compound('/', Atomic(head.name), Atomic(head.arity)) self.throw_permission_error('access', 'private_procedure', pi) from ..", "Compound('/', Atomic(head.name), Atomic(head.arity)) self.throw_permission_error('modify', 'static_procedure', pi) self.kb.assert_clause(clause, append=False) return True class Assertz_1(BuiltIn): '''assertz(@clause)", "pick_flag(self, flag, value): f = self.flags.pop() return self.unify(flag, Atomic(f.name)) and self.unify(value, Atomic(f.value)) class", "if not isinstance(x, float): from ..core import PrologTypeError raise PrologTypeError('float', Atomic(x)) from math", "PrologTypeError raise PrologTypeError('evaluable', Atomic(pi)) return functor(*args) from ..core import PrologTypeError raise PrologTypeError('number', term)", "PrologTypeError('number', term) def search_evaluable_functor(name): import math import operator d = {'+/2' : operator.add,", "and x.name == '_' and y.name == '_'): return False return x ==", "Arg_3, '=../2' : Univ_2, 'copy_term/2' : CopyTerm_2, # Arithmetic evaluation (ISO 8.6) 'is/2'", "from ..parser import Atomic, Variable, Compound, List 
from ..parser import isvariable, isatom, isnumber,", "if value.name not in f.allowed: culprit = Compound('+', flag, value) self.throw_domain_error('flag_value', culprit) core._FLAGS[flag.name]", "is false if X is a member of the set V, A, or", "term) from .. import core caller = core.Caller() caller._kb = self.kb result =", "value.name not in f.allowed: culprit = Compound('+', flag, value) self.throw_domain_error('flag_value', culprit) core._FLAGS[flag.name] =", "@evaluable) '=:='(E1, E2) is true iff evaluating E1 and E2 as expressions the", "def execute(self, number, charlist): if isvariable(number) and ispartiallist(charlist): self.throw_instantiation_error() if isvariable(number): for element", "if isvariable(e1) or isvariable(e2): self.throw_instantiation_error() v1 = evaluate_expression(e1) v2 = evaluate_expression(e2) return v1", "variable then Sv is {T} * else if T is a compound term", "isvariable(term) and not isinstance(arity.value, int): self.throw_type_error('integer', arity) # TODO Missing max_arity related error", ": math.cos, 'atan/1' : math.atan, 'exp/1' : math.exp, 'log/1' : logarithm, 'sqrt/1' :", "flag) f = core._FLAGS[flag.name] if len(f.allowed) == 1: self.throw_permission_error('modify', 'flag', flag) if value.name", "add Clause to the database before all existing clauses of the procedure whose", "Pred had never existed.''' def execute(self, pi): if isvariable(pi): self.throw_instantiation_error() if pi.name ==", "and isvariable(y) and x.name == '_' and y.name == '_'): return False return", ": CopyTerm_2, # Arithmetic evaluation (ISO 8.6) 'is/2' : Is_2, # Arithmetic comparison", "= modf(x) return i def float_fractional_part(x): '''Redefined w.r.t. 
Python because in ISO Prolog", "atom_chars(Atom, List) is true iff List is a list whose elements are the", "directly by the database from ..builtin import search_builtin if search_builtin(head): pi = Compound('/',", "isvariable(element): self.throw_instantiation_error() if not isvariable(element): try: chr(element.value) except UnicodeDecodeError: self.throw_representation_error(element) if isvariable(atom): chars", "':-/2' else clause if isvariable(head): self.throw_instantiation_error() if isnumber(head): self.throw_type_error('callable', head) # errors on", "the predicate indicator Pred and all its clauses, leaving the database in the", "is not None: if mgu: t2.apply(mgu) # Do not propagate renamed term variables", "import core return core.unify(x, y) is None ### ### Type testing (ISO 8.3)", "int(s) if isinstance(x, int) else s def float_integer_part(x): '''Redefined w.r.t. Python because in", "ordering of two terms. '@<'(X, Y) is true iff X preceeds Y.\"\"\" def", "isinstance(x, int): from ..core import PrologTypeError raise PrologTypeError('integer', Atomic(x)) if not isinstance(y, int):", "identified by Pred had never existed.''' def execute(self, pi): if isvariable(pi): self.throw_instantiation_error() if", "of a term T with respect to a term V is a set", ": Clause_2, 'current_predicate/1' : CurrentPredicate_1, # Clause creation and destruction (ISO 8.9) 'asserta/1'", "= Compound('witness', *fvs) if fvs else Atomic('witness') g = iterated_goal_term(goal) findall = Findall_3(self.kb)", "codes corresponding to a character sequence of Number which could be output.''' def", "'nonvar/1' : Nonvar_1, 'number/1' : Number_1, # Term comparison (ISO 8.4) '@=</2' :", "x.arity == 0 and isinstance(x.value, float) class Atomic_1(BuiltIn): '''atomic(@term) atomic(X) is true if", "call(Term) is false.\"\"\" # \"\"\"'\\\\+'(@callable_term) # # '\\\\+'(Term) is true iff call(Term) is", "T is an atomic term, then Sv is the empty set * else", "Atomic(y)) return x % y def 
sign(x): '''Redefined w.r.t. Python because in ISO", "a term H :- B which unifies with Head :- Body.''' def execute(self,", "return [] if s == List.EMPTY_LIST else s.as_list() class Setof_3(Bagof_3): '''setof(?term, +callable_term, ?list)", "all variables in X by new variables.''' def execute(self, template, goal, instances): if", "not procedure._public: pi = Compound('/', Atomic(head.name), Atomic(head.arity)) self.throw_permission_error('access', 'private_procedure', pi) from .. import", "procedure identified by Pred had never existed.''' def execute(self, pi): if isvariable(pi): self.throw_instantiation_error()", "of atom Atom, and the value of each element is the character code", "+ l) return (self.unify(before, Atomic(b)) and self.unify(length, Atomic(l)) and self.unify(after, Atomic(a)) and self.unify(subatom,", "= self.data.pop(0) return (self.unify(atom1, Atomic(c[0])) and self.unify(atom2, Atomic(c[1])) and self.unify(atom12, Atomic(c[2]))) class SubAtom_5(BuiltIn):", "if isnumber(goal): self.throw_type_error('callable', goal) if (not isvariable(instances) and (not islist(instances) and not ispartiallist(instances))):", "\"\"\"is(?term, @evaluable) 'is'(Result, Expression) is true iff the value of evaluating Expression as", "t in zip(wt_list, t_list): ww = wwtt.value[1] #from copy import deepcopy #subst =", "whose tail is a list of the arguments of Term.\"\"\" def execute(self, term,", "if isatom(element) and len(element.name) != 1: self.throw_type_error('character', element) if isvariable(atom): from ..core import", "T.''' from ..core import deref if isinstance(term, Variable): if term.isfree(): return {term} else:", "name of Term and whose tail is a list of the arguments of", "< 0 and isinstance(y, float): from ..core import PrologEvaluationError raise PrologEvaluationError('undefined') if not", "return isatom(x) class Integer_1(BuiltIn): '''integer(@term) integer(X) is true iff X is a member", "name) and self.unify(Atomic(term.arity), arity)) if 
isinstance(term, Variable): if isinstance(name, Atomic) and arity.value ==", "v1 > v2 class ArithmeticGreaterThanOrEqual_2(BuiltIn): \"\"\"'>='(@evaluable, @evaluable) '>='(E1, E2) is true iff evaluating", "= modf(x) return f def power(x, y): '''Redefined w.r.t. Python because in ISO", "# TODO prologlib crashes if you attempt to unify two STO terms by", "which predicate indicators are found by # current_predicate/1 is implementation dependent i =", "n as float) on our own.''' if not isinstance(n, int): from ..core import", "if not isvariable(flag) and not isatom(flag): self.throw_type_error('atom', flag) from .. import core #", "Abolish_1(BuiltIn): '''abolish(@predicate_indicator) abolish(Pred) is true. It is used to remove from the database", "of two terms. '@=<'(X, Y) is true iff X preceeds Y or X", "core for wwtt, t in zip(wt_list, t_list): ww = wwtt.value[1] #from copy import", "If X and Y are NSTO (Not Subject To Occur-check) then '\\\\='(X, Y)", "'atom_chars/2' : AtomChars_2, 'atom_codes/2' : AtomCodes_2, 'char_code/2' : CharCode_2, 'number_chars/2' : NumberChars_2, 'number_codes/2'", "False return self.pick_flag(flag, value) def reexecute(self, flag, value): self.reset_substitution() if not self.flags: return", "caller.solve(goal) while result: from copy import deepcopy v = ground(deepcopy(template), caller.currsubst()) #values.append(core.renamed_copy(v)) values.append(v._copy_term())", "len(n) - n.index(d, p) - len(d) == after.value] if isinstance(subatom, Atomic): self.data =", "from math import log return log(x) def squareroot(x): '''Redefined w.r.t. Python because we", "class TermNotIdentical_2(BuiltIn): \"\"\"'\\=='(@term, @term) Test the ordering of two terms. 
'\\=='(X, Y) is", "atom2, atom12) def reexecute(self, atom1, atom2, atom12): self.reset_substitution() if not self.data: return False", "x > y class TermGreaterThanOrEqual_2(BuiltIn): \"\"\"'@>=(@term, @term) Test the ordering of two terms.", "if isnumber(after) and after.value < 0: self.throw_domain_error('not_less_than_zero', after) n = atom.name start =", "result = caller.solve(term) return not result class Repeat_0(BuiltIn): '''repeat repeat is true. repeat", "y) def logarithm(x): '''Redefined w.r.t. Python because we need to manage ValueError errors", "= variable_set(v) est = existential_variable_set(t) return vst.difference(vsv.union(est)) # TODO This should be distributed", "return False class CopyTerm_2(BuiltIn): '''copy_term(?term, ?term) copy_term(Term_1, Term_2) is true iff Term_2 unifies", "true iff Flag is a flag supported by the processor, and Value is", "return atom.name == ''.join(chars) class CharCode_2(BuiltIn): '''char_code(+character, ?character_code)\\nchar_code(-character, +character_code) char_code(Char, Code) is true", "term, then EV is the empty set * else if T unifies with", "X and Y are unifiable.\"\"\" def execute(self, x, y): # TODO prologlib crashes", "in chars] return self.unify(charlist, List.from_list(lst)) class NumberCodes_2(BuiltIn): '''number_codes(+number, ?character_code_list)\\nnumber_codes(-number, ?character_code_list) number_codes(Number, List) is", "not (pi.name == '/' and pi.arity == 2): self.throw_type_error('predicate_indicator', pi) self.indicators = []", "with a clause Clause which unifies with Head :- Body. 
It is used", "class CurrentPredicate_1(BuiltIn): '''current_predicate(?predicate_indicator) current_predicate(PI) is true iff PI is a predicate indicator for", "not None): self.clauses_to_unify.append(Compound('clause', h, b)) self.clauses_to_remove.append(clause) return self.pick_clause(head, body) def reexecute(self, clause): self.reset_substitution()", "the union of the variable set of V and the existential variable set", "is Term, or * Term is a compound term and List is the", "of the set V, A, or CT.''' def execute(self, x): return isnumber(x) ###", "### ### Atomic term processing (ISO 8.16) ### class AtomLength_2(BuiltIn): '''atom_length(+atom, ?integer) atom_length(Atom,", "(ISO 8.5) 'functor/3' : Functor_3, 'arg/3' : Arg_3, '=../2' : Univ_2, 'copy_term/2' :", "if X is a member of the set A or I or F", "one-char atom Char is Code.''' def execute(self, char, code): if isvariable(char) and isvariable(code):", "attempt to unify two STO terms by =/2 # instead of using the", "if isinstance(subatom, Atomic): self.data = [(d, p) for (d, p) in self.data if", "we need to manage TypeError errors (e.g. x or y as float) on", "V and the existential variables set of the term G * else EV", "need to manage ValueError errors (e.g. for log(0)) on our own.''' if not", "in X by new variables.''' def execute(self, template, goal, instances): if isvariable(goal): self.throw_instantiation_error()", "return self.unify(codelist, List.from_list(lst)) ### ### Implementation defined hooks (ISO 8.17) ### class SetPrologFlag_2(BuiltIn):", "isvariable(v): return True if isinstance(t, Compound) and isinstance(v, Compound): if t.name != v.name", "in the context of the copy_term/2 built-in mgu = core.unify(t2, t) if mgu", "= self.kb.search(head) if not procedure: return False from .. 
import core for clause", "compound term with a functor whose identifier is Name and arity Arity, or", "database.''' def execute(self, pi): if not isvariable(pi) and not (pi.name == '/' and", "''.join([chr(code.value) for code in codelist.as_list()]) try: # the parser needs an End Token", "< y class TermGreaterThan_2(BuiltIn): \"\"\"'@>(@term, @term) Test the ordering of two terms. '@>'(X,", "then EV is the empty set * else if T unifies with ^(V,", "on our own.''' if x < 0 and isinstance(y, float): from ..core import", "not isvariable(atom) and not isatom(atom): self.throw_type_error('atom', atom) if not isvariable(subatom) and not isatom(subatom):", "class Univ_2(BuiltIn): \"\"\"'=..'(+nonvar, ?list) '=..'(-nonvar, +list) '=..'(Term, List) is true iff: * Term", "element) if isvariable(number) or islist(charlist): from ..parser import PrologParser, InvalidTermException s = ''.join([char.name", "Atomic): self.data = [(d, p) for (d, p) in self.data if d ==", "of the clause body to a # goal and on access permission to", "9.3) ### Bitwise functors (ISO 9.4) ### class Is_2(BuiltIn): \"\"\"is(?term, @evaluable) 'is'(Result, Expression)", "t2): from .. import core #t = core.renamed_copy(t1) t = t1._copy_term() # Can't", "and Y are not unifiable.\"\"\" def execute(self, x, y): from .. import core", "(isvariable(body) or iscallable(body)): self.throw_type_error('callable', body) self.clauses = [] procedure = self.kb.search(head) if not", "convert_clause_to_term(head, body): return (convert_to_term(head), convert_to_term(body)) def convert_to_term(head): if head.arity == 0: return Atomic(head.name)", "'integer/1' : Integer_1, 'float/1' : Float_1, 'atomic/1' : Atomic_1, 'compound/1' : Compound_1, 'nonvar/1'", "PrologTypeError raise PrologTypeError('integer', Atomic(s)) return x | y def bitnot(x): '''Redefined w.r.t. 
Python", "evaluating E1 and E2 as expressions the corresponding arithmetic values are equal.\"\"\" def", "import PrologEvaluationError raise PrologEvaluationError('zero_divisor') if not isinstance(x, int): from ..core import PrologTypeError raise", "self.unify(subatom, Atomic(s))) class AtomChars_2(BuiltIn): '''atom_chars(+atom, ?character_list)\\natom_chars(-atom, +character_list) atom_chars(Atom, List) is true iff List", "in self.data if n.index(d, p) == before.value] if isinstance(length, Atomic): self.data = [(d,", "in self.kb: n, a = i.split('/') indicator = Compound('/', Atomic(n), Atomic(int(a))) from ..", "t1 and t2 retain validity # only in the context of the copy_term/2", "E1 is greater than the corresponding arithmetic value of E2.\"\"\" def execute(self, e1,", "= mgu.get(term.name) if value: return value else: return ground(term.binding(), mgu) if isinstance(term, Compound):", "call(Term) is false.\"\"\" def execute(self, term): if isvariable(term): self.throw_instantiation_error() if isnumber(term): self.throw_type_error('callable', term)", "isvariable(after) and not (isnumber(after) and isinstance(after.value, int))): self.throw_type_error('integer', after) if isnumber(before) and before.value", "l) return (self.unify(before, Atomic(b)) and self.unify(length, Atomic(l)) and self.unify(after, Atomic(a)) and self.unify(subatom, Atomic(s)))", "False return self.pick_data(atom1, atom2, atom12) def pick_data(self, atom1, atom2, atom12): c = self.data.pop(0)", "if you attempt to unify two STO terms by =/2 # instead of", "head, body): if isvariable(head): self.throw_instantiation_error() if not iscallable(head): self.throw_type_error('callable', head) if not (isvariable(body)", "preceeds Y.\"\"\" def execute(self, x, y): return x < y class TermGreaterThan_2(BuiltIn): \"\"\"'@>(@term,", "flag, value): self.reset_substitution() if not self.flags: return False return self.pick_flag(flag, value) def pick_flag(self,", "isvariable(number): for element in 
codelist.as_list(): if isvariable(element): self.throw_instantiation_error() if not isvariable(number) and not", "def free_variable_set(t, v): '''The free variable set FV of a term T with", "value associated with a Prolog flag to be altered.''' def execute(self, flag, value):", "ISO Prolog float_integer_part(x) is valid only when x is a float.''' if not", "Atomic(''.join(chars))) elif isvariable(charlist) or islist(charlist) or ispartiallist(charlist): chars = [Atomic(c) for c in", "member of the set A or I or F and is false if", "self.throw_type_error('callable', head) # errors on the conversion of the clause body to a", "the renamed copy of t1 and t2 retain validity # only in the", "not (isnumber(length) and isinstance(length.value, int))): self.throw_type_error('integer', length) if isnumber(length) and length.value < 0:", "Term is an atomic term and List is the list whose only element", "# goal and on access permission to a user-defined # procedure are handled", "self.throw_domain_error('not_less_than_zero', n) if n.value >= len(term.value): return False return self.unify(arg, term.value[n.value]) class Univ_2(BuiltIn):", "set CT.''' def execute(self, x): return isinstance(x, (Compound, List)) class Nonvar_1(BuiltIn): '''nonvar(@term) nonvar(X)", "Repeat_0, # Atomic term processing (ISO 8.16) 'atom_length/2' : AtomLength_2, 'atom_concat/3' : AtomConcat_3,", "Y) is true iff X preceeds Y.\"\"\" def execute(self, x, y): return x", "== '^' and term.arity == 2: s.update(variable_set(term.value[1])) s.update(existential_variable_set(term.value[2])) return s return s def", "Variable, Compound, List from ..parser import isvariable, isatom, isnumber, islist, ispartiallist, iscallable from", "comparison (ISO 8.7) ### class ArithmeticEqual_2(BuiltIn): \"\"\"'=:='(@evaluable, @evaluable) '=:='(E1, E2) is true iff", "isinstance(t, Atomic) and isinstance(v, Atomic): return t == v if isvariable(t) and isvariable(v):", "instances) def pick_bag(self, template, goal, instances): 
wt = self.s[0] wt_list = [e for", "'atom_length/2' : AtomLength_2, 'atom_concat/3' : AtomConcat_3, 'sub_atom/5' : SubAtom_5, 'atom_chars/2' : AtomChars_2, 'atom_codes/2'", "arithmetic functors (ISO 9.1) ### Other arithmetic functors (ISO 9.3) ### Bitwise functors", "p) for (d, p) in self.data if n.index(d, p) == before.value] if isinstance(length,", "not isinstance(y, int): from ..core import PrologTypeError raise PrologTypeError('integer', Atomic(y)) return x %", "iff X is not a member of the set V.''' def execute(self, x):", "clauses of the procedure whose predicate is equal to the functor of the", "iff List is a list whose elements are the character codes corresponding to", "@term) Test the ordering of two terms. '@>='(X, Y) is true iff Y", "culprit) core._FLAGS[flag.name] = f._replace(value=value.name) return True class CurrentPrologFlag_2(BuiltIn): '''current_prolog_flag(?flag, ?term) current_prolog_flag(Flag, Value) is", "len(atom.name) - (b + l) return (self.unify(before, Atomic(b)) and self.unify(length, Atomic(l)) and self.unify(after,", "Term creation and decomposition (ISO 8.5) ### class Functor_3(BuiltIn): '''functor(-nonvar, +atomic, +integer) functor(+nonvar,", "number_codes(Number, List) is true iff List is a list whose elements are the", "and ispartiallist(charlist): self.throw_instantiation_error() if isvariable(number): for element in charlist.as_list(): if isvariable(element): self.throw_instantiation_error() if", "the ordering of two terms. '@=<'(X, Y) is true iff X preceeds Y", "float) on our own.''' if not isinstance(n, int): from ..core import PrologTypeError raise", "a variable or an atomic term, then EV is the empty set *", "y with x < 0 is defined only when y is an integer,", "ZeroDivisionError errors on our own.''' if not y: from ..core import PrologEvaluationError raise", "unify_with_occur_check/2 predicate. 
return self.unify(x, y) class NotUnifiable_2(BuiltIn): \"\"\"'\\\\='(@term, @term) If X and Y", "or islist(charlist): from ..parser import PrologParser, InvalidTermException s = ''.join([char.name for char in", "body): if isvariable(head): self.throw_instantiation_error() if not iscallable(head): self.throw_type_error('callable', head) if not (isvariable(body) or", "X and Y are not unifiable.\"\"\" def execute(self, x, y): from .. import", "and length.value < 0: self.throw_domain_error('not_less_than_zero', length) size = Atomic(len(atom.name)) return self.unify(length, size) class", "f._replace(value=value.name) return True class CurrentPrologFlag_2(BuiltIn): '''current_prolog_flag(?flag, ?term) current_prolog_flag(Flag, Value) is true iff Flag", "s.update(variable_set(term.value[1])) s.update(existential_variable_set(term.value[2])) return s return s def iterated_goal_term(term): '''The iterated goal term G", "from the database the procedure specified by the predicate indicator Pred and all", "variable_set(t) vsv = variable_set(v) est = existential_variable_set(t) return vst.difference(vsv.union(est)) # TODO This should", "after, subatom): self.reset_substitution() if not self.data: return False return self.pick_data(atom, before, length, after,", "charlist): if not isvariable(atom) and not isatom(atom): self.throw_type_error('atom', atom) if isvariable(atom): if isvariable(charlist):", "tail is a list of the arguments of Term.\"\"\" def execute(self, term, elements):", "..core import PrologTypeError raise PrologTypeError('integer', Atomic(y)) return x // y def module(x, y):", "s.startswith(atom1.name): return False else: i = len(atom1.name) self.data = [(s[:i], s[i:], s)] else:", "whose names are the successive characters of the name of atom Atom.''' def", "unifies with the list of values to which a variable X not occurring", "'floor/1' : math.floor, 'round/1' : round, 'ceiling/1' : math.ceil, 'truncate/1' : math.trunc, 'float/1'", "we need to manage 
ZeroDivisionError errors on our own.''' if not y: from", "c = self.data.pop(0) return (self.unify(atom1, Atomic(c[0])) and self.unify(atom2, Atomic(c[1])) and self.unify(atom12, Atomic(c[2]))) class", "Is_2(BuiltIn): \"\"\"is(?term, @evaluable) 'is'(Result, Expression) is true iff the value of evaluating Expression", "new variables.''' def execute(self, template, goal, instances): if isvariable(goal): self.throw_instantiation_error() if isnumber(goal): self.throw_type_error('callable',", "'@<'(X, Y) is true iff X preceeds Y.\"\"\" def execute(self, x, y): return", "def reexecute(self, atom1, atom2, atom12): self.reset_substitution() if not self.data: return False return self.pick_data(atom1,", "the variables of the former to the variables of the latter such that", "math.floor, 'round/1' : round, 'ceiling/1' : math.ceil, 'truncate/1' : math.trunc, 'float/1' : float,", "to unify two STO terms by =/2 # instead of using the proper", "def pick_flag(self, flag, value): f = self.flags.pop() return self.unify(flag, Atomic(f.name)) and self.unify(value, Atomic(f.value))", "iff X is a member of the set CT.''' def execute(self, x): return", "return functor(*args) from ..core import PrologTypeError raise PrologTypeError('number', term) def search_evaluable_functor(name): import math", "def execute(self, template, goal, instances): fvs = free_variable_set(goal, template) self.witness = Compound('witness', *fvs)", "of the atom Atom.''' def execute(self, atom, length): if isvariable(atom): self.throw_instantiation_error() if not", "1)] elif isvariable(atom1): s = atom12.name if not s.endswith(atom2.name): return False else: i", "x.arity == 0 and isinstance(x.value, int) class Float_1(BuiltIn): '''float(@term) float(X) is true iff", "== ':-/2' else clause if isvariable(head): self.throw_instantiation_error() if isnumber(head): self.throw_type_error('callable', head) # errors", "true iff Term_2 unifies with a term T which is a renamed copy", "character code for the corresponding 
character of the name.''' def execute(self, atom, codelist):", "characters of the name of atom Atom.''' def execute(self, atom, charlist): if not", "t = (Variable('_') for i in range(arity.value)) c = Compound(name.name, *t) return self.unify(term,", "return False if not procedure._public: pi = Compound('/', Atomic(head.name), Atomic(head.arity)) self.throw_permission_error('access', 'private_procedure', pi)", "v1 = evaluate_expression(e1) v2 = evaluate_expression(e2) return v1 > v2 class ArithmeticGreaterThanOrEqual_2(BuiltIn): \"\"\"'>='(@evaluable,", "there is a bijection s of the variables of the former to the", "while start <= end: for i in range(start, end + 1): self.data.append((n[start:i], start))", "Univ_2, 'copy_term/2' : CopyTerm_2, # Arithmetic evaluation (ISO 8.6) 'is/2' : Is_2, #", "Number_1(BuiltIn): '''number(@term) number(X) is true if X is a member of the set", "term) if n.value < 0: self.throw_domain_error('not_less_than_zero', n) if n.value >= len(term.value): return False", "Atomic(y)) return x // y def module(x, y): '''Redefined w.r.t. Python because in", "not ispartiallist(charlist): self.throw_type_error('list', charlist) if islist(charlist): for element in charlist.as_list(): if isatom(element) and", "def execute(self, flag, value): from .. import core # for flags if not", "def execute(self, t1, t2): from .. 
import core #t = core.renamed_copy(t1) t =", "= convert_clause_to_term(clause.head(), clause.body()) if (core.unify(h, head) is not None and core.unify(b, body) is", "# errors on the conversion of the clause body to a # goal", "< 0: self.throw_domain_error('not_less_than_zero', length) size = Atomic(len(atom.name)) return self.unify(length, size) class AtomConcat_3(BuiltIn): '''atom_concat(?atom,", "Atomic) and arity.value == 0: return self.unify(term, name) if isatom(name) and arity.value >", "value of E1 is less than or equal to the corresponding arithmetic value", "self.pick_data(atom, before, length, after, subatom) def pick_data(self, atom, before, length, after, subatom): s,", "on our own.''' if not isinstance(n, int): from ..core import PrologTypeError raise PrologTypeError('integer',", "Python because we need to manage TypeError errors (e.g. n as float) on", "single # goal, even when using parentheses! '\\+/1' : Not_1, 'not/1' : Not_1,", "findall.execute(Compound('+', self.witness, template), g, Variable('S')) s = findall.substitution['S'] self.s = self._create_solution_list(s) if not", "result, expression): if isvariable(expression): self.throw_instantiation_error() c = evaluate_expression(expression) return self.unify(result, Atomic(c)) def evaluate_expression(term):", "Implementation defined hooks (ISO 8.17) ### class SetPrologFlag_2(BuiltIn): '''set_prolog_flag(+flag, @nonvar) A goal set_prolog_flag(Flag,", "\\+ does not work because of what is probably a parser # bug:", "but the order in which each list is found is undefined.''' def execute(self,", "= caller.solve(term) return not result class Repeat_0(BuiltIn): '''repeat repeat is true. 
repeat is", "for c in charlist.as_list()] return self.unify(atom, Atomic(''.join(chars))) elif isvariable(charlist) or islist(charlist) or ispartiallist(charlist):", "clause.value[2] else: head = clause body = Atomic.TRUE if isvariable(head): self.throw_instantiation_error() if isnumber(head):", "* else if T is a compound term then Sv is the union", "isinstance(x.value, int): self.throw_type_error('integer', x) exit(x.value) # Utility functions def free_variable_set(t, v): '''The free", "'<'(E1, E2) is true iff evaluating E1 and E2 as expressions the corresponding", "zip(t.value[1:], v.value[1:]): if isvariable(a1) and isvariable(a2) and not a1.name.startswith('_'): a = bijection.get(a1) if", "its input.''' if not x: return 0 if isinstance(x, int) else 0.0 from", "search_builtin(head): pi = Compound('/', Atomic(head.name), Atomic(head.arity)) self.throw_permission_error('modify', 'static_procedure', pi) self.clauses_to_unify = [] self.clauses_to_remove", "islist(charlist) and not ispartiallist(charlist): self.throw_type_error('list', charlist) if islist(charlist): for element in charlist.as_list(): if", "?atom, +atom)\\natom_concat(+atom, +atom, -atom) atom_concat(Atom_1, Atom_2, Atom_12) is true iff characters of the", "n2)] return self.pick_data(atom1, atom2, atom12) def reexecute(self, atom1, atom2, atom12): self.reset_substitution() if not", "in which each list is found is undefined.''' def execute(self, template, goal, instances):", "self.s: return False return self.pick_bag(template, goal, instances) def pick_bag(self, template, goal, instances): wt", "is undefined.''' def _create_solution_list(self, s): solutions = [] if s == List.EMPTY_LIST else", "return term def isvariant(t, v): '''Two terms are variants if there is a", "'float_fractional_part/1' : float_fractional_part, '**/2' : power, 'sin/1' : math.sin, 'cos/1' : math.cos, 'atan/1'", "as expressions the corresponding arithmetic values are not equal.\"\"\" def execute(self, e1, e2):", "elements): if 
isvariable(term) and ispartiallist(elements): self.throw_instantiation_error() if not islist(elements) and not ispartiallist(elements): self.throw_type_error('list',", "'''Redefined w.r.t. Python because we need to manage ValueError errors (e.g. for log(0))", "permission to a user-defined # procedure are handled directly by the database from", "a character sequence of Number which could be output.''' def execute(self, number, charlist):", "x < y class TermGreaterThan_2(BuiltIn): \"\"\"'@>(@term, @term) Test the ordering of two terms.", "the predicate of Head is public, and * there is a clause in", "True class CurrentPrologFlag_2(BuiltIn): '''current_prolog_flag(?flag, ?term) current_prolog_flag(Flag, Value) is true iff Flag is a", "iff call(Term) is false.\"\"\" def execute(self, term): if isvariable(term): self.throw_instantiation_error() if isnumber(term): self.throw_type_error('callable',", "needs an End Token n = PrologParser(s + '.').read_term() return self.unify(number, n) except", "to the variables of the latter such that the latter term results from", "'''arg(+integer, +compound_term, ?term) arg(N, Term, Arg) is true iff the Nth argument of", "Variable): if isinstance(name, Atomic) and arity.value == 0: return self.unify(term, name) if isatom(name)", "not isatom(atom): self.throw_type_error('atom', atom) if isvariable(atom): if ispartiallist(codelist): self.throw_instantiation_error() if not islist(codelist) and", "= len(s) a = len(atom.name) - (b + l) return (self.unify(before, Atomic(b)) and", "ArithmeticLessThan_2, '=</2' : ArithmeticLessThanOrEqual_2, '>/2' : ArithmeticGreaterThan_2, '>=/2' : ArithmeticGreaterThanOrEqual_2, # Clause retrival", "sign(x): '''Redefined w.r.t. 
Python because in ISO Prolog sign(x) must return the same", "return self.unify(length, size) class AtomConcat_3(BuiltIn): '''atom_concat(?atom, ?atom, +atom)\\natom_concat(+atom, +atom, -atom) atom_concat(Atom_1, Atom_2, Atom_12)", "functor name of Term and whose tail is a list of the arguments", "the variable set of V and the existential variables set of the term", "the corresponding arithmetic values are equal.\"\"\" def execute(self, e1, e2): if isvariable(e1) or", "PrologTypeError('integer', Atomic(s)) return x | y def bitnot(x): '''Redefined w.r.t. Python because we", "not isatom(flag): self.throw_type_error('atom', flag) if isatom(flag) and not core._FLAGS[flag.name]: self.throw_domain_error('prolog_flag', flag) self.flags =", "true iff integer Length equals the number of characters of the name of", "is true iff List is a list whose elements are the one-char atoms", "y): # The Python __eq__ method does not hold Prolog # semantics for", "ArithmeticGreaterThan_2, '>=/2' : ArithmeticGreaterThanOrEqual_2, # Clause retrival and information (ISO 8.8) 'clause/2' :", "y as float) on our own.''' if not isinstance(x, int): from ..core import", "@nonvar) A goal set_prolog_flag(Flag, Value) enables the value associated with a Prolog flag", "current_predicate(PI) is true iff PI is a predicate indicator for one of the", "### ### Clause creation and destruction (ISO 8.9) ### class Asserta_1(BuiltIn): '''asserta(@clause) asserta(Clause)", "is a variable then Sv is {T} * else if T is a", "Atomic(x)) from math import modf f, i = modf(x) return f def power(x,", "self.s = s_next return self.unify(t_list, instances) def _create_solution_list(self, s): return [] if s", "raise PrologTypeError('integer', Atomic(s)) return x | y def bitnot(x): '''Redefined w.r.t. 
Python because", "isvariable(atom): self.throw_instantiation_error() if not isatom(atom): self.throw_type_error('atom', atom) if (not isvariable(length) and not (isnumber(length)", "characters of the name of Sub_atom, and After is the number of characters", "= [(s[:i], s[i:], s)] elif isvariable(atom2): s = atom12.name if not s.startswith(atom1.name): return", "clause.predicate_indicator() == ':-/2' else clause if isvariable(head): self.throw_instantiation_error() if isnumber(head): self.throw_type_error('callable', head) #", "isatom(name) and arity.value > 0: self.throw_type_error('atom', name) if isinstance(term, Atomic): return self.unify(term, name)", "start <= end: for i in range(start, end + 1): self.data.append((n[start:i], start)) start", "return (self.unify(before, Atomic(b)) and self.unify(length, Atomic(l)) and self.unify(after, Atomic(a)) and self.unify(subatom, Atomic(s))) class", "of two terms. '=='(X, Y) is true iff X and Y are identical", "as expressions the corresponding arithmetic value of E1 is less than or equal", "clause Clause which unifies with Head :- Body. It is used to remove", "?atom) sub_atom(Atom, Before, Length, After, Sub_atom) is true iff atom Atom can be", "self.throw_instantiation_error() if not isatom(atom): self.throw_type_error('atom', atom) if (not isvariable(length) and not (isnumber(length) and", "= s_next return self.unify(t_list, instances) def _create_solution_list(self, s): return [] if s ==", "[] for i in self.kb: n, a = i.split('/') indicator = Compound('/', Atomic(n),", "from .. 
import core # for flags if flag.name not in core._FLAGS: self.throw_domain_error('prolog_flag',", "'</2' : ArithmeticLessThan_2, '=</2' : ArithmeticLessThanOrEqual_2, '>/2' : ArithmeticGreaterThan_2, '>=/2' : ArithmeticGreaterThanOrEqual_2, #", "CurrentPredicate_1, # Clause creation and destruction (ISO 8.9) 'asserta/1' : Asserta_1, 'assertz/1' :", "Nonvar_1(BuiltIn): '''nonvar(@term) nonvar(X) is true iff X is not a member of the", "Atom_12 are the result of concatenating the characters of the name of the", "whatever system invoked Prolog, passing the value of X as a message.''' def", "in term.value[1:]: s.update(variable_set(arg)) else: # a list for e in term.as_list(): s.update(variable_set(e)) return", "a in term.value[1:]) pi = term.predicate_indicator() functor = search_evaluable_functor(pi) if not functor: from", "[(n1, n2, n1 + n2)] return self.pick_data(atom1, atom2, atom12) def reexecute(self, atom1, atom2,", "isvariable(instances) and (not islist(instances) and not ispartiallist(instances))): self.throw_type_error('list', instances) from .. 
import core", "database contains at least one dynamic procedure with a clause Clause which unifies", "Nonvar_1, 'number/1' : Number_1, # Term comparison (ISO 8.4) '@=</2' : TermLessThanOrEqual_2, '==/2'", "user-defined # procedure are handled directly by the database from ..builtin import search_builtin", "FIXME this should use write_canonical/1 lst = [Atomic(ord(c)) for c in chars] return", "NumberCodes_2(BuiltIn): '''number_codes(+number, ?character_code_list)\\nnumber_codes(-number, ?character_code_list) number_codes(Number, List) is true iff List is a list", "term T with respect to a term V is a set of variables", "islist(elements) and isvariable(elements.head): self.throw_instantiation_error() if islist(elements) and not isatom(elements.head) and len(elements) > 1:", "iff the value of evaluating Expression as an expression is Result.\"\"\" def execute(self,", "not self.s: return False return self.pick_bag(template, goal, instances) def pick_bag(self, template, goal, instances):", "or Goal would be instantiated by successive re-executions of \"call(Goal), X=Template\" after systematic", "0: self.throw_domain_error('not_less_than_zero', length) size = Atomic(len(atom.name)) return self.unify(length, size) class AtomConcat_3(BuiltIn): '''atom_concat(?atom, ?atom,", "else s.as_list() solutions = list(set(solutions)) solutions.sort() return solutions ### ### Logic and control", "int))): self.throw_type_error('integer', length) if (not isvariable(after) and not (isnumber(after) and isinstance(after.value, int))): self.throw_type_error('integer',", "for each different instantiation of the free variables in it. The elements of", "copy_term/2 built-in if t2.name in mgu: # Still preserve the binding for t2", "* if T is an atomic term, then Sv is the empty set", "y integers is equivalent to intdiv(x, y). 
Also, we need to manage ZeroDivisionError", "ArithmeticGreaterThan_2(BuiltIn): \"\"\"'>'(@evaluable, @evaluable) '>'(E1, E2) is true iff evaluating E1 and E2 as", "e in wt_list] s_next = [e for e in self.s if e not", "self.throw_instantiation_error() if isvariable(number): for element in codelist.as_list(): if isvariable(element): self.throw_instantiation_error() if not isvariable(number)", "arithmetic value of E1 is greater than or equal to the corresponding arithmetic", "Atomic(s))) class AtomChars_2(BuiltIn): '''atom_chars(+atom, ?character_list)\\natom_chars(-atom, +character_list) atom_chars(Atom, List) is true iff List is", "'float/1' : float, 'abs/1' : operator.abs, 'sign/1' : sign, 'float_integer_part/1' : float_integer_part, 'float_fractional_part/1'", "Atomic): return self.unify(term, name) and self.unify(arity, Atomic(0)) if isinstance(term, (Compound, List)): return (self.unify(Atomic(term.name),", "return s if term.name == '^' and term.arity == 2: s.update(variable_set(term.value[1])) s.update(existential_variable_set(term.value[2])) return", "Name, Arity) is true iff: * Term is a compound term with a", "raise PrologInstantiationError() if term.arity == 0 and term._isnumber(): return term.value if isinstance(term, Compound):", "The Python __eq__ method does not hold Prolog # semantics for anonymous variables", "t = deref(t) v = deref(v) if isinstance(t, Atomic) and isinstance(v, Atomic): return", "isvariable(number) and not islist(codelist) and not ispartiallist(codelist): self.throw_type_error('list', codelist) if islist(codelist): for element", "return ~x ### ### Arithmetic comparison (ISO 8.7) ### class ArithmeticEqual_2(BuiltIn): \"\"\"'=:='(@evaluable, @evaluable)", "latter such that the latter term results from replacing each variable X in", "value): self.reset_substitution() if not self.flags: return False return self.pick_flag(flag, value) def pick_flag(self, flag,", "to the successive characters of the name of atom Atom, and the value", 
"Compound): self.throw_type_error('compound', term) if n.value < 0: self.throw_domain_error('not_less_than_zero', n) if n.value >= len(term.value):", "the character code for the corresponding character of the name.''' def execute(self, atom,", "ordering of two terms. '@>='(X, Y) is true iff Y preceeds X or", "the value associated with a Prolog flag to be altered.''' def execute(self, flag,", "def pick_clause(self, head, body): if not self.clauses_to_remove: return False self.kb.retract(self.clauses_to_remove.pop(0)) c = self.clauses_to_unify.pop(0)", "PrologTypeError raise PrologTypeError('integer', Atomic(x)) return ~x ### ### Arithmetic comparison (ISO 8.7) ###", "..core import PrologEvaluationError raise PrologEvaluationError('undefined') from math import sqrt return sqrt(x) def rightshift(n,", "characters of the name of the atom Atom_1.''' def execute(self, atom1, atom2, atom12):", "is a list whose elements are the one-char atoms corresponding to a character", "len(elements) == 1: t = elements.head return self.unify(term, t) elif len(elements) > 1:", "term processing (ISO 8.16) 'atom_length/2' : AtomLength_2, 'atom_concat/3' : AtomConcat_3, 'sub_atom/5' : SubAtom_5,", "{term} else: term = deref(term) if isinstance(term, Atomic): return set() s = set()", "which could be output.''' def execute(self, number, codelist): if isvariable(number) and ispartiallist(codelist): self.throw_instantiation_error()", "renamed term variables bindings # outside the context of the copy_term/2 built-in if", "1: name = elements.head.name t = Compound(name, *elements.as_list()[1:]) return self.unify(term, t) else: return", "l = List.from_list([Atomic(term.name)] + list(term.value[1:])) return self.unify(elements, l) if isinstance(term, Variable): # elements", "intdiv(x, y). Also, we need to manage ZeroDivisionError errors on our own.''' if", "execute(self, flag, value): from .. 
import core # for flags if not isvariable(flag)", "own.''' if not x: from ..core import PrologEvaluationError raise PrologEvaluationError('undefined') from math import", "mgu.get(term.name) if value: return value else: return ground(term.binding(), mgu) if isinstance(term, Compound): args", "return self.unify(term, name) if isatom(name) and arity.value > 0: t = (Variable('_') for", "atom1.name n2 = atom2.name self.data = [(n1, n2, n1 + n2)] return self.pick_data(atom1,", "search_builtin(c): self.throw_permission_error('modify', 'static_procedure', pi) else: self.throw_type_error('predicate_indicator', pi) self.kb.abolish(pi) return True ### ### All", "else EV is the empty set.''' s = set() if isinstance(term, Atomic) or", "if isinstance(term, Compound): for arg in term.value[1:]: s.update(variable_set(arg)) else: # a list for", "pi): if not isvariable(pi) and not (pi.name == '/' and pi.arity == 2):", "tuple(Variable('_') for i in range(arity.value)) c = Compound(name.name, *t) from ..builtin import search_builtin", "is a member of the set A or I or F and is", "def execute(self, atom, before, length, after, subatom): if isvariable(atom): self.throw_instantiation_error() if not isvariable(atom)", "not None): self.clauses.append(Compound('clause', h, b)) return self.pick_clause(head, body) def reexecute(self, head, body): self.reset_substitution()", "is not a member of the set V.''' def execute(self, x): return not", "and not isatom(name) and arity.value > 0: self.throw_type_error('atom', name) if isinstance(term, Atomic): return", "term and List is the list whose only element is Term, or *", "and not isatom(flag): self.throw_type_error('atom', flag) from .. import core # for flags if", "is much wider than the single # goal, even when using parentheses! 
'\\+/1'", "true iff the database contains at least one dynamic procedure with a clause", "is true iff evaluating E1 and E2 as expressions the corresponding arithmetic values", "# FIXME this should use write_canonical/1 lst = [Atomic(ord(c)) for c in chars]", ": Atom_1, 'integer/1' : Integer_1, 'float/1' : Float_1, 'atomic/1' : Atomic_1, 'compound/1' :", "if term.name == '^' and term.arity == 2: return iterated_goal_term(term.value[2]) return term def", "before, length, after, subatom) def reexecute(self, atom, before, length, after, subatom): self.reset_substitution() if", "int): from ..core import PrologTypeError raise PrologTypeError('integer', Atomic(s)) return n >> s def", "s)] elif isvariable(atom2): s = atom12.name if not s.startswith(atom1.name): return False else: i", "len(elements) > 1: self.throw_type_error('atom', elements.head) if islist(elements) and isinstance(elements.head, Compound) and len(elements) >", "raise PrologEvaluationError('undefined') return float(x ** y) def logarithm(x): '''Redefined w.r.t. Python because we", "G is the iterated goal term of Goal * else G is T.'''", "= Compound('/', Atomic(head.name), Atomic(head.arity)) self.throw_permission_error('modify', 'static_procedure', pi) self.kb.assert_clause(clause, append=True) return True class Retract_1(BuiltIn):", "halt(X) neither succeeds nor fails. It has the side effect of exiting from", "if isinstance(before, Atomic): self.data = [(d, p) for (d, p) in self.data if", "return sqrt(x) def rightshift(n, s): '''Redefined w.r.t. 
Python because we need to manage", "for code in codelist.as_list()] return atom.name == ''.join(chars) class CharCode_2(BuiltIn): '''char_code(+character, ?character_code)\\nchar_code(-character, +character_code)", "0 and isinstance(y, float): from ..core import PrologEvaluationError raise PrologEvaluationError('undefined') if not x", "self.throw_representation_error(element) if isvariable(number) or islist(codelist): from ..parser import PrologParser, InvalidTermException s = ''.join([chr(code.value)", "self.throw_domain_error('flag_value', culprit) core._FLAGS[flag.name] = f._replace(value=value.name) return True class CurrentPrologFlag_2(BuiltIn): '''current_prolog_flag(?flag, ?term) current_prolog_flag(Flag, Value)", "?term) If X and Y are NSTO (Not Subject To Occur-check) then '='(X,", "(d, p) in self.data if len(n) - n.index(d, p) - len(d) == after.value]", "isatom(atom1): self.throw_type_error('atom', atom1) if not isvariable(atom2) and not isatom(atom2): self.throw_type_error('atom', atom2) if not", "reexecute(self, clause): self.reset_substitution() if clause.predicate_indicator() == ':-/2': head = clause.value[1] body = clause.value[2]", "# Term creation and decomposition (ISO 8.5) 'functor/3' : Functor_3, 'arg/3' : Arg_3,", ": AtomChars_2, 'atom_codes/2' : AtomCodes_2, 'char_code/2' : CharCode_2, 'number_chars/2' : NumberChars_2, 'number_codes/2' :", "if isvariable(n) or isvariable(term): self.throw_instantiation_error() if not isinstance(n.value, int): self.throw_type_error('integer', n) if not", "return self.pick_indicator(pi) def pick_indicator(self, pi): if not self.indicators: return False # the order", "self.throw_type_error('callable', term) from .. 
import core caller = core.Caller() caller._kb = self.kb result", "#subst = core.unify(ww, deepcopy(self.witness)) subst = core.unify(ww, self.witness) ww.apply(subst) t.apply(subst) self.substitution.update(subst) t_list =", "range(arity.value)) c = Compound(name.name, *t) from ..builtin import search_builtin if search_builtin(c): self.throw_permission_error('modify', 'static_procedure',", "Sv is the empty set * else if T is a variable then", "(pi.name == '/' and pi.arity == 2): self.throw_type_error('predicate_indicator', pi) self.indicators = [] for", "returning to whatever system invoked Prolog.''' def execute(self): exit(0) class Halt_1(BuiltIn): '''halt(+integer) halt(X)", "the number of characters of the name of AtomR.''' def execute(self, atom, before,", "wwtt.value[1] #from copy import deepcopy #subst = core.unify(ww, deepcopy(self.witness)) subst = core.unify(ww, self.witness)", "whose elements are the one-char atoms whose names are the successive characters of", "EV is the empty set * else if T unifies with ^(V, G)", "w.r.t. Python because in ISO Prolog x // y is valid only when", "Y) is true iff X preceeds Y or X and Y are identical", "need to manage TypeError errors (e.g. n as float) on our own.''' if", "for each of the arguments of T.''' from ..core import deref if isinstance(term,", "Atomic) else 0) self.data = [] while start <= end: for i in", "pick_data(self, atom1, atom2, atom12): c = self.data.pop(0) return (self.unify(atom1, Atomic(c[0])) and self.unify(atom2, Atomic(c[1]))", "return value else: return ground(term.binding(), mgu) if isinstance(term, Compound): args = [] for", "It is used to remove those unifying clauses from the database.''' def execute(self,", "i in range(len(s) + 1)] elif isvariable(atom1): s = atom12.name if not s.endswith(atom2.name):", "else if T unifies with ^(V, G) then EV is the union of", "s def float_integer_part(x): '''Redefined w.r.t. 
Python because in ISO Prolog float_integer_part(x) is valid", "[] procedure = self.kb.search(head) if not procedure: return False from .. import core", "the processor and returning to whatever system invoked Prolog, passing the value of", "hierarchy classes def variable_set(term): '''The variable set Sv of a term T is", "import PrologParser, InvalidTermException s = ''.join([char.name for char in charlist.as_list()]) try: # the", "because in ISO Prolog x // y is valid only when x and", "c) class Abolish_1(BuiltIn): '''abolish(@predicate_indicator) abolish(Pred) is true. It is used to remove from", "true iff List is a list whose elements are the one-char atoms corresponding", ": ArithmeticLessThanOrEqual_2, '>/2' : ArithmeticGreaterThan_2, '>=/2' : ArithmeticGreaterThanOrEqual_2, # Clause retrival and information", "ispartiallist(codelist): self.throw_instantiation_error() if isvariable(number): for element in codelist.as_list(): if isvariable(element): self.throw_instantiation_error() if not", "goal, instances) def reexecute(self, template, goal, instances): self.reset_substitution() if not self.s: return False", "iff X is a member of the set V.''' def execute(self, x): return", "def execute(self, atom, codelist): if not isvariable(atom) and not isatom(atom): self.throw_type_error('atom', atom) if", "or isvariable(term): return s if term.name == '^' and term.arity == 2: s.update(variable_set(term.value[1]))", "return Compound(term.name, *args) return term class Bagof_3(BuiltIn): '''bagof(?term, +callable_term, ?list) bagof(Template, Goal, Instances)", "= self.clauses.pop(0) return self.unify(Compound('clause', head, body), c) def convert_clause_to_term(head, body): return (convert_to_term(head), convert_to_term(body))", "list(str(number.value)) # FIXME this should use write_canonical/1 lst = [Atomic(c) for c in", "(ISO 8.17) 'set_prolog_flag/2' : SetPrologFlag_2, 'current_prolog_flag/2' : CurrentPrologFlag_2, 'halt/0' : Halt_0, 'halt/1' :", "y class 
TermIdentical_2(BuiltIn): \"\"\"'=='(@term, @term) Test the ordering of two terms. '=='(X, Y)", "the set CT.''' def execute(self, x): return isinstance(x, (Compound, List)) class Nonvar_1(BuiltIn): '''nonvar(@term)", "Template or Goal would be instantiated by successive re-executions of \"call(Goal), X=Template\" after", "Compound('witness', *fvs) if fvs else Atomic('witness') g = iterated_goal_term(goal) findall = Findall_3(self.kb) findall.execute(Compound('+',", "if T unifies with ^(_, Goal) then G is the iterated goal term", "PrologEvaluationError raise PrologEvaluationError('undefined') from math import sqrt return sqrt(x) def rightshift(n, s): '''Redefined", "iff evaluating E1 and E2 as expressions the corresponding arithmetic value of E1", "elements.head) if islist(elements) and isinstance(elements.head, Compound) and len(elements) > 1: self.throw_type_error('atomic', elements.head) if", "is true iff characters of the name of the atom Atom_12 are the", "== '/' and pi.arity == 2: name, arity = pi.value[1:] if isvariable(name) or", "Compound(name.name, *t) from ..builtin import search_builtin if search_builtin(c): self.throw_permission_error('modify', 'static_procedure', pi) else: self.throw_type_error('predicate_indicator',", "List is the list whose only element is Term, or * Term is", "temp}) else: mgu.reduce() self.substitution.update(mgu) return True return False ### ### Arithmetic evaluation (ISO", "def _create_solution_list(self, s): return [] if s == List.EMPTY_LIST else s.as_list() class Setof_3(Bagof_3):", "self.throw_type_error('integer', length) if isnumber(length) and length.value < 0: self.throw_domain_error('not_less_than_zero', length) size = Atomic(len(atom.name))", "not self.data: return False return self.pick_data(atom, before, length, after, subatom) def reexecute(self, atom,", "s = ''.join([char.name for char in charlist.as_list()]) try: # the parser needs an", "elements are the one-char atoms whose names are the successive characters of 
the", "procedure.clauses(): h, b = convert_clause_to_term(clause.head(), clause.body()) if (core.unify(h, head) is not None and", "Missing max_arity related errors if arity.value < 0: self.throw_domain_error('not_less_than_zero', arity) # error on", "= Compound(name, *elements.as_list()[1:]) return self.unify(term, t) else: return False else: return False class", "### class TermLessThanOrEqual_2(BuiltIn): \"\"\"'@=<'(@term, @term) Test the ordering of two terms. '@=<'(X, Y)", "reexecute(self, flag, value): self.reset_substitution() if not self.flags: return False return self.pick_flag(flag, value) def", "of E2.\"\"\" def execute(self, e1, e2): if isvariable(e1) or isvariable(e2): self.throw_instantiation_error() v1 =", "'set_prolog_flag/2' : SetPrologFlag_2, 'current_prolog_flag/2' : CurrentPrologFlag_2, 'halt/0' : Halt_0, 'halt/1' : Halt_1 }", "(ISO 8.3) 'var/1' : Var_1, 'atom/1' : Atom_1, 'integer/1' : Integer_1, 'float/1' :", "rightshift, '<</2' : leftshift, '/\\\\/2' : bitand, '\\\\//2' : bitor, '\\\\/1' : bitnot}", "import sqrt return sqrt(x) def rightshift(n, s): '''Redefined w.r.t. Python because we need", "self.throw_type_error('predicate_indicator', pi) self.kb.abolish(pi) return True ### ### All solutions (ISO 8.10) ### class", "a float.''' if not isinstance(x, float): from ..core import PrologTypeError raise PrologTypeError('float', Atomic(x))", "functor(Term, Name, Arity) is true iff: * Term is a compound term with", "chars = [c.name for c in charlist.as_list()] return atom.name == ''.join(chars) class AtomCodes_2(BuiltIn):", "expressions the corresponding arithmetic values are equal.\"\"\" def execute(self, e1, e2): if isvariable(e1)", "for element in codelist.as_list(): if not isvariable(element): try: chr(element.value) except UnicodeDecodeError: self.throw_representation_error(element) if", "\"\"\"'@>=(@term, @term) Test the ordering of two terms. 
'@>='(X, Y) is true iff", "length, after, subatom): s, p = self.data.pop(0) b = atom.name.index(s, p) l =", "is valid only when x is a float.''' if not isinstance(x, float): from", "the functor name of Term and whose tail is a list of the", "Repeat_0(BuiltIn): '''repeat repeat is true. repeat is re-executable. ''' def execute(self): return True", "self.unify(elements, l) if isinstance(term, Variable): # elements is a list if elements.name ==", "\"\"\"'\\\\='(@term, @term) If X and Y are NSTO (Not Subject To Occur-check) then", "to the database after all existing clauses of the procedure whose predicate is", "Compound('/', Atomic(head.name), Atomic(head.arity)) self.throw_permission_error('modify', 'static_procedure', pi) self.clauses_to_unify = [] self.clauses_to_remove = [] procedure", "Other arithmetic functors (ISO 9.3) ### Bitwise functors (ISO 9.4) ### class Is_2(BuiltIn):", "'repeat/0' : Repeat_0, # Atomic term processing (ISO 8.16) 'atom_length/2' : AtomLength_2, 'atom_concat/3'", "self.unify(term, c) return False class Arg_3(BuiltIn): '''arg(+integer, +compound_term, ?term) arg(N, Term, Arg) is", "if not isinstance(x, int): from ..core import PrologTypeError raise PrologTypeError('integer', Atomic(x)) return ~x", "core # for flags if flag.name not in core._FLAGS: self.throw_domain_error('prolog_flag', flag) f =", "of the atom Atom_12 are the result of concatenating the characters of the", "is undefined.''' def execute(self, template, goal, instances): fvs = free_variable_set(goal, template) self.witness =", "into three pieces, AtomL, Sub_atom, and AtomR, such that Before is the number", "self.throw_instantiation_error() if not islist(codelist) and not ispartiallist(codelist): self.throw_type_error('list', codelist) for element in codelist.as_list():", "CharCode_2(BuiltIn): '''char_code(+character, ?character_code)\\nchar_code(-character, +character_code) char_code(Char, Code) is true iff the character code for", "set of variables defined recursively as 
follows: * if T is an atomic", "l) if isinstance(term, Compound): l = List.from_list([Atomic(term.name)] + list(term.value[1:])) return self.unify(elements, l) if", "8.4) ### class TermLessThanOrEqual_2(BuiltIn): \"\"\"'@=<'(@term, @term) Test the ordering of two terms. '@=<'(X,", "< 0: self.throw_domain_error('not_less_than_zero', arity) if isvariable(term) and not isinstance(name, Atomic): self.throw_type_error('atomic', name) if", "== ':-/2': head = clause.value[1] body = clause.value[2] else: head = clause body", "def bitnot(x): '''Redefined w.r.t. Python because we need to manage TypeError errors (e.g.", "head = clause body = Atomic.TRUE if isvariable(head): self.throw_instantiation_error() if isnumber(head): self.throw_type_error('callable', head)", "isvariable(y) and x.name == '_' and y.name == '_'): return True return x", "class CopyTerm_2(BuiltIn): '''copy_term(?term, ?term) copy_term(Term_1, Term_2) is true iff Term_2 unifies with a", "else: term = deref(term) if isinstance(term, Atomic): return set() s = set() if", "1: self.throw_type_error('atom', elements.head) if islist(elements) and isinstance(elements.head, Compound) and len(elements) > 1: self.throw_type_error('atomic',", "body = Atomic.TRUE return self.pick_clause(head, body) def pick_clause(self, head, body): if not self.clauses_to_remove:", "flag.name not in core._FLAGS: self.throw_domain_error('prolog_flag', flag) f = core._FLAGS[flag.name] if len(f.allowed) == 1:", "(e.g. for log(0)) on our own.''' if not x: from ..core import PrologEvaluationError", "'=\\='(E1, E2) is true iff evaluating E1 and E2 as expressions the corresponding", "f, i = modf(x) return f def power(x, y): '''Redefined w.r.t. 
Python because", "def execute(self, x, y): return x > y class TermGreaterThanOrEqual_2(BuiltIn): \"\"\"'@>=(@term, @term) Test", "execute(self, term, name, arity): if isvariable(term) and isvariable(name): self.throw_instantiation_error() if isvariable(term) and isvariable(arity):", "the same type of number as its input.''' if not x: return 0", "Halt_1(BuiltIn): '''halt(+integer) halt(X) neither succeeds nor fails. It has the side effect of", "self.throw_type_error('callable', body) self.clauses = [] procedure = self.kb.search(head) if not procedure: return False", "c = Compound(name.name, *t) return self.unify(term, c) return False class Arg_3(BuiltIn): '''arg(+integer, +compound_term,", "functor = search_evaluable_functor(pi) if not functor: from ..core import PrologTypeError raise PrologTypeError('evaluable', Atomic(pi))", "returning to whatever system invoked Prolog, passing the value of X as a", "'is/2' : Is_2, # Arithmetic comparison (ISO 8.7) '=:=/2' : ArithmeticEqual_2, '=\\=/2' :", "of atom Atom.''' def execute(self, atom, charlist): if not isvariable(atom) and not isatom(atom):", "only in the context of the copy_term/2 built-in mgu = core.unify(t2, t) if", "value): if isvariable(flag) or isvariable(value): self.throw_instantiation_error() if not isvariable(flag) and not isatom(flag): self.throw_type_error('atom',", "setof/3 assembles as a list the solutions of a goal for each different", "[chr(code.value) for code in codelist.as_list()] return atom.name == ''.join(chars) class CharCode_2(BuiltIn): '''char_code(+character, ?character_code)\\nchar_code(-character,", "of the arguments of Term.\"\"\" def execute(self, term, elements): if isvariable(term) and ispartiallist(elements):", "= Compound('/', Atomic(n), Atomic(int(a))) from .. import core if core.unify(pi, indicator) is not", "goal for each different instantiation of the free variables in that goal. 
Each", "core.unify(ww, self.witness) ww.apply(subst) t.apply(subst) self.substitution.update(subst) t_list = List.from_list(t_list) self.s = s_next return self.unify(t_list,", "not isinstance(y, int): from ..core import PrologTypeError raise PrologTypeError('integer', Atomic(s)) return x &", "whose only element is Term, or * Term is a compound term and", "None and core.unify(b, body) is not None): self.clauses.append(Compound('clause', h, b)) return self.pick_clause(head, body)", "else: chars = [chr(code.value) for code in codelist.as_list()] return atom.name == ''.join(chars) class", "copy of t1 and t2 retain validity # only in the context of", "the set A or I or F and is false if X is", "= ground(deepcopy(template), caller.currsubst()) #values.append(core.renamed_copy(v)) values.append(v._copy_term()) result = caller.solve_next() values = List.EMPTY_LIST if not", "for i in range(arity.value)) c = Compound(name.name, *t) return self.unify(term, c) return False", "isvariable(term) and not isatom(name) and arity.value > 0: self.throw_type_error('atom', name) if isinstance(term, Atomic):", "pi): if isvariable(pi): self.throw_instantiation_error() if pi.name == '/' and pi.arity == 2: name,", "// y def module(x, y): '''Redefined w.r.t. Python because in ISO Prolog mod(x,", ": bitor, '\\\\/1' : bitnot} return d.get(name) def divide(x, y): '''Redefined w.r.t. Python", "Occur-check) then '='(X, Y) is true iff X and Y are unifiable.\"\"\" def", "the order in which each list is found is undefined.''' def execute(self, template,", "the existential variables set of the term G * else EV is the", "if isnumber(before) and before.value < 0: self.throw_domain_error('not_less_than_zero', before) if isnumber(length) and length.value <", "y class TermGreaterThanOrEqual_2(BuiltIn): \"\"\"'@>=(@term, @term) Test the ordering of two terms. 
'@>='(X, Y)", "self.throw_instantiation_error() if not isinstance(n.value, int): self.throw_type_error('integer', n) if not isinstance(term, Compound): self.throw_type_error('compound', term)", "pi = Compound('/', Atomic(head.name), Atomic(head.arity)) self.throw_permission_error('modify', 'static_procedure', pi) self.kb.assert_clause(clause, append=True) return True class", "only element is Term, or * Term is a compound term and List", "operator d = {'+/2' : operator.add, '*/2' : operator.mul, '-/2' : operator.sub, '-/1'", "modf f, i = modf(x) return i def float_fractional_part(x): '''Redefined w.r.t. Python because", "= [] if s == List.EMPTY_LIST else s.as_list() solutions = list(set(solutions)) solutions.sort() return", "core.Caller() caller._kb = self.kb result = caller.solve(term) return not result class Repeat_0(BuiltIn): '''repeat", "self.throw_instantiation_error() if isvariable(number): for element in charlist.as_list(): if isvariable(element): self.throw_instantiation_error() if not isvariable(number)", "def leftshift(n, s): '''Redefined w.r.t. 
Python because we need to manage TypeError errors", "def execute(self, x): if isvariable(x): self.throw_instantiation_error() if not isvariable(x) and not isnumber(x) and", "else: if not isvariant(a1, a2): return False return True return False PREDICATES =", "self.reset_substitution() if not self.s: return False return self.pick_bag(template, goal, instances) def pick_bag(self, template,", "iff Y preceeds X or Y and X are identical terms.\"\"\" def execute(self,", "each of the arguments of T.''' from ..core import deref if isinstance(term, Variable):", "variable_set(v) est = existential_variable_set(t) return vst.difference(vsv.union(est)) # TODO This should be distributed onto", "ArithmeticGreaterThanOrEqual_2, # Clause retrival and information (ISO 8.8) 'clause/2' : Clause_2, 'current_predicate/1' :", "and is false if X is a member of the set V, A,", "self.kb.assert_clause(clause, append=False) return True class Assertz_1(BuiltIn): '''assertz(@clause) assertz(Clause) is true. It is used", "len(atom1.name) self.data = [(s[:i], s[i:], s)] else: n1 = atom1.name n2 = atom2.name", "Length) is true iff integer Length equals the number of characters of the", "term T is a term defined recursively as follows: * if T unifies", "is true iff: * the predicate of Head is public, and * there", "code) if not isvariable(code): try: chr(code.value) except UnicodeDecodeError: self.throw_representation_error(code) if isvariable(char): c =", "solutions of Goal for each different instantiation of the free variables in it.", "raise PrologTypeError('integer', Atomic(n)) if not isinstance(y, int): from ..core import PrologTypeError raise PrologTypeError('integer',", "in charlist.as_list(): if isatom(element) and len(element.name) != 1: self.throw_type_error('character', element) if isvariable(number) or", "union of the variable set of V and the existential variables set of", "'''nonvar(@term) nonvar(X) is true iff X is not a member of the set", "term, then Sv is the empty set * else if T 
is a", "= chr(code.value) return self.unify(char, Atomic(c)) elif isvariable(code): c = ord(char.name) return self.unify(code, Atomic(c))", "execute(self, result, expression): if isvariable(expression): self.throw_instantiation_error() c = evaluate_expression(expression) return self.unify(result, Atomic(c)) def", "self.data: return False return self.pick_data(atom1, atom2, atom12) def pick_data(self, atom1, atom2, atom12): c", "None ### ### Type testing (ISO 8.3) ### class Var_1(BuiltIn): '''var(@term) var(X) is", "self.throw_type_error('atom', subatom) if (not isvariable(before) and not (isnumber(before) and isinstance(before.value, int))): self.throw_type_error('integer', before)", "procedure with a clause Clause which unifies with Head :- Body. It is", "of the set I.''' def execute(self, x): return x.arity == 0 and isinstance(x.value,", "to which a variable X not occurring in Template or Goal would be", "goal) if (not isvariable(instances) and (not islist(instances) and not ispartiallist(instances))): self.throw_type_error('list', instances) from", "if isinstance(term, Variable): if not term.value: value = mgu.get(term.name) if value: return value", "FIXME \\+ does not work because of what is probably a parser #", "the Nth argument of Term is Arg.''' def execute(self, n, term, arg): if", "the database contains at least one dynamic procedure with a clause Clause which", "for f in core._FLAGS.values() if core.unify(flag, Atomic(f.name)) is not None} if not self.flags:", "Python because in ISO Prolog sign(x) must return the same type of number", "def evaluate_expression(term): # TODO No overflow/underflow errors # TODO No undefined errors if", "if isatom(flag) and not core._FLAGS[flag.name]: self.throw_domain_error('prolog_flag', flag) self.flags = {f for f in", "Prolog.''' def execute(self): exit(0) class Halt_1(BuiltIn): '''halt(+integer) halt(X) neither succeeds nor fails. 
It", "pick_clause(self, head, body): if not self.clauses: return False c = self.clauses.pop(0) return self.unify(Compound('clause',", "from math import modf f, i = modf(x) return i def float_fractional_part(x): '''Redefined", "return vst.difference(vsv.union(est)) # TODO This should be distributed onto the Term hierarchy classes", "if not y: from ..core import PrologEvaluationError raise PrologEvaluationError('zero_divisor') if not isinstance(x, int):", "len(element.name) != 1: self.throw_type_error('character', element) if isvariable(atom): from ..core import deref chars =", "-atom) atom_concat(Atom_1, Atom_2, Atom_12) is true iff characters of the name of the", "of T and BV where BV is a set of variables defined as", "use BuiltIn.unify because the bindings # between the renamed copy of t1 and", "if isvariable(atom): if isvariable(charlist): self.throw_instantiation_error() if not islist(charlist) and not ispartiallist(charlist): self.throw_type_error('list', charlist)", "term.isfree(): return {term} else: term = deref(term) if isinstance(term, Atomic): return set() s", "v2 class ArithmeticNotEqual_2(BuiltIn): \"\"\"'=\\='(@evaluable, @evaluable) '=\\='(E1, E2) is true iff evaluating E1 and", "return False return self.pick_data(atom, before, length, after, subatom) def pick_data(self, atom, before, length,", "class Halt_1(BuiltIn): '''halt(+integer) halt(X) neither succeeds nor fails. It has the side effect", "== '^' and term.arity == 2: return iterated_goal_term(term.value[2]) return term def isvariant(t, v):", "F and is false if X is a member of the set V", "'''Redefined w.r.t. 
Python because in ISO Prolog div(x, y) with x and y", "Token n = PrologParser(s + '.').read_term() return self.unify(number, n) except InvalidTermException as e:", "Clause_2, 'current_predicate/1' : CurrentPredicate_1, # Clause creation and destruction (ISO 8.9) 'asserta/1' :", "= evaluate_expression(e1) v2 = evaluate_expression(e2) return v1 <= v2 class ArithmeticGreaterThan_2(BuiltIn): \"\"\"'>'(@evaluable, @evaluable)", "with a Prolog flag to be altered.''' def execute(self, flag, value): if isvariable(flag)", "by # current_predicate/1 is implementation dependent i = self.indicators.pop() return self.unify(pi, i) ###", "Goal would be instantiated by successive re-executions of \"call(Goal), X=Template\" after systematic replacement", "y class TermNotIdentical_2(BuiltIn): \"\"\"'\\=='(@term, @term) Test the ordering of two terms. '\\=='(X, Y)", "self.throw_type_error('list', charlist) for element in charlist.as_list(): if isvariable(element): self.throw_instantiation_error() if isatom(element) and len(element.name)", "atom.name == ''.join(chars) class AtomCodes_2(BuiltIn): '''atom_codes(+atom, ?character_code_list)\\natom_codes(-atom, +character_code_list) atom_codes(Atom, List) is true iff", "and not isnumber(number): self.throw_type_error('number', number) if isvariable(number) and not islist(codelist) and not ispartiallist(codelist):", "the atom Atom_1.''' def execute(self, atom1, atom2, atom12): if isvariable(atom1) and isvariable(atom12): self.throw_instantiation_error()", "= list(str(number.value)) # FIXME this should use write_canonical/1 lst = [Atomic(c) for c", "and arity.value < 0: self.throw_domain_error('not_less_than_zero', arity) if isvariable(term) and not isinstance(name, Atomic): self.throw_type_error('atomic',", "processing (ISO 8.16) 'atom_length/2' : AtomLength_2, 'atom_concat/3' : AtomConcat_3, 'sub_atom/5' : SubAtom_5, 'atom_chars/2'", "E2 as expressions the corresponding arithmetic value of E1 is greater than or", "from ..core import 
PrologEvaluationError raise PrologEvaluationError('undefined') from math import sqrt return sqrt(x) def", "error on access permission to a user-defined # procedure is handled directly by", "X as a message.''' def execute(self, x): if isvariable(x): self.throw_instantiation_error() if not isvariable(x)", "self.unify(codelist, List.from_list(codes)) else: chars = [chr(code.value) for code in codelist.as_list()] return atom.name ==", "arithmetic value of E1 is less than the corresponding arithmetic value of E2.\"\"\"", "from ..parser import PrologParser, InvalidTermException s = ''.join([char.name for char in charlist.as_list()]) try:", "or isvariable(e2): self.throw_instantiation_error() v1 = evaluate_expression(e1) v2 = evaluate_expression(e2) return v1 > v2", "# Logic and control (ISO 8.15) # FIXME \\+ does not work because", "unifies with a term T which is a renamed copy of Term_1.''' def", "and x.name == '_' and y.name == '_'): return True return x !=", "TermGreaterThanOrEqual_2(BuiltIn): \"\"\"'@>=(@term, @term) Test the ordering of two terms. 
'@>='(X, Y) is true", "if isvariable(atom2) and isvariable(atom12): self.throw_instantiation_error() if not isvariable(atom1) and not isatom(atom1): self.throw_type_error('atom', atom1)", "X is a member of the set V or CT.''' def execute(self, x):", "charlist) for element in charlist.as_list(): if isvariable(element): self.throw_instantiation_error() if isatom(element) and len(element.name) !=", "isvariable(length) and not (isnumber(length) and isinstance(length.value, int))): self.throw_type_error('integer', length) if (not isvariable(after) and", "x, y): return x < y class TermGreaterThan_2(BuiltIn): \"\"\"'@>(@term, @term) Test the ordering", "as a list the solutions of a goal for each different instantiation of", "= clause.value[1] body = clause.value[2] else: head = clause body = Atomic.TRUE return", "a set of variables defined recursively as follows: * if T is an", "atom2, atom12): c = self.data.pop(0) return (self.unify(atom1, Atomic(c[0])) and self.unify(atom2, Atomic(c[1])) and self.unify(atom12,", "variables set of the term G * else EV is the empty set.'''", "self.data = [(s[:i], s[i:], s)] else: n1 = atom1.name n2 = atom2.name self.data", "'number_codes/2' : NumberCodes_2, # Implementation defined hooks (ISO 8.17) 'set_prolog_flag/2' : SetPrologFlag_2, 'current_prolog_flag/2'", "the atom Atom_12 are the result of concatenating the characters of the name", "False from .. import core for clause in procedure.clauses(): h, b = convert_clause_to_term(clause.head(),", "return self.pick_bag(template, goal, instances) def reexecute(self, template, goal, instances): self.reset_substitution() if not self.s:", "class Clause_2(BuiltIn): '''clause(+head, ?callable_term) clause(Head, Body) is true iff: * the predicate of", "message.''' def execute(self, x): if isvariable(x): self.throw_instantiation_error() if not isvariable(x) and not isnumber(x)", "self.throw_permission_error('access', 'private_procedure', pi) from .. 
import core for clause in procedure.clauses(): h, b", "s.as_list() class Setof_3(Bagof_3): '''setof(?term, +callable_term, ?list) setof/3 assembles as a list the solutions", "s): return [] if s == List.EMPTY_LIST else s.as_list() class Setof_3(Bagof_3): '''setof(?term, +callable_term,", "is equivalent to intdiv(x, y). Also, we need to manage ZeroDivisionError errors on", "the processor, and Value is the value currently associated with it.''' def execute(self,", "TypeError errors (e.g. x or y as float) on our own.''' if not", "of the user-defined procedures in the database.''' def execute(self, pi): if not isvariable(pi)", "isinstance(length.value, int))): self.throw_type_error('integer', length) if isnumber(length) and length.value < 0: self.throw_domain_error('not_less_than_zero', length) size", "context of the copy_term/2 built-in if t2.name in mgu: # Still preserve the", "?atomic, ?integer) functor(Term, Name, Arity) is true iff: * Term is a compound", "the free variables in it. The elements of each list are in order", "+ 1): self.data.append((n[start:i], start)) start += 1 if isinstance(before, Atomic): self.data = [(d,", "math import modf f, i = modf(x) return f def power(x, y): '''Redefined", "self.throw_type_error('list', elements) if isvariable(term) and islist(elements) and isvariable(elements.head): self.throw_instantiation_error() if islist(elements) and not", "and not islist(charlist) and not ispartiallist(charlist): self.throw_type_error('list', charlist) if islist(charlist): for element in", "8.10) 'findall/3' : Findall_3, 'bagof/3' : Bagof_3, 'setof/3' : Setof_3, # Logic and", "raise PrologTypeError('evaluable', Atomic(pi)) return functor(*args) from ..core import PrologTypeError raise PrologTypeError('number', term) def", "error if isvariable(term) and arity.value < 0: self.throw_domain_error('not_less_than_zero', arity) if isvariable(term) and not", "arithmetic value of E2.\"\"\" def execute(self, e1, e2): if isvariable(e1) or isvariable(e2): 
self.throw_instantiation_error()", "True return False PREDICATES = { # Term unification (ISO 8.2) '=/2' :", "element in charlist.as_list(): if isatom(element) and len(element.name) != 1: self.throw_type_error('character', element) if isvariable(number)", "union of the variable sets for each of the arguments of T.''' from", "goal, instances) def pick_bag(self, template, goal, instances): wt = self.s[0] wt_list = [e", "def logarithm(x): '''Redefined w.r.t. Python because we need to manage ValueError errors (e.g.", "self.throw_type_error('atom', flag) if isatom(flag) and not core._FLAGS[flag.name]: self.throw_domain_error('prolog_flag', flag) self.flags = {f for", "is used to add Clause to the database after all existing clauses of", "self.unify(term, name) if isatom(name) and arity.value > 0: t = (Variable('_') for i", "Asserta_1(BuiltIn): '''asserta(@clause) asserta(Clause) is true. It is used to add Clause to the", "Setof_3, # Logic and control (ISO 8.15) # FIXME \\+ does not work", "PrologEvaluationError('zero_divisor') if isinstance(x, int) and isinstance(y, int): return x // y return x", "return self.unify(elements, l) if isinstance(term, Variable): # elements is a list if elements.name", "Atomic(x)) from math import modf f, i = modf(x) return i def float_fractional_part(x):", "codes = [Atomic(ord(char)) for char in atom.name] return self.unify(codelist, List.from_list(codes)) else: chars =", "isatom(elements.head) and len(elements) > 1: self.throw_type_error('atom', elements.head) if islist(elements) and isinstance(elements.head, Compound) and", "X is a member of the set CT.''' def execute(self, x): return isinstance(x,", "and returning to whatever system invoked Prolog, passing the value of X as", "v): '''The free variable set FV of a term T with respect to", "self.throw_instantiation_error() if isvariable(atom2) and isvariable(atom12): self.throw_instantiation_error() if not isvariable(atom1) and not isatom(atom1): self.throw_type_error('atom',", "'arg/3' : 
Arg_3, '=../2' : Univ_2, 'copy_term/2' : CopyTerm_2, # Arithmetic evaluation (ISO", "X and Y are identical terms.\"\"\" def execute(self, x, y): # The Python", "is Name and arity Arity, or * Term is an atomic term equal", "### class Var_1(BuiltIn): '''var(@term) var(X) is true iff X is a member of", "'_'): return False return x >= y ### ### Term creation and decomposition", "number) if isvariable(number) and not islist(charlist) and not ispartiallist(charlist): self.throw_type_error('list', charlist) if islist(charlist):", "is the number of characters of the name of Sub_atom, and After is", "'=='(X, Y) is true iff X and Y are identical terms.\"\"\" def execute(self,", "List.from_list(chars)) else: chars = [c.name for c in charlist.as_list()] return atom.name == ''.join(chars)", "coming # from a clause renaming) temp = mgu[t2.name] mgu.reduce() mgu.update({t2.name : temp})", "and Y are NSTO (Not Subject To Occur-check) then '\\\\='(X, Y) is true", "self.data.pop(0) return (self.unify(atom1, Atomic(c[0])) and self.unify(atom2, Atomic(c[1])) and self.unify(atom12, Atomic(c[2]))) class SubAtom_5(BuiltIn): '''sub_atom(+atom,", "'abolish/1' : Abolish_1, # All solutions (ISO 8.10) 'findall/3' : Findall_3, 'bagof/3' :", "y def sign(x): '''Redefined w.r.t. 
Python because in ISO Prolog sign(x) must return", "evaluating E1 and E2 as expressions the corresponding arithmetic values are not equal.\"\"\"", "execute(self, atom1, atom2, atom12): if isvariable(atom1) and isvariable(atom12): self.throw_instantiation_error() if isvariable(atom2) and isvariable(atom12):", "?term) current_prolog_flag(Flag, Value) is true iff Flag is a flag supported by the", "element is Term, or * Term is a compound term and List is", "or * Term is an atomic term equal to Name and Arity is", "evaluate_expression(e2) return v1 != v2 class ArithmeticLessThan_2(BuiltIn): \"\"\"'<'(@evaluable, @evaluable) '<'(E1, E2) is true", "if len(f.allowed) == 1: self.throw_permission_error('modify', 'flag', flag) if value.name not in f.allowed: culprit", "Abolish_1, # All solutions (ISO 8.10) 'findall/3' : Findall_3, 'bagof/3' : Bagof_3, 'setof/3'", "Nth argument of Term is Arg.''' def execute(self, n, term, arg): if isvariable(n)", "..core import PrologInstantiationError raise PrologInstantiationError() if term.arity == 0 and term._isnumber(): return term.value", "if isinstance(term, Atomic): l = List(term) return self.unify(elements, l) if isinstance(term, Compound): l", "isinstance(term, Compound): for arg in term.value[1:]: s.update(variable_set(arg)) else: # a list for e", "== List.EMPTY_LIST else s.as_list() class Setof_3(Bagof_3): '''setof(?term, +callable_term, ?list) setof/3 assembles as a", "term.as_list(): s.update(variable_set(e)) return s def existential_variable_set(term): '''The existential variables set EV of a", "self.throw_type_error('integer', arity) # TODO Missing max_arity related error if isvariable(term) and arity.value <", "elements is a list if elements.name == '.' 
and elements.arity == 2: if", "if not isvariable(number) and not isnumber(number): self.throw_type_error('number', number) if isvariable(number) and not islist(codelist)", "to a # goal and on access permission to a user-defined # procedure", "return self.unify(atom, Atomic(''.join(chars))) elif isvariable(codelist) or ispartiallist(codelist): codes = [Atomic(ord(char)) for char in", "raise PrologTypeError('float', Atomic(x)) from math import modf f, i = modf(x) return f", "not isinstance(x.value, int): self.throw_type_error('integer', x) exit(x.value) # Utility functions def free_variable_set(t, v): '''The", "head, body): if not self.clauses_to_remove: return False self.kb.retract(self.clauses_to_remove.pop(0)) c = self.clauses_to_unify.pop(0) return self.unify(Compound('clause',", "Atomic(s)) return x | y def bitnot(x): '''Redefined w.r.t. Python because we need", "TODO No undefined errors if isvariable(term): from ..core import PrologInstantiationError raise PrologInstantiationError() if", "Type testing (ISO 8.3) 'var/1' : Var_1, 'atom/1' : Atom_1, 'integer/1' : Integer_1,", "Term and whose tail is a list of the arguments of Term.\"\"\" def", "..core import PrologTypeError raise PrologTypeError('integer', Atomic(y)) return x % y def sign(x): '''Redefined", "..core import PrologTypeError raise PrologTypeError('integer', Atomic(n)) if not isinstance(y, int): from ..core import", "list of values to which a variable X not occurring in Template or", "is a list whose elements correspond to the successive characters of the name", "execute(self, atom, codelist): if not isvariable(atom) and not isatom(atom): self.throw_type_error('atom', atom) if isvariable(atom):", "the copy_term/2 built-in if t2.name in mgu: # Still preserve the binding for", ": NumberChars_2, 'number_codes/2' : NumberCodes_2, # Implementation defined hooks (ISO 8.17) 'set_prolog_flag/2' :", "I.''' def execute(self, x): return x.arity == 0 and isinstance(x.value, int) class Float_1(BuiltIn):", 
"List(term) return self.unify(elements, l) if isinstance(term, Compound): l = List.from_list([Atomic(term.name)] + list(term.value[1:])) return", "comparison (ISO 8.4) ### class TermLessThanOrEqual_2(BuiltIn): \"\"\"'@=<'(@term, @term) Test the ordering of two", "return False # the order in which predicate indicators are found by #", "return self.unify(Compound('clause', head, body), c) def convert_clause_to_term(head, body): return (convert_to_term(head), convert_to_term(body)) def convert_to_term(head):", "if isvariable(number) and not islist(charlist) and not ispartiallist(charlist): self.throw_type_error('list', charlist) if islist(charlist): for", "BV is a set of variables defined as the union of the variable", "a: return False else: bijection[a1] = a2 else: if not isvariant(a1, a2): return", "true iff X is a member of the set CT.''' def execute(self, x):", "a sorted list, but the order in which each list is found is", "the procedure identified by Pred had never existed.''' def execute(self, pi): if isvariable(pi):", "after systematic replacement of all variables in X by new variables.''' def execute(self,", "that the latter term results from replacing each variable X in the former", "Y) is true iff X and Y are unifiable.\"\"\" def execute(self, x, y):", "and y.name == '_'): return False return x == y class TermNotIdentical_2(BuiltIn): \"\"\"'\\=='(@term,", "isvariable(a2) and not a1.name.startswith('_'): a = bijection.get(a1) if a is not None and", "not isatom(atom): self.throw_type_error('atom', atom) if isvariable(atom): if isvariable(charlist): self.throw_instantiation_error() if not islist(charlist) and", "and isvariable(name): self.throw_instantiation_error() if isvariable(term) and isvariable(arity): self.throw_instantiation_error() if isvariable(term) and not isinstance(arity.value,", "self.throw_domain_error('prolog_flag', flag) self.flags = {f for f in core._FLAGS.values() if core.unify(flag, Atomic(f.name)) is", "semantics for anonymous variables if 
(isvariable(x) and isvariable(y) and x.name == '_' and", "isinstance(n, int): from ..core import PrologTypeError raise PrologTypeError('integer', Atomic(n)) if not isinstance(s, int):", "pi) self.indicators = [] for i in self.kb: n, a = i.split('/') indicator", "self.throw_domain_error('not_less_than_zero', before) if isnumber(length) and length.value < 0: self.throw_domain_error('not_less_than_zero', length) if isnumber(after) and", "if not islist(codelist) and not ispartiallist(codelist): self.throw_type_error('list', codelist) for element in codelist.as_list(): if", "a set of variables defined as the set difference of the variable set", "not isatom(flag): self.throw_type_error('atom', flag) from .. import core # for flags if flag.name", "and isvariable(arity): self.throw_instantiation_error() if isvariable(term) and not isinstance(arity.value, int): self.throw_type_error('integer', arity) # TODO", "PrologTypeError raise PrologTypeError('integer', Atomic(y)) return x % y def sign(x): '''Redefined w.r.t. Python", "self.data = [(d, p) for (d, p) in self.data if len(d) == length.value]", "import log return log(x) def squareroot(x): '''Redefined w.r.t. Python because we need to", "e in self.s if e not in wt_list] from .. 
import core for", "self.pick_clause(head, body) def reexecute(self, clause): self.reset_substitution() if clause.predicate_indicator() == ':-/2': head = clause.value[1]", "core caller = core.Caller() caller._kb = self.kb result = caller.solve(term) return not result", "the characters of the name of the atom Atom_1.''' def execute(self, atom1, atom2,", "atom.name] return self.unify(charlist, List.from_list(chars)) else: chars = [c.name for c in charlist.as_list()] return", "associated with a Prolog flag to be altered.''' def execute(self, flag, value): if", "used to add Clause to the database before all existing clauses of the", "flag, value) self.throw_domain_error('flag_value', culprit) core._FLAGS[flag.name] = f._replace(value=value.name) return True class CurrentPrologFlag_2(BuiltIn): '''current_prolog_flag(?flag, ?term)", "def execute(self, x): return isvariable(x) class Atom_1(BuiltIn): '''atom(@term) atom(X) is true iff X", "### ### Implementation defined hooks (ISO 8.17) ### class SetPrologFlag_2(BuiltIn): '''set_prolog_flag(+flag, @nonvar) A", "before.value < 0: self.throw_domain_error('not_less_than_zero', before) if isnumber(length) and length.value < 0: self.throw_domain_error('not_less_than_zero', length)", "output.''' def execute(self, number, codelist): if isvariable(number) and ispartiallist(codelist): self.throw_instantiation_error() if isvariable(number): for", "== '_'): return True return x <= y class TermIdentical_2(BuiltIn): \"\"\"'=='(@term, @term) Test", "a compound term with a functor whose identifier is Name and arity Arity,", "def pick_clause(self, head, body): if not self.clauses: return False c = self.clauses.pop(0) return", "this should use write_canonical/1 lst = [Atomic(c) for c in chars] return self.unify(charlist,", "v1 = evaluate_expression(e1) v2 = evaluate_expression(e2) return v1 >= v2 ### ### Clause", "in order of solution, but the order in which each list is found", "lst = [Atomic(ord(c)) for c in chars] return 
self.unify(codelist, List.from_list(lst)) ### ### Implementation", "atom, length): if isvariable(atom): self.throw_instantiation_error() if not isatom(atom): self.throw_type_error('atom', atom) if (not isvariable(length)", "'''Redefined w.r.t. Python because we need to manage ValueError errors (e.g. for x", "and E2 as expressions the corresponding arithmetic value of E1 is less than", "return True class Retract_1(BuiltIn): '''retract(+clause) retract(Clause) is true iff the database contains at", "by the database from ..builtin import search_builtin if search_builtin(head): pi = Compound('/', Atomic(head.name),", "defined only when y is an integer, and always returns a float. Also,", "isvariable(number) and ispartiallist(codelist): self.throw_instantiation_error() if isvariable(number): for element in codelist.as_list(): if isvariable(element): self.throw_instantiation_error()", "built-in mgu = core.unify(t2, t) if mgu is not None: if mgu: t2.apply(mgu)", "wt_list] from .. import core for wwtt, t in zip(wt_list, t_list): ww =", "isvariable(atom1): s = atom12.name if not s.endswith(atom2.name): return False else: i = s.index(atom2.name)", "(ISO 8.9) ### class Asserta_1(BuiltIn): '''asserta(@clause) asserta(Clause) is true. It is used to", "integer Length equals the number of characters of the name of the atom", "'/' and pi.arity == 2): self.throw_type_error('predicate_indicator', pi) self.indicators = [] for i in", "procedure._public: pi = Compound('/', Atomic(head.name), Atomic(head.arity)) self.throw_permission_error('access', 'private_procedure', pi) from .. 
import core", "a2 else: if not isvariant(a1, a2): return False return True return False PREDICATES", "def execute(self, pi): if not isvariable(pi) and not (pi.name == '/' and pi.arity", "else: i = s.index(atom2.name) self.data = [(s[:i], s[i:], s)] elif isvariable(atom2): s =", "Term creation and decomposition (ISO 8.5) 'functor/3' : Functor_3, 'arg/3' : Arg_3, '=../2'", "> 0: t = (Variable('_') for i in range(arity.value)) c = Compound(name.name, *t)", "if isvariable(char) and isvariable(code): self.throw_instantiation_error() if not isvariable(char) and len(char.name) != 1: self.throw_type_error('character',", "return False c = self.clauses.pop(0) return self.unify(Compound('clause', head, body), c) def convert_clause_to_term(head, body):", "False return self.pick_bag(template, goal, instances) def reexecute(self, template, goal, instances): self.reset_substitution() if not", "Y or X and Y are identical terms.\"\"\" def execute(self, x, y): #", "to whatever system invoked Prolog, passing the value of X as a message.'''", "(ISO 8.2) ### class Unify_2(BuiltIn): \"\"\"'='(?term, ?term) If X and Y are NSTO", "import PrologEvaluationError raise PrologEvaluationError('undefined') if not x and y < 0: from ..core", "Clause to the database before all existing clauses of the procedure whose predicate", "\"call(Goal), X=Template\" after systematic replacement of all variables in X by new variables.'''", "self.unify(charlist, List.from_list(chars)) else: chars = [c.name for c in charlist.as_list()] return atom.name ==", "List is a list whose elements are the character codes corresponding to a", "return float(x ** y) def logarithm(x): '''Redefined w.r.t. 
Python because we need to", "iff: * Term is a compound term with a functor whose identifier is", ": module, 'rem/2' : module, 'floor/1' : math.floor, 'round/1' : round, 'ceiling/1' :", "= clause.value[2] else: head = clause body = Atomic.TRUE return self.pick_clause(head, body) def", "atomic term, then Sv is the empty set * else if T is", "Subject To Occur-check) then '\\\\='(X, Y) is true iff X and Y are", "self.throw_type_error('integer', x) exit(x.value) # Utility functions def free_variable_set(t, v): '''The free variable set", "elif len(elements) > 1: name = elements.head.name t = Compound(name, *elements.as_list()[1:]) return self.unify(term,", "isinstance(x, int): from ..core import PrologTypeError raise PrologTypeError('integer', Atomic(x)) return ~x ### ###", "Number which could be output.''' def execute(self, number, charlist): if isvariable(number) and ispartiallist(charlist):", "from ..core import PrologEvaluationError raise PrologEvaluationError('zero_divisor') if not isinstance(x, int): from ..core import", "isinstance(term, Variable): if isinstance(name, Atomic) and arity.value == 0: return self.unify(term, name) if", "or * Term is a compound term and List is the list whose", "c) return False class Arg_3(BuiltIn): '''arg(+integer, +compound_term, ?term) arg(N, Term, Arg) is true", "# '\\\\+'(Term) is true iff call(Term) is false.\"\"\" def execute(self, term): if isvariable(term):", "for x < 0) on our own.''' if x < 0: from ..core", "'sin/1' : math.sin, 'cos/1' : math.cos, 'atan/1' : math.atan, 'exp/1' : math.exp, 'log/1'", "unifies with Head :- Body. It is used to remove those unifying clauses", "leftshift(n, s): '''Redefined w.r.t. 
Python because we need to manage TypeError errors (e.g.", "value of E1 is greater than the corresponding arithmetic value of E2.\"\"\" def", "of variables defined as the set difference of the variable set of T", "is true iff atom Atom can be broken into three pieces, AtomL, Sub_atom,", "value of each element is the character code for the corresponding character of", "int): from ..core import PrologTypeError raise PrologTypeError('integer', Atomic(x)) return ~x ### ### Arithmetic", "atom12): self.reset_substitution() if not self.data: return False return self.pick_data(atom1, atom2, atom12) def pick_data(self,", "if isvariable(head): self.throw_instantiation_error() if not iscallable(head): self.throw_type_error('callable', head) if not (isvariable(body) or iscallable(body)):", "if not self.flags: return False return self.pick_flag(flag, value) def reexecute(self, flag, value): self.reset_substitution()", "mod(x, y) is valid only when x and y are integers. Also, we", "Findall_3, 'bagof/3' : Bagof_3, 'setof/3' : Setof_3, # Logic and control (ISO 8.15)", "not (isvariable(body) or iscallable(body)): self.throw_type_error('callable', body) self.clauses = [] procedure = self.kb.search(head) if", "ArithmeticLessThan_2(BuiltIn): \"\"\"'<'(@evaluable, @evaluable) '<'(E1, E2) is true iff evaluating E1 and E2 as", "predicate indicator for one of the user-defined procedures in the database.''' def execute(self,", "def execute(self, result, expression): if isvariable(expression): self.throw_instantiation_error() c = evaluate_expression(expression) return self.unify(result, Atomic(c))", "AtomCodes_2(BuiltIn): '''atom_codes(+atom, ?character_code_list)\\natom_codes(-atom, +character_code_list) atom_codes(Atom, List) is true iff List is a list", "(ISO 8.8) ### class Clause_2(BuiltIn): '''clause(+head, ?callable_term) clause(Head, Body) is true iff: *", "f in core._FLAGS.values() if core.unify(flag, Atomic(f.name)) is not None} if not self.flags: return", "or isvariable(e2): 
self.throw_instantiation_error() v1 = evaluate_expression(e1) v2 = evaluate_expression(e2) return v1 != v2", "bagof(Template, Goal, Instances) assembles as a list the solutions of Goal for each", "self.pick_data(atom1, atom2, atom12) def reexecute(self, atom1, atom2, atom12): self.reset_substitution() if not self.data: return", "goal, even when using parentheses! '\\+/1' : Not_1, 'not/1' : Not_1, 'repeat/0' :", "and Y are identical terms.\"\"\" def execute(self, x, y): # The Python __eq__", "return self.unify(arg, term.value[n.value]) class Univ_2(BuiltIn): \"\"\"'=..'(+nonvar, ?list) '=..'(-nonvar, +list) '=..'(Term, List) is true", "self.clauses_to_remove: return False self.kb.retract(self.clauses_to_remove.pop(0)) c = self.clauses_to_unify.pop(0) return self.unify(Compound('clause', head, body), c) class", "evaluate_expression(e1) v2 = evaluate_expression(e2) return v1 != v2 class ArithmeticLessThan_2(BuiltIn): \"\"\"'<'(@evaluable, @evaluable) '<'(E1,", "if not self.data: return False return self.pick_data(atom1, atom2, atom12) def pick_data(self, atom1, atom2,", "processor and returning to whatever system invoked Prolog, passing the value of X", "and not isatom(flag): self.throw_type_error('atom', flag) if isatom(flag) and not core._FLAGS[flag.name]: self.throw_domain_error('prolog_flag', flag) self.flags", "ordering of two terms. 
'@=<'(X, Y) is true iff X preceeds Y or", "found by # current_predicate/1 is implementation dependent i = self.indicators.pop() return self.unify(pi, i)", "a member of the set I.''' def execute(self, x): return x.arity == 0", "ww.apply(subst) t.apply(subst) self.substitution.update(subst) t_list = List.from_list(t_list) self.s = s_next return self.unify(t_list, instances) def", "atom2) if not isvariable(atom12) and not isatom(atom12): self.throw_type_error('atom', atom12) if isvariable(atom1) and isvariable(atom2):", "element in codelist.as_list(): if isvariable(element): self.throw_instantiation_error() if not isvariable(number) and not isnumber(number): self.throw_type_error('number',", "from ..builtin import search_builtin if search_builtin(c): self.throw_permission_error('modify', 'static_procedure', pi) else: self.throw_type_error('predicate_indicator', pi) self.kb.abolish(pi)", "and E2 as expressions the corresponding arithmetic values are equal.\"\"\" def execute(self, e1,", "b = atom.name.index(s, p) l = len(s) a = len(atom.name) - (b +", "Value is the value currently associated with it.''' def execute(self, flag, value): from", "= evaluate_expression(e2) return v1 >= v2 ### ### Clause retrival and information (ISO", "are the one-char atoms whose names are the successive characters of the name", "isinstance(term, Compound): self.throw_type_error('compound', term) if n.value < 0: self.throw_domain_error('not_less_than_zero', n) if n.value >=", "return self.unify(Compound('clause', head, body), c) class Abolish_1(BuiltIn): '''abolish(@predicate_indicator) abolish(Pred) is true. It is", "else: n1 = atom1.name n2 = atom2.name self.data = [(n1, n2, n1 +", "operator.neg, '//2' : divide, '///2' : intdivide, 'mod/2' : module, 'rem/2' : module,", "atomic term, then EV is the empty set * else if T unifies", "only when x and y are integers. 
Also, we need to manage ZeroDivisionError", "pi): if not self.indicators: return False # the order in which predicate indicators", "x >= y ### ### Term creation and decomposition (ISO 8.5) ### class", "subst = core.unify(ww, self.witness) ww.apply(subst) t.apply(subst) self.substitution.update(subst) t_list = List.from_list(t_list) self.s = s_next", ": power, 'sin/1' : math.sin, 'cos/1' : math.cos, 'atan/1' : math.atan, 'exp/1' :", "Head is public, and * there is a clause in the database which", "return self.unify(charlist, List.from_list(chars)) else: chars = [c.name for c in charlist.as_list()] return atom.name", "T is a set of variables defined recursively as follows: * if T", "math.exp, 'log/1' : logarithm, 'sqrt/1' : squareroot, '>>/2' : rightshift, '<</2' : leftshift,", "atom2, atom12): self.reset_substitution() if not self.data: return False return self.pick_data(atom1, atom2, atom12) def", "'private_procedure', pi) from .. import core for clause in procedure.clauses(): h, b =", "return self.pick_flag(flag, value) def reexecute(self, flag, value): self.reset_substitution() if not self.flags: return False", "operator.sub, '-/1' : operator.neg, '//2' : divide, '///2' : intdivide, 'mod/2' : module,", "PrologEvaluationError raise PrologEvaluationError('zero_divisor') if isinstance(x, int) and isinstance(y, int): return x // y", "and not (pi.name == '/' and pi.arity == 2): self.throw_type_error('predicate_indicator', pi) self.indicators =", "def execute(self, term, name, arity): if isvariable(term) and isvariable(name): self.throw_instantiation_error() if isvariable(term) and", "class ArithmeticGreaterThanOrEqual_2(BuiltIn): \"\"\"'>='(@evaluable, @evaluable) '>='(E1, E2) is true iff evaluating E1 and E2", "class Var_1(BuiltIn): '''var(@term) var(X) is true iff X is a member of the", ": squareroot, '>>/2' : rightshift, '<</2' : leftshift, '/\\\\/2' : bitand, '\\\\//2' :", "self.flags: return False return self.pick_flag(flag, value) def reexecute(self, flag, 
value): self.reset_substitution() if not", "defined hooks (ISO 8.17) 'set_prolog_flag/2' : SetPrologFlag_2, 'current_prolog_flag/2' : CurrentPrologFlag_2, 'halt/0' : Halt_0,", "as its input.''' if not x: return 0 if isinstance(x, int) else 0.0", "of evaluating Expression as an expression is Result.\"\"\" def execute(self, result, expression): if", ": CurrentPredicate_1, # Clause creation and destruction (ISO 8.9) 'asserta/1' : Asserta_1, 'assertz/1'", "body): if not self.clauses: return False c = self.clauses.pop(0) return self.unify(Compound('clause', head, body),", "= [Atomic(ord(c)) for c in chars] return self.unify(codelist, List.from_list(lst)) ### ### Implementation defined", "after, subatom): if isvariable(atom): self.throw_instantiation_error() if not isvariable(atom) and not isatom(atom): self.throw_type_error('atom', atom)", "two terms. '\\=='(X, Y) is true iff X and Y are not identical", "float. Also, we need to manage ZeroDivisionError errors on our own.''' if x", "the name of the atom Atom_2 to the characters of the name of", "t_list = [e.value[2] for e in wt_list] s_next = [e for e in", "Code) is true iff the character code for the one-char atom Char is", "chr(code.value) return self.unify(char, Atomic(c)) elif isvariable(code): c = ord(char.name) return self.unify(code, Atomic(c)) else:", "replacing each variable X in the former by Xs.''' from ..core import deref", "defined recursively as follows: * if T unifies with ^(_, Goal) then G", "is true iff X is a member of the set F.''' def execute(self,", "self.throw_type_error('integer', after) if isnumber(before) and before.value < 0: self.throw_domain_error('not_less_than_zero', before) if isnumber(length) and", "self.s: return False return self.pick_bag(template, goal, instances) def reexecute(self, template, goal, instances): self.reset_substitution()", "= set() if isinstance(term, Atomic) or isvariable(term): return s if term.name == '^'", "raise PrologEvaluationError('undefined') from math import 
sqrt return sqrt(x) def rightshift(n, s): '''Redefined w.r.t.", "in zip(wt_list, t_list): ww = wwtt.value[1] #from copy import deepcopy #subst = core.unify(ww,", "valid only when x is a float.''' if not isinstance(x, float): from ..core", "such that Before is the number of characters of the name of AtomL,", ": bitand, '\\\\//2' : bitor, '\\\\/1' : bitnot} return d.get(name) def divide(x, y):", "return not result class Repeat_0(BuiltIn): '''repeat repeat is true. repeat is re-executable. '''", "0: return self.unify(term, name) if isatom(name) and arity.value > 0: t = (Variable('_')", "not equal.\"\"\" def execute(self, e1, e2): if isvariable(e1) or isvariable(e2): self.throw_instantiation_error() v1 =", "we need to manage ValueError errors (e.g. for log(0)) on our own.''' if", "the parser needs an End Token n = PrologParser(s + '.').read_term() return self.unify(number,", "retrival and information (ISO 8.8) ### class Clause_2(BuiltIn): '''clause(+head, ?callable_term) clause(Head, Body) is", ": Is_2, # Arithmetic comparison (ISO 8.7) '=:=/2' : ArithmeticEqual_2, '=\\=/2' : ArithmeticNotEqual_2,", "in codelist.as_list()] return atom.name == ''.join(chars) class CharCode_2(BuiltIn): '''char_code(+character, ?character_code)\\nchar_code(-character, +character_code) char_code(Char, Code)", "a list for e in term.as_list(): s.update(variable_set(e)) return s def existential_variable_set(term): '''The existential", "''.join(chars) class AtomCodes_2(BuiltIn): '''atom_codes(+atom, ?character_code_list)\\natom_codes(-atom, +character_code_list) atom_codes(Atom, List) is true iff List is", "'''Redefined w.r.t. Python because in ISO Prolog float_fractional_part(x) is valid only when x", "w.r.t. Python because we need to manage ValueError errors (e.g. 
for log(0)) on", "True return x <= y class TermIdentical_2(BuiltIn): \"\"\"'=='(@term, @term) Test the ordering of", "search_builtin if search_builtin(head): pi = Compound('/', Atomic(head.name), Atomic(head.arity)) self.throw_permission_error('modify', 'static_procedure', pi) self.kb.assert_clause(clause, append=False)", "and isvariable(v): return True if isinstance(t, Compound) and isinstance(v, Compound): if t.name !=", "return x > y class TermGreaterThanOrEqual_2(BuiltIn): \"\"\"'@>=(@term, @term) Test the ordering of two", "operator's \"scope\" is much wider than the single # goal, even when using", "iff X and Y are identical terms.\"\"\" def execute(self, x, y): # The", "whatever system invoked Prolog.''' def execute(self): exit(0) class Halt_1(BuiltIn): '''halt(+integer) halt(X) neither succeeds", "body to a # goal and on access permission to a user-defined #", "### ### Arithmetic comparison (ISO 8.7) ### class ArithmeticEqual_2(BuiltIn): \"\"\"'=:='(@evaluable, @evaluable) '=:='(E1, E2)", ": math.atan, 'exp/1' : math.exp, 'log/1' : logarithm, 'sqrt/1' : squareroot, '>>/2' :", "Arg.''' def execute(self, n, term, arg): if isvariable(n) or isvariable(term): self.throw_instantiation_error() if not", "def execute(self, clause): if clause.predicate_indicator() == ':-/2': head = clause.value[1] body = clause.value[2]", "t.apply(subst) self.substitution.update(subst) t_list = List.from_list(t_list) self.s = s_next return self.unify(t_list, instances) def _create_solution_list(self,", "Atomic.TRUE return self.pick_clause(head, body) def pick_clause(self, head, body): if not self.clauses_to_remove: return False", "and term.arity == 2: return iterated_goal_term(term.value[2]) return term def isvariant(t, v): '''Two terms", "len(element.name) != 1: self.throw_type_error('character', element) if isvariable(number) or islist(charlist): from ..parser import PrologParser,", "return set() s = set() if isinstance(term, Compound): for arg in term.value[1:]: 
s.update(variable_set(arg))", "true. repeat is re-executable. ''' def execute(self): return True def reexecute(self): return True", "not isatom(atom12): self.throw_type_error('atom', atom12) if isvariable(atom1) and isvariable(atom2): s = atom12.name self.data =", "isvariable(charlist) or islist(charlist) or ispartiallist(charlist): chars = [Atomic(c) for c in atom.name] return", "False bijection = {} for a1, a2 in zip(t.value[1:], v.value[1:]): if isvariable(a1) and", "not islist(elements) and not ispartiallist(elements): self.throw_type_error('list', elements) if isvariable(term) and islist(elements) and isvariable(elements.head):", "== 2: s.update(variable_set(term.value[1])) s.update(existential_variable_set(term.value[2])) return s return s def iterated_goal_term(term): '''The iterated goal", "# Term comparison (ISO 8.4) '@=</2' : TermLessThanOrEqual_2, '==/2' : TermIdentical_2, '\\==/2' :", "if not iscallable(head): self.throw_type_error('callable', head) if not (isvariable(body) or iscallable(body)): self.throw_type_error('callable', body) self.clauses", "i in range(arity.value)) c = Compound(name.name, *t) return self.unify(term, c) return False class", "codelist.as_list()] return atom.name == ''.join(chars) class CharCode_2(BuiltIn): '''char_code(+character, ?character_code)\\nchar_code(-character, +character_code) char_code(Char, Code) is", "isinstance(term, (Compound, List)): return (self.unify(Atomic(term.name), name) and self.unify(Atomic(term.arity), arity)) if isinstance(term, Variable): if", "in term.value[1:]) pi = term.predicate_indicator() functor = search_evaluable_functor(pi) if not functor: from ..core", "'^' and term.arity == 2: return iterated_goal_term(term.value[2]) return term def isvariant(t, v): '''Two", "must return the same type of number as its input.''' if not x:", ": ArithmeticGreaterThanOrEqual_2, # Clause retrival and information (ISO 8.8) 'clause/2' : Clause_2, 'current_predicate/1'", "if clause.predicate_indicator() == ':-/2' 
else clause if isvariable(head): self.throw_instantiation_error() if isnumber(head): self.throw_type_error('callable', head)", "return self.pick_clause(head, body) def reexecute(self, clause): self.reset_substitution() if clause.predicate_indicator() == ':-/2': head =", "and Y are not identical terms.\"\"\" def execute(self, x, y): # The Python", "Assertz_1(BuiltIn): '''assertz(@clause) assertz(Clause) is true. It is used to add Clause to the", "not isinstance(name, Atomic): self.throw_type_error('atomic', name) if isvariable(term) and not isatom(name) and arity.value >", "'''bagof(?term, +callable_term, ?list) bagof(Template, Goal, Instances) assembles as a list the solutions of", "iff List is a list whose elements are the one-char atoms corresponding to", "is a predicate indicator for one of the user-defined procedures in the database.'''", "is false if X is a member of the set V or CT.'''", "Expression as an expression is Result.\"\"\" def execute(self, result, expression): if isvariable(expression): self.throw_instantiation_error()", "n) except InvalidTermException as e: self.throw_syntax_error(Atomic(s)) else: chars = list(str(number.value)) # FIXME this", "g, Variable('S')) s = findall.substitution['S'] self.s = self._create_solution_list(s) if not self.s: return False", "or equal to the corresponding arithmetic value of E2.\"\"\" def execute(self, e1, e2):", "NotUnifiable_2, # Type testing (ISO 8.3) 'var/1' : Var_1, 'atom/1' : Atom_1, 'integer/1'", "to a character sequence of Number which could be output.''' def execute(self, number,", "Variable): # elements is a list if elements.name == '.' 
and elements.arity ==", "sequence of Number which could be output.''' def execute(self, number, codelist): if isvariable(number)", "BuiltIn.unify because the bindings # between the renamed copy of t1 and t2", "X and Y are NSTO (Not Subject To Occur-check) then '\\\\='(X, Y) is", "(d, p) in self.data if n.index(d, p) == before.value] if isinstance(length, Atomic): self.data", "self.throw_instantiation_error() v1 = evaluate_expression(e1) v2 = evaluate_expression(e2) return v1 == v2 class ArithmeticNotEqual_2(BuiltIn):", "== 0 and term._isnumber(): return term.value if isinstance(term, Compound): from ..core import deref", "iscallable from ..core import BuiltIn ### ### Term unification (ISO 8.2) ### class", "of solution, but the order in which each list is found is undefined.'''", "in atom.name] return self.unify(codelist, List.from_list(codes)) else: chars = [chr(code.value) for code in codelist.as_list()]", "compound(X) is true iff X is a member of the set CT.''' def", "from ..core import renamed_copy return renamed_copy(head) class CurrentPredicate_1(BuiltIn): '''current_predicate(?predicate_indicator) current_predicate(PI) is true iff", "except UnicodeDecodeError: self.throw_representation_error(element) if isvariable(number) or islist(codelist): from ..parser import PrologParser, InvalidTermException s", "the value of each element is the character code for the corresponding character", "p) in self.data if len(n) - n.index(d, p) - len(d) == after.value] if", "is a clause in the database which corresponds to a term H :-", "elements.head) if isvariable(term) and elements == List.EMPTY_LIST: self.throw_domain_error('non_empty_list', elements) # TODO Missing max_arity", "not a member of the set V.''' def execute(self, x): return not isvariable(x)", "self.throw_instantiation_error() if not isvariable(flag) and not isatom(flag): self.throw_type_error('atom', flag) from .. 
import core", "set of variables defined as the union of the variable set of V", "in atom.name] return self.unify(charlist, List.from_list(chars)) else: chars = [c.name for c in charlist.as_list()]", "as the set difference of the variable set of T and BV where", "of using the proper unify_with_occur_check/2 predicate. return self.unify(x, y) class NotUnifiable_2(BuiltIn): \"\"\"'\\\\='(@term, @term)", "log(x) def squareroot(x): '''Redefined w.r.t. Python because we need to manage ValueError errors", "self.throw_instantiation_error() if isnumber(head): self.throw_type_error('callable', head) # error on access permission to a user-defined", "if isvariable(term) and not isinstance(arity.value, int): self.throw_type_error('integer', arity) # TODO Missing max_arity related", "the name.''' def execute(self, atom, codelist): if not isvariable(atom) and not isatom(atom): self.throw_type_error('atom',", "the result of concatenating the characters of the name of the atom Atom_2", "'''char_code(+character, ?character_code)\\nchar_code(-character, +character_code) char_code(Char, Code) is true iff the character code for the", "set A.''' def execute(self, x): return isatom(x) class Integer_1(BuiltIn): '''integer(@term) integer(X) is true", "is a compound term then Sv is the union of the variable sets", "\"\"\"'=:='(@evaluable, @evaluable) '=:='(E1, E2) is true iff evaluating E1 and E2 as expressions", "[(d, p) for (d, p) in self.data if d == subatom.value] if not", "code.value class NumberChars_2(BuiltIn): '''number_chars(+number, ?character_list)\\nnumber_chars(-number, +character_list) number_chars(Number, List) is true iff List is", "self.throw_instantiation_error() if not iscallable(head): self.throw_type_error('callable', head) if not (isvariable(body) or iscallable(body)): self.throw_type_error('callable', body)", "on our own.''' if not isinstance(x, int): from ..core import PrologTypeError raise PrologTypeError('integer',", "x): return x.arity == 0 and isinstance(x.value, 
float) class Atomic_1(BuiltIn): '''atomic(@term) atomic(X) is", "name of the atom Atom_2 to the characters of the name of the", "elif isvariable(code): c = ord(char.name) return self.unify(code, Atomic(c)) else: return ord(char.name) == code.value", ": round, 'ceiling/1' : math.ceil, 'truncate/1' : math.trunc, 'float/1' : float, 'abs/1' :", "codelist) for element in codelist.as_list(): if isvariable(element): self.throw_instantiation_error() if not isvariable(element): try: chr(element.value)", "TermNotIdentical_2, '@</2' : TermLessThan_2, '@>/2' : TermGreaterThan_2, '@>=/2' : TermGreaterThanOrEqual_2, # Term creation", ".. import core if core.unify(pi, indicator) is not None: self.indicators.append(indicator) return self.pick_indicator(pi) def", "Occur-check) then '\\\\='(X, Y) is true iff X and Y are not unifiable.\"\"\"", "0: self.throw_domain_error('not_less_than_zero', n) if n.value >= len(term.value): return False return self.unify(arg, term.value[n.value]) class", "asserta(Clause) is true. It is used to add Clause to the database before", "with it.''' def execute(self, flag, value): from .. import core # for flags", "after.value] if isinstance(subatom, Atomic): self.data = [(d, p) for (d, p) in self.data", "i in range(arity.value)) c = Compound(name.name, *t) from ..builtin import search_builtin if search_builtin(c):", "body): return (convert_to_term(head), convert_to_term(body)) def convert_to_term(head): if head.arity == 0: return Atomic(head.name) from", "return False return True return False PREDICATES = { # Term unification (ISO", "overflow/underflow errors # TODO No undefined errors if isvariable(term): from ..core import PrologInstantiationError", "is true iff: * Term is an atomic term and List is the", "not self.data: return False return self.pick_data(atom1, atom2, atom12) def pick_data(self, atom1, atom2, atom12):", "or F and is false if X is a member of the set", "Test the ordering of two terms. 
'\\=='(X, Y) is true iff X and", "isinstance(term, Variable): if not term.value: value = mgu.get(term.name) if value: return value else:", "= core.renamed_copy(t1) t = t1._copy_term() # Can't directly use BuiltIn.unify because the bindings", "reexecute(self, template, goal, instances): self.reset_substitution() if not self.s: return False return self.pick_bag(template, goal,", "char in atom.name] return self.unify(codelist, List.from_list(codes)) else: chars = [chr(code.value) for code in", "isinstance(x, int) else s def float_integer_part(x): '''Redefined w.r.t. Python because in ISO Prolog", "Atom, and the value of each element is the character code for the", "return t == v if isvariable(t) and isvariable(v): return True if isinstance(t, Compound)", "class TermLessThan_2(BuiltIn): \"\"\"'@<'(@term, @term) Test the ordering of two terms. '@<'(X, Y) is", "= [(d, p) for (d, p) in self.data if d == subatom.value] if", "not isatom(atom2): self.throw_type_error('atom', atom2) if not isvariable(atom12) and not isatom(atom12): self.throw_type_error('atom', atom12) if", "instances): if isvariable(goal): self.throw_instantiation_error() if isnumber(goal): self.throw_type_error('callable', goal) if (not isvariable(instances) and (not", "the number of characters of the name of Sub_atom, and After is the", "latter term results from replacing each variable X in the former by Xs.'''", "i = modf(x) return f def power(x, y): '''Redefined w.r.t. Python because in", "log return log(x) def squareroot(x): '''Redefined w.r.t. Python because we need to manage", "distributed onto the Term hierarchy classes def variable_set(term): '''The variable set Sv of", "T is a variable then Sv is {T} * else if T is", "(Not Subject To Occur-check) then '\\\\='(X, Y) is true iff X and Y", "\"scope\" is much wider than the single # goal, even when using parentheses!", "return False from .. 
import core for clause in procedure.clauses(): h, b =", "term G * else EV is the empty set.''' s = set() if", "a Prolog flag to be altered.''' def execute(self, flag, value): if isvariable(flag) or", "for flags if flag.name not in core._FLAGS: self.throw_domain_error('prolog_flag', flag) f = core._FLAGS[flag.name] if", "return isvariable(x) class Atom_1(BuiltIn): '''atom(@term) atom(X) is true iff X is a member", "import PrologTypeError raise PrologTypeError('integer', Atomic(x)) return ~x ### ### Arithmetic comparison (ISO 8.7)", "not isvariable(flag) and not isatom(flag): self.throw_type_error('atom', flag) from .. import core # for", "term with a functor whose identifier is Name and arity Arity, or *", "corresponding arithmetic value of E1 is greater than the corresponding arithmetic value of", "self.s = self._create_solution_list(s) if not self.s: return False return self.pick_bag(template, goal, instances) def", "s return s def iterated_goal_term(term): '''The iterated goal term G of a term", "need to manage ValueError errors (e.g. for x < 0) on our own.'''", "terms. 
'@>'(X, Y) is true iff Y preceeds X.\"\"\" def execute(self, x, y):", "copy_term(Term_1, Term_2) is true iff Term_2 unifies with a term T which is", "TermGreaterThanOrEqual_2, # Term creation and decomposition (ISO 8.5) 'functor/3' : Functor_3, 'arg/3' :", "atom_concat(Atom_1, Atom_2, Atom_12) is true iff characters of the name of the atom", "\"\"\"not(@callable_term) not(Term) is true iff call(Term) is false.\"\"\" # \"\"\"'\\\\+'(@callable_term) # # '\\\\+'(Term)", "for e in wt_list] s_next = [e for e in self.s if e", "'abs/1' : operator.abs, 'sign/1' : sign, 'float_integer_part/1' : float_integer_part, 'float_fractional_part/1' : float_fractional_part, '**/2'", "and is false if X is a member of the set V or", "if not self.data: return False return self.pick_data(atom, before, length, after, subatom) def pick_data(self,", "SubAtom_5(BuiltIn): '''sub_atom(+atom, ?integer, ?integer, ?integer, ?atom) sub_atom(Atom, Before, Length, After, Sub_atom) is true", "of \"call(Goal), X=Template\" after systematic replacement of all variables in X by new", "body) def reexecute(self, head, body): self.reset_substitution() return self.pick_clause(head, body) def pick_clause(self, head, body):", "of Term is Arg.''' def execute(self, n, term, arg): if isvariable(n) or isvariable(term):", "not isvariable(atom2) and not isatom(atom2): self.throw_type_error('atom', atom2) if not isvariable(atom12) and not isatom(atom12):", "number_chars(Number, List) is true iff List is a list whose elements are the", "to manage ZeroDivisionError errors on our own.''' if not y: from ..core import", "if not procedure: return False from .. 
import core for clause in procedure.clauses():", "except UnicodeDecodeError: self.throw_representation_error(code) if isvariable(char): c = chr(code.value) return self.unify(char, Atomic(c)) elif isvariable(code):", "element in codelist.as_list(): if not isvariable(element): try: chr(element.value) except UnicodeDecodeError: self.throw_representation_error(element) if isvariable(number)", "class Functor_3(BuiltIn): '''functor(-nonvar, +atomic, +integer) functor(+nonvar, ?atomic, ?integer) functor(Term, Name, Arity) is true", "list whose head is the functor name of Term and whose tail is", "elements are the one-char atoms corresponding to a character sequence of Number which", "as float) on our own.''' if not isinstance(x, int): from ..core import PrologTypeError", "operator.mul, '-/2' : operator.sub, '-/1' : operator.neg, '//2' : divide, '///2' : intdivide,", "/ y def intdivide(x, y): '''Redefined w.r.t. Python because in ISO Prolog x", "ispartiallist(codelist): codes = [Atomic(ord(char)) for char in atom.name] return self.unify(codelist, List.from_list(codes)) else: chars", "'=/2' : Unify_2, '\\=/2' : NotUnifiable_2, # Type testing (ISO 8.3) 'var/1' :", "self.reset_substitution() return self.pick_indicator(pi) def pick_indicator(self, pi): if not self.indicators: return False # the", "difference of the variable set of T and BV where BV is a", "code in codelist.as_list()]) try: # the parser needs an End Token n =", "islist(codelist): from ..parser import PrologParser, InvalidTermException s = ''.join([chr(code.value) for code in codelist.as_list()])", "atom) if isvariable(atom): if ispartiallist(codelist): self.throw_instantiation_error() if not islist(codelist) and not ispartiallist(codelist): self.throw_type_error('list',", "'''abolish(@predicate_indicator) abolish(Pred) is true. 
It is used to remove from the database the", "import search_builtin if search_builtin(head): pi = Compound('/', Atomic(head.name), Atomic(head.arity)) self.throw_permission_error('modify', 'static_procedure', pi) self.clauses_to_unify", "not islist(codelist) and not ispartiallist(codelist): self.throw_type_error('list', codelist) if islist(codelist): for element in codelist.as_list():", "two terms. '=='(X, Y) is true iff X and Y are identical terms.\"\"\"", "y def bitor(x, y): '''Redefined w.r.t. Python because we need to manage TypeError", "by the database t = tuple(Variable('_') for i in range(arity.value)) c = Compound(name.name,", "Atomic(l)) and self.unify(after, Atomic(a)) and self.unify(subatom, Atomic(s))) class AtomChars_2(BuiltIn): '''atom_chars(+atom, ?character_list)\\natom_chars(-atom, +character_list) atom_chars(Atom,", "execute(self, x): return x.arity == 0 and isinstance(x.value, int) class Float_1(BuiltIn): '''float(@term) float(X)", "int): from ..core import PrologTypeError raise PrologTypeError('integer', Atomic(y)) return x // y def", "AtomConcat_3, 'sub_atom/5' : SubAtom_5, 'atom_chars/2' : AtomChars_2, 'atom_codes/2' : AtomCodes_2, 'char_code/2' : CharCode_2,", "Atomic(head.name), Atomic(head.arity)) self.throw_permission_error('modify', 'static_procedure', pi) self.kb.assert_clause(clause, append=True) return True class Retract_1(BuiltIn): '''retract(+clause) retract(Clause)", "# the order in which predicate indicators are found by # current_predicate/1 is", "#values.append(core.renamed_copy(v)) values.append(v._copy_term()) result = caller.solve_next() values = List.EMPTY_LIST if not values else List.from_list(values)", "(ISO 8.16) ### class AtomLength_2(BuiltIn): '''atom_length(+atom, ?integer) atom_length(Atom, Length) is true iff integer", "### Term comparison (ISO 8.4) ### class TermLessThanOrEqual_2(BuiltIn): \"\"\"'@=<'(@term, @term) Test the ordering", "elif isvariable(codelist) or ispartiallist(codelist): codes = [Atomic(ord(char)) 
for char in atom.name] return self.unify(codelist,", "Variable): if not term.value: value = mgu.get(term.name) if value: return value else: return", "== 0: return self.unify(term, name) if isatom(name) and arity.value > 0: t =", "Logic and control (ISO 8.15) ### class Not_1(BuiltIn): \"\"\"not(@callable_term) not(Term) is true iff", "isvariable(flag) and not isatom(flag): self.throw_type_error('atom', flag) from .. import core # for flags", "instead of using the proper unify_with_occur_check/2 predicate. return self.unify(x, y) class NotUnifiable_2(BuiltIn): \"\"\"'\\\\='(@term,", "= ord(char.name) return self.unify(code, Atomic(c)) else: return ord(char.name) == code.value class NumberChars_2(BuiltIn): '''number_chars(+number,", "'\\\\/1' : bitnot} return d.get(name) def divide(x, y): '''Redefined w.r.t. Python because in", "implementation dependent i = self.indicators.pop() return self.unify(pi, i) ### ### Clause creation and", "after, subatom) def pick_data(self, atom, before, length, after, subatom): s, p = self.data.pop(0)", "isvariable(term) and not isinstance(name, Atomic): self.throw_type_error('atomic', name) if isvariable(term) and not isatom(name) and", "not ispartiallist(elements): self.throw_type_error('list', elements) if isvariable(term) and islist(elements) and isvariable(elements.head): self.throw_instantiation_error() if islist(elements)", "is valid only when x and y are integers. Also, we need to", "end + 1): self.data.append((n[start:i], start)) start += 1 if isinstance(before, Atomic): self.data =", "v1 != v2 class ArithmeticLessThan_2(BuiltIn): \"\"\"'<'(@evaluable, @evaluable) '<'(E1, E2) is true iff evaluating", "or ispartiallist(codelist): codes = [Atomic(ord(char)) for char in atom.name] return self.unify(codelist, List.from_list(codes)) else:", "flag supported by the processor, and Value is the value currently associated with", "Python because in ISO Prolog mod(x, y) is valid only when x and", "(e.g. 
x or y as float) on our own.''' if not isinstance(x, int):", "not isatom(name): self.throw_type_error('atom', name) # TODO Missing max_arity related errors if arity.value <", "class Number_1(BuiltIn): '''number(@term) number(X) is true if X is a member of the", "char_code(Char, Code) is true iff the character code for the one-char atom Char", "and arity.value == 0: return self.unify(term, name) if isatom(name) and arity.value > 0:", "of the term G * else EV is the empty set.''' s =", "..core import PrologTypeError raise PrologTypeError('integer', Atomic(s)) return n << s def bitand(x, y):", "self.witness, template), g, Variable('S')) s = findall.substitution['S'] self.s = self._create_solution_list(s) if not self.s:", "self.unify(t_list, instances) def _create_solution_list(self, s): return [] if s == List.EMPTY_LIST else s.as_list()", "p) l = len(s) a = len(atom.name) - (b + l) return (self.unify(before,", "(convert_to_term(head), convert_to_term(body)) def convert_to_term(head): if head.arity == 0: return Atomic(head.name) from ..core import", "chars] return self.unify(codelist, List.from_list(lst)) ### ### Implementation defined hooks (ISO 8.17) ### class", ":- Body.''' def execute(self, head, body): if isvariable(head): self.throw_instantiation_error() if not iscallable(head): self.throw_type_error('callable',", "if isinstance(term, Compound): from ..core import deref args = (evaluate_expression(deref(a)) for a in", "w.r.t. 
Python because in ISO Prolog float_integer_part(x) is valid only when x is", "wt_list] s_next = [e for e in self.s if e not in wt_list]", "Compound) and isinstance(v, Compound): if t.name != v.name or t.arity != v.arity: return", "= [e.value[2] for e in wt_list] s_next = [e for e in self.s", "= (Variable('_') for i in range(arity.value)) c = Compound(name.name, *t) return self.unify(term, c)", "core._FLAGS: self.throw_domain_error('prolog_flag', flag) f = core._FLAGS[flag.name] if len(f.allowed) == 1: self.throw_permission_error('modify', 'flag', flag)", "else if T is a compound term then Sv is the union of", "if not isinstance(arity.value, int): self.throw_type_error('integer', arity) if not isatom(name): self.throw_type_error('atom', name) # TODO", "+compound_term, ?term) arg(N, Term, Arg) is true iff the Nth argument of Term", "return term class Bagof_3(BuiltIn): '''bagof(?term, +callable_term, ?list) bagof(Template, Goal, Instances) assembles as a", "TODO Missing max_arity related error if isvariable(term) and arity.value < 0: self.throw_domain_error('not_less_than_zero', arity)", "pi): self.reset_substitution() return self.pick_indicator(pi) def pick_indicator(self, pi): if not self.indicators: return False #", "return True ### ### All solutions (ISO 8.10) ### class Findall_3(BuiltIn): '''findall(?term, +callable_term,", "iff the database contains at least one dynamic procedure with a clause Clause", "# procedure is handled directly by the database t = tuple(Variable('_') for i", "# bug: the operator's \"scope\" is much wider than the single # goal,", "bitand(x, y): '''Redefined w.r.t. Python because we need to manage TypeError errors (e.g.", "'''atomic(@term) atomic(X) is true if X is a member of the set A", ": Number_1, # Term comparison (ISO 8.4) '@=</2' : TermLessThanOrEqual_2, '==/2' : TermIdentical_2,", "using parentheses! 
'\\+/1' : Not_1, 'not/1' : Not_1, 'repeat/0' : Repeat_0, # Atomic", "True ### ### All solutions (ISO 8.10) ### class Findall_3(BuiltIn): '''findall(?term, +callable_term, ?list)", "because in ISO Prolog div(x, y) with x and y integers is equivalent", "atomic term equal to Name and Arity is 0.''' def execute(self, term, name,", "is true iff Flag is a flag supported by the processor, and Value", "= deref(v) if isinstance(t, Atomic) and isinstance(v, Atomic): return t == v if", "8.3) 'var/1' : Var_1, 'atom/1' : Atom_1, 'integer/1' : Integer_1, 'float/1' : Float_1,", "'=:=/2' : ArithmeticEqual_2, '=\\=/2' : ArithmeticNotEqual_2, '</2' : ArithmeticLessThan_2, '=</2' : ArithmeticLessThanOrEqual_2, '>/2'", "import isvariable, isatom, isnumber, islist, ispartiallist, iscallable from ..core import BuiltIn ### ###", "self.throw_instantiation_error() if isvariable(term) and not isinstance(arity.value, int): self.throw_type_error('integer', arity) # TODO Missing max_arity", "ground(term.binding(), mgu) if isinstance(term, Compound): args = [] for arg in term.value[1:]: args.append(ground(arg,", "if isinstance(t, Compound) and isinstance(v, Compound): if t.name != v.name or t.arity !=", "List.from_list(lst)) class NumberCodes_2(BuiltIn): '''number_codes(+number, ?character_code_list)\\nnumber_codes(-number, ?character_code_list) number_codes(Number, List) is true iff List is", "evaluate_expression(e1) v2 = evaluate_expression(e2) return v1 >= v2 ### ### Clause retrival and", "self.unify(pi, i) ### ### Clause creation and destruction (ISO 8.9) ### class Asserta_1(BuiltIn):", "'_'): return True return x <= y class TermIdentical_2(BuiltIn): \"\"\"'=='(@term, @term) Test the", "return False return x == y class TermNotIdentical_2(BuiltIn): \"\"\"'\\=='(@term, @term) Test the ordering", "[Atomic(ord(char)) for char in atom.name] return self.unify(codelist, List.from_list(codes)) else: chars = [chr(code.value) for", "[] self.clauses_to_remove = [] procedure = self.kb.search(head) if 
not procedure: return False from", "{} for a1, a2 in zip(t.value[1:], v.value[1:]): if isvariable(a1) and isvariable(a2) and not", "elements) # TODO Missing max_arity related error if isinstance(term, Atomic): l = List(term)", "self.throw_instantiation_error() if not isvariable(atom1) and not isatom(atom1): self.throw_type_error('atom', atom1) if not isvariable(atom2) and", "type of number as its input.''' if not x: return 0 if isinstance(x,", "if not self.clauses_to_remove: return False self.kb.retract(self.clauses_to_remove.pop(0)) c = self.clauses_to_unify.pop(0) return self.unify(Compound('clause', head, body),", "PREDICATES = { # Term unification (ISO 8.2) '=/2' : Unify_2, '\\=/2' :", "n.index(d, p) == before.value] if isinstance(length, Atomic): self.data = [(d, p) for (d,", "and self.unify(atom12, Atomic(c[2]))) class SubAtom_5(BuiltIn): '''sub_atom(+atom, ?integer, ?integer, ?integer, ?atom) sub_atom(Atom, Before, Length,", "> y class TermGreaterThanOrEqual_2(BuiltIn): \"\"\"'@>=(@term, @term) Test the ordering of two terms. '@>='(X,", "characters of the name of the atom Atom_12 are the result of concatenating", "isvariable(e1) or isvariable(e2): self.throw_instantiation_error() v1 = evaluate_expression(e1) v2 = evaluate_expression(e2) return v1 <", "are NSTO (Not Subject To Occur-check) then '='(X, Y) is true iff X", "return self.pick_indicator(pi) def reexecute(self, pi): self.reset_substitution() return self.pick_indicator(pi) def pick_indicator(self, pi): if not", "bijection = {} for a1, a2 in zip(t.value[1:], v.value[1:]): if isvariable(a1) and isvariable(a2)", "isatom(flag): self.throw_type_error('atom', flag) if isatom(flag) and not core._FLAGS[flag.name]: self.throw_domain_error('prolog_flag', flag) self.flags = {f", "are the character codes corresponding to a character sequence of Number which could", "nor fails. 
It has the side effect of exiting from the processor and", "respect to a term V is a set of variables defined as the", "round, 'ceiling/1' : math.ceil, 'truncate/1' : math.trunc, 'float/1' : float, 'abs/1' : operator.abs,", "T.''' if term.name == '^' and term.arity == 2: return iterated_goal_term(term.value[2]) return term", "self.clauses.append(Compound('clause', h, b)) return self.pick_clause(head, body) def reexecute(self, head, body): self.reset_substitution() return self.pick_clause(head,", "variable set of V and the existential variable set of T.''' vst =", "defined recursively as follows: * if T is a variable or an atomic", "x, y): return x > y class TermGreaterThanOrEqual_2(BuiltIn): \"\"\"'@>=(@term, @term) Test the ordering", "raise PrologTypeError('integer', Atomic(s)) return x & y def bitor(x, y): '''Redefined w.r.t. Python", "+character_code) char_code(Char, Code) is true iff the character code for the one-char atom", "Body) is true iff: * the predicate of Head is public, and *", "x) return int(s) if isinstance(x, int) else s def float_integer_part(x): '''Redefined w.r.t. 
Python", "hold Prolog # semantics for anonymous variables if (isvariable(x) and isvariable(y) and x.name", "b = convert_clause_to_term(clause.head(), clause.body()) if (core.unify(h, head) is not None and core.unify(b, body)", "List) is true iff List is a list whose elements are the character", "isvariable(e2): self.throw_instantiation_error() v1 = evaluate_expression(e1) v2 = evaluate_expression(e2) return v1 < v2 class", "islist(charlist): for element in charlist.as_list(): if isatom(element) and len(element.name) != 1: self.throw_type_error('character', element)", "if not isvariable(x) and not isnumber(x) and not isinstance(x.value, int): self.throw_type_error('integer', x) exit(x.value)", "a list the solutions of Goal for each different instantiation of the free", "set of the term G * else EV is the empty set.''' s", "n.value >= len(term.value): return False return self.unify(arg, term.value[n.value]) class Univ_2(BuiltIn): \"\"\"'=..'(+nonvar, ?list) '=..'(-nonvar,", "to the database before all existing clauses of the procedure whose predicate is", "= findall.substitution['S'] self.s = self._create_solution_list(s) if not self.s: return False return self.pick_bag(template, goal,", "'''halt halt neither succeeds nor fails. It has the side effect of exiting", "not None and a2 != a: return False else: bijection[a1] = a2 else:", "Atomic(n), Atomic(int(a))) from .. import core if core.unify(pi, indicator) is not None: self.indicators.append(indicator)", "core caller = core.Caller() caller._kb = self.kb values = [] result = caller.solve(goal)", "import PrologTypeError raise PrologTypeError('integer', Atomic(s)) return n << s def bitand(x, y): '''Redefined", "to remove from the database the procedure specified by the predicate indicator Pred", "true. 
It is used to add Clause to the database after all existing", "atoms corresponding to a character sequence of Number which could be output.''' def", "if not isvariable(pi) and not (pi.name == '/' and pi.arity == 2): self.throw_type_error('predicate_indicator',", "not isinstance(arity.value, int): self.throw_type_error('integer', arity) if not isatom(name): self.throw_type_error('atom', name) # TODO Missing", "then Sv is the empty set * else if T is a variable", "order in which predicate indicators are found by # current_predicate/1 is implementation dependent", "== '_' and y.name == '_'): return True return x != y class", "a = bijection.get(a1) if a is not None and a2 != a: return", "__eq__ method does not hold Prolog # semantics for anonymous variables if (isvariable(x)", "not x: from ..core import PrologEvaluationError raise PrologEvaluationError('undefined') from math import log return", "?character_code)\\nchar_code(-character, +character_code) char_code(Char, Code) is true iff the character code for the one-char", "of characters of the name of AtomL, Length is the number of characters", "ArithmeticEqual_2, '=\\=/2' : ArithmeticNotEqual_2, '</2' : ArithmeticLessThan_2, '=</2' : ArithmeticLessThanOrEqual_2, '>/2' : ArithmeticGreaterThan_2,", "V or CT.''' def execute(self, x): return isinstance(x, Atomic) class Compound_1(BuiltIn): '''compound(@term) compound(X)", "isvariable(term): self.throw_instantiation_error() if not isinstance(n.value, int): self.throw_type_error('integer', n) if not isinstance(term, Compound): self.throw_type_error('compound',", "is re-executable. 
''' def execute(self): return True def reexecute(self): return True ### ###", "the union of the variable set of V and the existential variables set", "# TODO Missing max_arity related errors if arity.value < 0: self.throw_domain_error('not_less_than_zero', arity) #", "atom12): c = self.data.pop(0) return (self.unify(atom1, Atomic(c[0])) and self.unify(atom2, Atomic(c[1])) and self.unify(atom12, Atomic(c[2])))", "*args) return term class Bagof_3(BuiltIn): '''bagof(?term, +callable_term, ?list) bagof(Template, Goal, Instances) assembles as", "self.throw_type_error('atom', flag) from .. import core # for flags if flag.name not in", "v1 < v2 class ArithmeticLessThanOrEqual_2(BuiltIn): \"\"\"'=<'(@evaluable, @evaluable) '=<'(E1, E2) is true iff evaluating", "Body. It is used to remove those unifying clauses from the database.''' def", "math.ceil, 'truncate/1' : math.trunc, 'float/1' : float, 'abs/1' : operator.abs, 'sign/1' : sign,", "is found is undefined.''' def _create_solution_list(self, s): solutions = [] if s ==", "self.data = [] while start <= end: for i in range(start, end +", "broken into three pieces, AtomL, Sub_atom, and AtomR, such that Before is the", "Simple arithmetic functors (ISO 9.1) ### Other arithmetic functors (ISO 9.3) ### Bitwise", "if isvariable(charlist): self.throw_instantiation_error() if not islist(charlist) and not ispartiallist(charlist): self.throw_type_error('list', charlist) for element", "a member of the set V.''' def execute(self, x): return not isvariable(x) class", "if isvariable(term) and isvariable(name): self.throw_instantiation_error() if isvariable(term) and isvariable(arity): self.throw_instantiation_error() if isvariable(term) and", "class SubAtom_5(BuiltIn): '''sub_atom(+atom, ?integer, ?integer, ?integer, ?atom) sub_atom(Atom, Before, Length, After, Sub_atom) is", "if core.unify(flag, Atomic(f.name)) is not None} if not self.flags: return False return self.pick_flag(flag,", "*t) from ..builtin import search_builtin if 
search_builtin(c): self.throw_permission_error('modify', 'static_procedure', pi) else: self.throw_type_error('predicate_indicator', pi)", "if not isvariable(atom) and not isatom(atom): self.throw_type_error('atom', atom) if isvariable(atom): if ispartiallist(codelist): self.throw_instantiation_error()", "if isvariable(char): c = chr(code.value) return self.unify(char, Atomic(c)) elif isvariable(code): c = ord(char.name)", "from ..core import deref t = deref(t) v = deref(v) if isinstance(t, Atomic)", "atom) if (not isvariable(length) and not (isnumber(length) and isinstance(length.value, int))): self.throw_type_error('integer', length) if", "ordering of two terms. '=='(X, Y) is true iff X and Y are", "self.throw_type_error('character', element) if isvariable(atom): from ..core import deref chars = [deref(c).name for c", "or isvariable(e2): self.throw_instantiation_error() v1 = evaluate_expression(e1) v2 = evaluate_expression(e2) return v1 < v2", "mgu.reduce() self.substitution.update(mgu) return True return False ### ### Arithmetic evaluation (ISO 8.6) ###", "s def iterated_goal_term(term): '''The iterated goal term G of a term T is", "'_'): return False return x == y class TermNotIdentical_2(BuiltIn): \"\"\"'\\=='(@term, @term) Test the", "isatom(atom): self.throw_type_error('atom', atom) if (not isvariable(length) and not (isnumber(length) and isinstance(length.value, int))): self.throw_type_error('integer',", "errors (e.g. n as float) on our own.''' if not isinstance(n, int): from", ": NumberCodes_2, # Implementation defined hooks (ISO 8.17) 'set_prolog_flag/2' : SetPrologFlag_2, 'current_prolog_flag/2' :", "free variables in that goal. Each list is a sorted list, but the", "we need to manage ValueError errors (e.g. 
for x < 0) on our", "import core caller = core.Caller() caller._kb = self.kb result = caller.solve(term) return not", "follows: * if T is a variable or an atomic term, then EV", "n) if n.value >= len(term.value): return False return self.unify(arg, term.value[n.value]) class Univ_2(BuiltIn): \"\"\"'=..'(+nonvar,", "if not isatom(atom): self.throw_type_error('atom', atom) if (not isvariable(length) and not (isnumber(length) and isinstance(length.value,", "as e: self.throw_syntax_error(Atomic(s)) else: chars = list(str(number.value)) # FIXME this should use write_canonical/1", "'number/1' : Number_1, # Term comparison (ISO 8.4) '@=</2' : TermLessThanOrEqual_2, '==/2' :", "character sequence of Number which could be output.''' def execute(self, number, codelist): if", "and After is the number of characters of the name of AtomR.''' def", "atoms whose names are the successive characters of the name of atom Atom.'''", "if s == List.EMPTY_LIST else s.as_list() class Setof_3(Bagof_3): '''setof(?term, +callable_term, ?list) setof/3 assembles", "else: i = len(atom1.name) self.data = [(s[:i], s[i:], s)] else: n1 = atom1.name", "!= 1: self.throw_type_error('character', element) if isvariable(number) or islist(charlist): from ..parser import PrologParser, InvalidTermException", "[(s[:i], s[i:], s)] else: n1 = atom1.name n2 = atom2.name self.data = [(n1,", "supported by the processor, and Value is the value currently associated with it.'''", "our own.''' if x < 0 and isinstance(y, float): from ..core import PrologEvaluationError", "8.15) # FIXME \\+ does not work because of what is probably a", "Y preceeds X.\"\"\" def execute(self, x, y): return x > y class TermGreaterThanOrEqual_2(BuiltIn):", "Atomic_1(BuiltIn): '''atomic(@term) atomic(X) is true if X is a member of the set", "in ISO Prolog float_fractional_part(x) is valid only when x is a float.''' if", "variables defined recursively as follows: * if T is a variable or an", "self.kb: n, a = i.split('/') indicator = 
Compound('/', Atomic(n), Atomic(int(a))) from .. import", "s[i:], s)] else: n1 = atom1.name n2 = atom2.name self.data = [(n1, n2,", "not (isnumber(before) and isinstance(before.value, int))): self.throw_type_error('integer', before) if (not isvariable(length) and not (isnumber(length)", "Arity) is true iff: * Term is a compound term with a functor", "handled directly by the database from ..builtin import search_builtin if search_builtin(head): pi =", "Atomic(x)) if not isinstance(y, int): from ..core import PrologTypeError raise PrologTypeError('integer', Atomic(y)) return", "+= 1 if isinstance(before, Atomic): self.data = [(d, p) for (d, p) in", ": ArithmeticGreaterThan_2, '>=/2' : ArithmeticGreaterThanOrEqual_2, # Clause retrival and information (ISO 8.8) 'clause/2'", "ValueError errors (e.g. for x < 0) on our own.''' if x <", "body = Atomic.TRUE if isvariable(head): self.throw_instantiation_error() if isnumber(head): self.throw_type_error('callable', head) # error on", "### class Findall_3(BuiltIn): '''findall(?term, +callable_term, ?list) findall(Template, Goal, Instances) is true iff Instances", "iscallable(head): self.throw_type_error('callable', head) if not (isvariable(body) or iscallable(body)): self.throw_type_error('callable', body) self.clauses = []", "then Sv is {T} * else if T is a compound term then", "islist(codelist) and not ispartiallist(codelist): self.throw_type_error('list', codelist) for element in codelist.as_list(): if isvariable(element): self.throw_instantiation_error()", "2: name, arity = pi.value[1:] if isvariable(name) or isvariable(arity): self.throw_instantiation_error() if not isinstance(arity.value,", "clauses, leaving the database in the same state as if the procedure identified", "isnumber(goal): self.throw_type_error('callable', goal) if (not isvariable(instances) and (not islist(instances) and not ispartiallist(instances))): self.throw_type_error('list',", "from .. 
import core # for flags if not isvariable(flag) and not isatom(flag):", "v = deref(v) if isinstance(t, Atomic) and isinstance(v, Atomic): return t == v", "related error if isvariable(term) and arity.value < 0: self.throw_domain_error('not_less_than_zero', arity) if isvariable(term) and", "manage ZeroDivisionError errors on our own.''' if x < 0 and isinstance(y, float):", "Findall_3(BuiltIn): '''findall(?term, +callable_term, ?list) findall(Template, Goal, Instances) is true iff Instances unifies with", "not self.data: return False return self.pick_data(atom, before, length, after, subatom) def pick_data(self, atom,", "isvariable(name): self.throw_instantiation_error() if isvariable(term) and isvariable(arity): self.throw_instantiation_error() if isvariable(term) and not isinstance(arity.value, int):", "head, body), c) class Abolish_1(BuiltIn): '''abolish(@predicate_indicator) abolish(Pred) is true. It is used to", "which each list is found is undefined.''' def execute(self, template, goal, instances): fvs", "if not isinstance(y, int): from ..core import PrologTypeError raise PrologTypeError('integer', Atomic(y)) return x", ">= y ### ### Term creation and decomposition (ISO 8.5) ### class Functor_3(BuiltIn):", "on our own.''' if x < 0: from ..core import PrologEvaluationError raise PrologEvaluationError('undefined')", "..core import PrologEvaluationError raise PrologEvaluationError('undefined') if not x and y < 0: from", "elif isvariable(atom2): s = atom12.name if not s.startswith(atom1.name): return False else: i =", "returns a float. 
Also, we need to manage ZeroDivisionError errors on our own.'''", "x == y class TermNotIdentical_2(BuiltIn): \"\"\"'\\=='(@term, @term) Test the ordering of two terms.", "for a in term.value[1:]) pi = term.predicate_indicator() functor = search_evaluable_functor(pi) if not functor:", "t) else: return False else: return False class CopyTerm_2(BuiltIn): '''copy_term(?term, ?term) copy_term(Term_1, Term_2)", "self.throw_type_error('predicate_indicator', pi) self.indicators = [] for i in self.kb: n, a = i.split('/')", "not y: from ..core import PrologEvaluationError raise PrologEvaluationError('zero_divisor') if isinstance(x, int) and isinstance(y,", "the corresponding arithmetic value of E1 is greater than the corresponding arithmetic value", "self.data = [(n1, n2, n1 + n2)] return self.pick_data(atom1, atom2, atom12) def reexecute(self,", "used to remove from the database the procedure specified by the predicate indicator", "= [] result = caller.solve(goal) while result: from copy import deepcopy v =", "(ISO 8.8) 'clause/2' : Clause_2, 'current_predicate/1' : CurrentPredicate_1, # Clause creation and destruction", "of two terms. 
'@>='(X, Y) is true iff Y preceeds X or Y", "if there is a bijection s of the variables of the former to", "char, code): if isvariable(char) and isvariable(code): self.throw_instantiation_error() if not isvariable(char) and len(char.name) !=", "or t.arity != v.arity: return False bijection = {} for a1, a2 in", "Atomic(x)) return ~x ### ### Arithmetic comparison (ISO 8.7) ### class ArithmeticEqual_2(BuiltIn): \"\"\"'=:='(@evaluable,", "Atomic): self.data = [(d, p) for (d, p) in self.data if len(n) -", ": sign, 'float_integer_part/1' : float_integer_part, 'float_fractional_part/1' : float_fractional_part, '**/2' : power, 'sin/1' :", "for i in range(arity.value)) c = Compound(name.name, *t) from ..builtin import search_builtin if", "is public, and * there is a clause in the database which corresponds", "isinstance(term, Atomic): return set() s = set() if isinstance(term, Compound): for arg in", "s, p = self.data.pop(0) b = atom.name.index(s, p) l = len(s) a =", "ground(term, mgu): if isinstance(term, Variable): if not term.value: value = mgu.get(term.name) if value:", "and the existential variable set of T.''' vst = variable_set(t) vsv = variable_set(v)", "exiting from the processor and returning to whatever system invoked Prolog.''' def execute(self):", "be output.''' def execute(self, number, codelist): if isvariable(number) and ispartiallist(codelist): self.throw_instantiation_error() if isvariable(number):", "'>=/2' : ArithmeticGreaterThanOrEqual_2, # Clause retrival and information (ISO 8.8) 'clause/2' : Clause_2,", "if not isinstance(x, int): from ..core import PrologTypeError raise PrologTypeError('integer', Atomic(x)) if not", "'char_code/2' : CharCode_2, 'number_chars/2' : NumberChars_2, 'number_codes/2' : NumberCodes_2, # Implementation defined hooks", "squareroot, '>>/2' : rightshift, '<</2' : leftshift, '/\\\\/2' : bitand, '\\\\//2' : bitor,", "> 1: name = elements.head.name t = Compound(name, *elements.as_list()[1:]) return self.unify(term, t) else:", 
"isvariable(head): self.throw_instantiation_error() if isnumber(head): self.throw_type_error('callable', head) # error on access permission to a", "'''compound(@term) compound(X) is true iff X is a member of the set CT.'''", "{T} * else if T is a compound term then Sv is the", "Atomic(f.name)) is not None} if not self.flags: return False return self.pick_flag(flag, value) def", "a member of the set A.''' def execute(self, x): return isatom(x) class Integer_1(BuiltIn):", "true iff: * Term is an atomic term and List is the list", "t2.apply(mgu) # Do not propagate renamed term variables bindings # outside the context", "int): from ..core import PrologTypeError raise PrologTypeError('integer', Atomic(x)) if not isinstance(y, int): from", "':-/2': head = clause.value[1] body = clause.value[2] else: head = clause body =", "?character_code_list) number_codes(Number, List) is true iff List is a list whose elements are", "'exp/1' : math.exp, 'log/1' : logarithm, 'sqrt/1' : squareroot, '>>/2' : rightshift, '<</2'", "manage TypeError errors (e.g. 
x or y as float) on our own.''' if", "= clause.value[1] if clause.predicate_indicator() == ':-/2' else clause if isvariable(head): self.throw_instantiation_error() if isnumber(head):", "List.from_list(lst)) ### ### Implementation defined hooks (ISO 8.17) ### class SetPrologFlag_2(BuiltIn): '''set_prolog_flag(+flag, @nonvar)", "in f.allowed: culprit = Compound('+', flag, value) self.throw_domain_error('flag_value', culprit) core._FLAGS[flag.name] = f._replace(value=value.name) return", "solutions (ISO 8.10) ### class Findall_3(BuiltIn): '''findall(?term, +callable_term, ?list) findall(Template, Goal, Instances) is", "2): self.throw_type_error('predicate_indicator', pi) self.indicators = [] for i in self.kb: n, a =", "return self.pick_flag(flag, value) def pick_flag(self, flag, value): f = self.flags.pop() return self.unify(flag, Atomic(f.name))", "not isvariable(atom1) and not isatom(atom1): self.throw_type_error('atom', atom1) if not isvariable(atom2) and not isatom(atom2):", "Atom.''' def execute(self, atom, charlist): if not isvariable(atom) and not isatom(atom): self.throw_type_error('atom', atom)", "charlist.as_list()]) try: # the parser needs an End Token n = PrologParser(s +", "expressions the corresponding arithmetic value of E1 is greater than or equal to", "isnumber(x) ### ### Term comparison (ISO 8.4) ### class TermLessThanOrEqual_2(BuiltIn): \"\"\"'@=<'(@term, @term) Test", "!= 1: self.throw_type_error('character', element) if isvariable(atom): from ..core import deref chars = [deref(c).name", "import PrologTypeError raise PrologTypeError('evaluable', Atomic(pi)) return functor(*args) from ..core import PrologTypeError raise PrologTypeError('number',", "# the parser needs an End Token n = PrologParser(s + '.').read_term() return", "== 1: self.throw_permission_error('modify', 'flag', flag) if value.name not in f.allowed: culprit = Compound('+',", "on our own.''' if not x: from ..core import PrologEvaluationError raise PrologEvaluationError('undefined') 
from", "raise PrologTypeError('integer', Atomic(y)) return x // y def module(x, y): '''Redefined w.r.t. Python", "List.EMPTY_LIST if not values else List.from_list(values) return self.unify(values, instances) def ground(term, mgu): if", "were a renamed variable (e.g. coming # from a clause renaming) temp =", "for e in self.s if e not in wt_list] from .. import core", "lst = [Atomic(c) for c in chars] return self.unify(charlist, List.from_list(lst)) class NumberCodes_2(BuiltIn): '''number_codes(+number,", "'clause/2' : Clause_2, 'current_predicate/1' : CurrentPredicate_1, # Clause creation and destruction (ISO 8.9)", "iff the Nth argument of Term is Arg.''' def execute(self, n, term, arg):", "self.unify(charlist, List.from_list(lst)) class NumberCodes_2(BuiltIn): '''number_codes(+number, ?character_code_list)\\nnumber_codes(-number, ?character_code_list) number_codes(Number, List) is true iff List", "is an atomic term and List is the list whose only element is", "float_integer_part(x) is valid only when x is a float.''' if not isinstance(x, float):", "is a flag supported by the processor, and Value is the value currently", "not isinstance(n.value, int): self.throw_type_error('integer', n) if not isinstance(term, Compound): self.throw_type_error('compound', term) if n.value", "arity.value == 0: return self.unify(term, name) if isatom(name) and arity.value > 0: t", "Term, Arg) is true iff the Nth argument of Term is Arg.''' def", "to Name and Arity is 0.''' def execute(self, term, name, arity): if isvariable(term)", "== length.value] if isinstance(after, Atomic): self.data = [(d, p) for (d, p) in", "Y) is true iff X and Y are identical terms.\"\"\" def execute(self, x,", "by Pred had never existed.''' def execute(self, pi): if isvariable(pi): self.throw_instantiation_error() if pi.name", "= self.indicators.pop() return self.unify(pi, i) ### ### Clause creation and destruction (ISO 8.9)", "set of V and the existential variables set of the term G *", "if (core.unify(h, 
head) is not None and core.unify(b, body) is not None): self.clauses_to_unify.append(Compound('clause',", "class SetPrologFlag_2(BuiltIn): '''set_prolog_flag(+flag, @nonvar) A goal set_prolog_flag(Flag, Value) enables the value associated with", "pi) self.kb.assert_clause(clause, append=False) return True class Assertz_1(BuiltIn): '''assertz(@clause) assertz(Clause) is true. It is", "in zip(t.value[1:], v.value[1:]): if isvariable(a1) and isvariable(a2) and not a1.name.startswith('_'): a = bijection.get(a1)", "return s return s def iterated_goal_term(term): '''The iterated goal term G of a", "for flags if not isvariable(flag) and not isatom(flag): self.throw_type_error('atom', flag) if isatom(flag) and", "destruction (ISO 8.9) 'asserta/1' : Asserta_1, 'assertz/1' : Assertz_1, 'retract/1' : Retract_1, 'abolish/1'", "destruction (ISO 8.9) ### class Asserta_1(BuiltIn): '''asserta(@clause) asserta(Clause) is true. It is used", "a message.''' def execute(self, x): if isvariable(x): self.throw_instantiation_error() if not isvariable(x) and not", "the bindings # between the renamed copy of t1 and t2 retain validity", "each list is found is undefined.''' def _create_solution_list(self, s): solutions = [] if", "or I or F and is false if X is a member of", "subatom) if (not isvariable(before) and not (isnumber(before) and isinstance(before.value, int))): self.throw_type_error('integer', before) if", "processor and returning to whatever system invoked Prolog.''' def execute(self): exit(0) class Halt_1(BuiltIn):", "empty set.''' s = set() if isinstance(term, Atomic) or isvariable(term): return s if", "self.pick_data(atom1, atom2, atom12) def pick_data(self, atom1, atom2, atom12): c = self.data.pop(0) return (self.unify(atom1,", "import deref chars = [deref(c).name for c in charlist.as_list()] return self.unify(atom, Atomic(''.join(chars))) elif", "1 if isinstance(before, Atomic): self.data = [(d, p) for (d, p) in self.data", "List is a list whose elements are the one-char 
atoms whose names are", "recursively as follows: * if T is an atomic term, then Sv is", "mgu.update({t2.name : temp}) else: mgu.reduce() self.substitution.update(mgu) return True return False ### ### Arithmetic", "Arity is 0.''' def execute(self, term, name, arity): if isvariable(term) and isvariable(name): self.throw_instantiation_error()", "?list) bagof(Template, Goal, Instances) assembles as a list the solutions of Goal for", "the name of atom Atom, and the value of each element is the", "PrologInstantiationError raise PrologInstantiationError() if term.arity == 0 and term._isnumber(): return term.value if isinstance(term,", "Atomic(head.arity)) self.throw_permission_error('modify', 'static_procedure', pi) self.kb.assert_clause(clause, append=False) return True class Assertz_1(BuiltIn): '''assertz(@clause) assertz(Clause) is", "'**/2' : power, 'sin/1' : math.sin, 'cos/1' : math.cos, 'atan/1' : math.atan, 'exp/1'", "core._FLAGS[flag.name] if len(f.allowed) == 1: self.throw_permission_error('modify', 'flag', flag) if value.name not in f.allowed:", "return self.unify(number, n) except InvalidTermException as e: self.throw_syntax_error(Atomic(s)) else: chars = list(str(number.value)) #", "Atomic(head.name), Atomic(head.arity)) self.throw_permission_error('access', 'private_procedure', pi) from .. 
import core for clause in procedure.clauses():", "arity.value > 0: t = (Variable('_') for i in range(arity.value)) c = Compound(name.name,", "corresponding arithmetic value of E1 is less than or equal to the corresponding", "instances) def _create_solution_list(self, s): return [] if s == List.EMPTY_LIST else s.as_list() class", "i) ### ### Clause creation and destruction (ISO 8.9) ### class Asserta_1(BuiltIn): '''asserta(@clause)", "are the one-char atoms corresponding to a character sequence of Number which could", "ISO Prolog sign(x) must return the same type of number as its input.'''", "number of characters of the name of AtomL, Length is the number of", "by successive re-executions of \"call(Goal), X=Template\" after systematic replacement of all variables in", "Atomic.TRUE if isvariable(head): self.throw_instantiation_error() if isnumber(head): self.throw_type_error('callable', head) # error on access permission", "// y return x / y def intdivide(x, y): '''Redefined w.r.t. Python because", "self.clauses_to_unify = [] self.clauses_to_remove = [] procedure = self.kb.search(head) if not procedure: return", "execute(self, x, y): # TODO prologlib crashes if you attempt to unify two", "Flag is a flag supported by the processor, and Value is the value", "self.throw_instantiation_error() if isnumber(term): self.throw_type_error('callable', term) from .. import core caller = core.Caller() caller._kb", "outside the context of the copy_term/2 built-in if t2.name in mgu: # Still", "is true iff List is a list whose elements are the character codes", "t2 were a renamed variable (e.g. coming # from a clause renaming) temp", "is not None: self.indicators.append(indicator) return self.pick_indicator(pi) def reexecute(self, pi): self.reset_substitution() return self.pick_indicator(pi) def", "\"\"\"'@=<'(@term, @term) Test the ordering of two terms. '@=<'(X, Y) is true iff", "y def intdivide(x, y): '''Redefined w.r.t. 
Python because in ISO Prolog x //", "isinstance(y, int): from ..core import PrologTypeError raise PrologTypeError('integer', Atomic(y)) return x % y", "= Compound('+', flag, value) self.throw_domain_error('flag_value', culprit) core._FLAGS[flag.name] = f._replace(value=value.name) return True class CurrentPrologFlag_2(BuiltIn):", "This should be distributed onto the Term hierarchy classes def variable_set(term): '''The variable", "search_builtin(head): pi = Compound('/', Atomic(head.name), Atomic(head.arity)) self.throw_permission_error('modify', 'static_procedure', pi) self.kb.assert_clause(clause, append=True) return True", "math import log return log(x) def squareroot(x): '''Redefined w.r.t. Python because we need", "reexecute(self, atom, before, length, after, subatom): self.reset_substitution() if not self.data: return False return", "term T is a set of variables defined recursively as follows: * if", "| y def bitnot(x): '''Redefined w.r.t. Python because we need to manage TypeError", "not isinstance(n, int): from ..core import PrologTypeError raise PrologTypeError('integer', Atomic(n)) if not isinstance(s,", "List is a list whose elements are the one-char atoms corresponding to a", "or isvariable(e2): self.throw_instantiation_error() v1 = evaluate_expression(e1) v2 = evaluate_expression(e2) return v1 >= v2", "Term unification (ISO 8.2) '=/2' : Unify_2, '\\=/2' : NotUnifiable_2, # Type testing", "isvariable(e2): self.throw_instantiation_error() v1 = evaluate_expression(e1) v2 = evaluate_expression(e2) return v1 != v2 class", "[] procedure = self.kb.search(head) if not procedure: return False if not procedure._public: pi", "errors (e.g. 
for x < 0) on our own.''' if x < 0:", "the order in which predicate indicators are found by # current_predicate/1 is implementation", "AtomL, Sub_atom, and AtomR, such that Before is the number of characters of", "return solutions ### ### Logic and control (ISO 8.15) ### class Not_1(BuiltIn): \"\"\"not(@callable_term)", "if isvariable(atom): self.throw_instantiation_error() if not isvariable(atom) and not isatom(atom): self.throw_type_error('atom', atom) if not", "char in charlist.as_list()]) try: # the parser needs an End Token n =", "'''float(@term) float(X) is true iff X is a member of the set F.'''", "expressions the corresponding arithmetic value of E1 is less than or equal to", "to a user-defined # procedure is handled directly by the database from ..builtin", "return self.unify(pi, i) ### ### Clause creation and destruction (ISO 8.9) ### class", "X by new variables.''' def execute(self, template, goal, instances): if isvariable(goal): self.throw_instantiation_error() if", "if islist(codelist): for element in codelist.as_list(): if not isvariable(element): try: chr(element.value) except UnicodeDecodeError:", "else: bijection[a1] = a2 else: if not isvariant(a1, a2): return False return True", "caller.solve(term) return not result class Repeat_0(BuiltIn): '''repeat repeat is true. 
repeat is re-executable.", "is a variable or an atomic term, then EV is the empty set", "return atom.name == ''.join(chars) class AtomCodes_2(BuiltIn): '''atom_codes(+atom, ?character_code_list)\\natom_codes(-atom, +character_code_list) atom_codes(Atom, List) is true", "and length.value < 0: self.throw_domain_error('not_less_than_zero', length) if isnumber(after) and after.value < 0: self.throw_domain_error('not_less_than_zero',", "'''The variable set Sv of a term T is a set of variables", "G) then EV is the union of the variable set of V and", "= core.unify(ww, deepcopy(self.witness)) subst = core.unify(ww, self.witness) ww.apply(subst) t.apply(subst) self.substitution.update(subst) t_list = List.from_list(t_list)", "manage ValueError errors (e.g. for log(0)) on our own.''' if not x: from", "Atomic(int(a))) from .. import core if core.unify(pi, indicator) is not None: self.indicators.append(indicator) return", "'''The free variable set FV of a term T with respect to a", "set * else if T unifies with ^(V, G) then EV is the", "w.r.t. Python because in ISO Prolog sign(x) must return the same type of", "= Findall_3(self.kb) findall.execute(Compound('+', self.witness, template), g, Variable('S')) s = findall.substitution['S'] self.s = self._create_solution_list(s)", "isvariable(term): return s if term.name == '^' and term.arity == 2: s.update(variable_set(term.value[1])) s.update(existential_variable_set(term.value[2]))", "number as its input.''' if not x: return 0 if isinstance(x, int) else", "\"\"\"'=<'(@evaluable, @evaluable) '=<'(E1, E2) is true iff evaluating E1 and E2 as expressions", "def iterated_goal_term(term): '''The iterated goal term G of a term T is a", "if ispartiallist(codelist): self.throw_instantiation_error() if not islist(codelist) and not ispartiallist(codelist): self.throw_type_error('list', codelist) for element", "<= y class TermIdentical_2(BuiltIn): \"\"\"'=='(@term, @term) Test the ordering of two terms. 
'=='(X,", "or y as float) on our own.''' if not isinstance(x, int): from ..core", "t = tuple(Variable('_') for i in range(arity.value)) c = Compound(name.name, *t) from ..builtin", "unifies with Head :- Body.''' def execute(self, head, body): if isvariable(head): self.throw_instantiation_error() if", "a list whose elements are the character codes corresponding to a character sequence", "y): return x < y class TermGreaterThan_2(BuiltIn): \"\"\"'@>(@term, @term) Test the ordering of", "term._isnumber(): return term.value if isinstance(term, Compound): from ..core import deref args = (evaluate_expression(deref(a))", "False return x == y class TermNotIdentical_2(BuiltIn): \"\"\"'\\=='(@term, @term) Test the ordering of", "of exiting from the processor and returning to whatever system invoked Prolog.''' def", "Functor_3(BuiltIn): '''functor(-nonvar, +atomic, +integer) functor(+nonvar, ?atomic, ?integer) functor(Term, Name, Arity) is true iff:", "isvariable(code): self.throw_instantiation_error() if not isvariable(char) and len(char.name) != 1: self.throw_type_error('character', char) if not", "return False return self.pick_data(atom, before, length, after, subatom) def reexecute(self, atom, before, length,", "Atom_1(BuiltIn): '''atom(@term) atom(X) is true iff X is a member of the set", "A, or CT.''' def execute(self, x): return isnumber(x) ### ### Term comparison (ISO", "if (not isvariable(instances) and (not islist(instances) and not ispartiallist(instances))): self.throw_type_error('list', instances) from ..", "(Not Subject To Occur-check) then '='(X, Y) is true iff X and Y", "8.7) '=:=/2' : ArithmeticEqual_2, '=\\=/2' : ArithmeticNotEqual_2, '</2' : ArithmeticLessThan_2, '=</2' : ArithmeticLessThanOrEqual_2,", "(ISO 8.16) 'atom_length/2' : AtomLength_2, 'atom_concat/3' : AtomConcat_3, 'sub_atom/5' : SubAtom_5, 'atom_chars/2' :", "isatom(atom): self.throw_type_error('atom', atom) if isvariable(atom): if ispartiallist(codelist): self.throw_instantiation_error() if not 
islist(codelist) and not", "args = (evaluate_expression(deref(a)) for a in term.value[1:]) pi = term.predicate_indicator() functor = search_evaluable_functor(pi)", "= (evaluate_expression(deref(a)) for a in term.value[1:]) pi = term.predicate_indicator() functor = search_evaluable_functor(pi) if", "own.''' if not y: from ..core import PrologEvaluationError raise PrologEvaluationError('zero_divisor') if not isinstance(x,", "probably a parser # bug: the operator's \"scope\" is much wider than the", "y) with x and y integers is equivalent to intdiv(x, y). Also, we", "'>>/2' : rightshift, '<</2' : leftshift, '/\\\\/2' : bitand, '\\\\//2' : bitor, '\\\\/1'", "def reexecute(self): return True ### ### Atomic term processing (ISO 8.16) ### class", "self.data: return False return self.pick_data(atom, before, length, after, subatom) def reexecute(self, atom, before,", "is a member of the set V or CT.''' def execute(self, x): return", "evaluation (ISO 8.6) 'is/2' : Is_2, # Arithmetic comparison (ISO 8.7) '=:=/2' :", "not None} if not self.flags: return False return self.pick_flag(flag, value) def reexecute(self, flag,", "..core import PrologTypeError raise PrologTypeError('integer', Atomic(x)) return ~x ### ### Arithmetic comparison (ISO", "the proper unify_with_occur_check/2 predicate. 
return self.unify(x, y) class NotUnifiable_2(BuiltIn): \"\"\"'\\\\='(@term, @term) If X", "ispartiallist(codelist): self.throw_instantiation_error() if not islist(codelist) and not ispartiallist(codelist): self.throw_type_error('list', codelist) for element in", "equal to the functor of the head of Clause.''' def execute(self, clause): head", "def execute(self, x): return isnumber(x) ### ### Term comparison (ISO 8.4) ### class", "[Atomic(c) for c in chars] return self.unify(charlist, List.from_list(lst)) class NumberCodes_2(BuiltIn): '''number_codes(+number, ?character_code_list)\\nnumber_codes(-number, ?character_code_list)", "(self.unify(atom1, Atomic(c[0])) and self.unify(atom2, Atomic(c[1])) and self.unify(atom12, Atomic(c[2]))) class SubAtom_5(BuiltIn): '''sub_atom(+atom, ?integer, ?integer,", "return d.get(name) def divide(x, y): '''Redefined w.r.t. Python because in ISO Prolog div(x,", "arity) # TODO Missing max_arity related error if isvariable(term) and arity.value < 0:", "or isvariable(term): self.throw_instantiation_error() if not isinstance(n.value, int): self.throw_type_error('integer', n) if not isinstance(term, Compound):" ]
[ "sensor_msgs.msg import Imu, Image def main(): if len(sys.argv) < 2: print(\"Usage: {} dataset_name\".format(sys.argv[0]))", "rosbag.Bag('{}.bag'.format(file_name), 'w') as bag: bar = Bar('Camera', max=len(camera_file['X'])) for i, img_data in enumerate(camera_file['X']):", "main(): if len(sys.argv) < 2: print(\"Usage: {} dataset_name\".format(sys.argv[0])) exit(1) file_name = sys.argv[1] log_file", "m_imu.header.stamp = rospy.Time.from_sec(time) [setattr(m_imu.linear_acceleration, c, v_accel[i]) for i, c in enumerate('xyz')] [setattr(m_imu.angular_velocity, c,", "img_data.shape[2] m_img.step = 3 * img_data.shape[2] m_img.encoding = 'rgb8' m_img.data = np.transpose(img_data, (1,", "izip import numpy as np import h5py from progress.bar import Bar import sys", "m_img.step = 3 * img_data.shape[2] m_img.encoding = 'rgb8' m_img.data = np.transpose(img_data, (1, 2,", "= rospy.Time.from_sec(time) [setattr(m_imu.linear_acceleration, c, v_accel[i]) for i, c in enumerate('xyz')] [setattr(m_imu.angular_velocity, c, v_gyro[i])", "np import h5py from progress.bar import Bar import sys import rospy import rosbag", "c in enumerate('xyz')] bag.write('/fiber_imu', m_imu, m_imu.header.stamp) bar.next() bar.finish() if __name__ == \"__main__\": main()", "v_gyro in zipped_log: m_imu = Imu() m_imu.header.stamp = rospy.Time.from_sec(time) [setattr(m_imu.linear_acceleration, c, v_accel[i]) for", "np.transpose(img_data, (1, 2, 0)).flatten().tolist() bag.write('/camera/image_raw', m_img, m_img.header.stamp) bar.next() bar.finish() bar = Bar('IMU', max=len(log_file['times']))", "m_img, m_img.header.stamp) bar.next() bar.finish() bar = Bar('IMU', max=len(log_file['times'])) for time, v_accel, v_gyro in", "in enumerate('xyz')] [setattr(m_imu.angular_velocity, c, v_gyro[i]) for i, c in enumerate('xyz')] bag.write('/fiber_imu', m_imu, m_imu.header.stamp)", "rospy.Time.from_sec(0.01 * i) m_img.height = img_data.shape[1] m_img.width = img_data.shape[2] m_img.step = 3 *", "= 
h5py.File('../dataset/camera/{}.h5'.format(file_name)) zipped_log = izip( log_file['times'], log_file['fiber_accel'], log_file['fiber_gyro']) with rosbag.Bag('{}.bag'.format(file_name), 'w') as bag:", "= img_data.shape[1] m_img.width = img_data.shape[2] m_img.step = 3 * img_data.shape[2] m_img.encoding = 'rgb8'", "for i, c in enumerate('xyz')] bag.write('/fiber_imu', m_imu, m_imu.header.stamp) bar.next() bar.finish() if __name__ ==", "Imu, Image def main(): if len(sys.argv) < 2: print(\"Usage: {} dataset_name\".format(sys.argv[0])) exit(1) file_name", "'w') as bag: bar = Bar('Camera', max=len(camera_file['X'])) for i, img_data in enumerate(camera_file['X']): m_img", "enumerate('xyz')] [setattr(m_imu.angular_velocity, c, v_gyro[i]) for i, c in enumerate('xyz')] bag.write('/fiber_imu', m_imu, m_imu.header.stamp) bar.next()", "m_img.header.stamp = rospy.Time.from_sec(0.01 * i) m_img.height = img_data.shape[1] m_img.width = img_data.shape[2] m_img.step =", "Image() m_img.header.stamp = rospy.Time.from_sec(0.01 * i) m_img.height = img_data.shape[1] m_img.width = img_data.shape[2] m_img.step", "= h5py.File('../dataset/log/{}.h5'.format(file_name)) camera_file = h5py.File('../dataset/camera/{}.h5'.format(file_name)) zipped_log = izip( log_file['times'], log_file['fiber_accel'], log_file['fiber_gyro']) with rosbag.Bag('{}.bag'.format(file_name),", "= Bar('IMU', max=len(log_file['times'])) for time, v_accel, v_gyro in zipped_log: m_imu = Imu() m_imu.header.stamp", "i, c in enumerate('xyz')] [setattr(m_imu.angular_velocity, c, v_gyro[i]) for i, c in enumerate('xyz')] bag.write('/fiber_imu',", "< 2: print(\"Usage: {} dataset_name\".format(sys.argv[0])) exit(1) file_name = sys.argv[1] log_file = h5py.File('../dataset/log/{}.h5'.format(file_name)) camera_file", "= izip( log_file['times'], log_file['fiber_accel'], log_file['fiber_gyro']) with rosbag.Bag('{}.bag'.format(file_name), 'w') as bag: bar = Bar('Camera',", "i, c in enumerate('xyz')] bag.write('/fiber_imu', m_imu, 
m_imu.header.stamp) bar.next() bar.finish() if __name__ == \"__main__\":", "for time, v_accel, v_gyro in zipped_log: m_imu = Imu() m_imu.header.stamp = rospy.Time.from_sec(time) [setattr(m_imu.linear_acceleration,", "h5py.File('../dataset/camera/{}.h5'.format(file_name)) zipped_log = izip( log_file['times'], log_file['fiber_accel'], log_file['fiber_gyro']) with rosbag.Bag('{}.bag'.format(file_name), 'w') as bag: bar", "img_data.shape[1] m_img.width = img_data.shape[2] m_img.step = 3 * img_data.shape[2] m_img.encoding = 'rgb8' m_img.data", "print(\"Usage: {} dataset_name\".format(sys.argv[0])) exit(1) file_name = sys.argv[1] log_file = h5py.File('../dataset/log/{}.h5'.format(file_name)) camera_file = h5py.File('../dataset/camera/{}.h5'.format(file_name))", "log_file['fiber_accel'], log_file['fiber_gyro']) with rosbag.Bag('{}.bag'.format(file_name), 'w') as bag: bar = Bar('Camera', max=len(camera_file['X'])) for i,", "dataset_name\".format(sys.argv[0])) exit(1) file_name = sys.argv[1] log_file = h5py.File('../dataset/log/{}.h5'.format(file_name)) camera_file = h5py.File('../dataset/camera/{}.h5'.format(file_name)) zipped_log =", "Image def main(): if len(sys.argv) < 2: print(\"Usage: {} dataset_name\".format(sys.argv[0])) exit(1) file_name =", "bar = Bar('Camera', max=len(camera_file['X'])) for i, img_data in enumerate(camera_file['X']): m_img = Image() m_img.header.stamp", "* img_data.shape[2] m_img.encoding = 'rgb8' m_img.data = np.transpose(img_data, (1, 2, 0)).flatten().tolist() bag.write('/camera/image_raw', m_img,", "rospy import rosbag from sensor_msgs.msg import Imu, Image def main(): if len(sys.argv) <", "bar.finish() bar = Bar('IMU', max=len(log_file['times'])) for time, v_accel, v_gyro in zipped_log: m_imu =", "as bag: bar = Bar('Camera', max=len(camera_file['X'])) for i, img_data in enumerate(camera_file['X']): m_img =", "sys.argv[1] log_file = h5py.File('../dataset/log/{}.h5'.format(file_name)) camera_file = 
h5py.File('../dataset/camera/{}.h5'.format(file_name)) zipped_log = izip( log_file['times'], log_file['fiber_accel'], log_file['fiber_gyro'])", "'rgb8' m_img.data = np.transpose(img_data, (1, 2, 0)).flatten().tolist() bag.write('/camera/image_raw', m_img, m_img.header.stamp) bar.next() bar.finish() bar", "h5py from progress.bar import Bar import sys import rospy import rosbag from sensor_msgs.msg", "import Bar import sys import rospy import rosbag from sensor_msgs.msg import Imu, Image", "bag: bar = Bar('Camera', max=len(camera_file['X'])) for i, img_data in enumerate(camera_file['X']): m_img = Image()", "= rospy.Time.from_sec(0.01 * i) m_img.height = img_data.shape[1] m_img.width = img_data.shape[2] m_img.step = 3", "progress.bar import Bar import sys import rospy import rosbag from sensor_msgs.msg import Imu,", "m_img.data = np.transpose(img_data, (1, 2, 0)).flatten().tolist() bag.write('/camera/image_raw', m_img, m_img.header.stamp) bar.next() bar.finish() bar =", "in zipped_log: m_imu = Imu() m_imu.header.stamp = rospy.Time.from_sec(time) [setattr(m_imu.linear_acceleration, c, v_accel[i]) for i,", "= img_data.shape[2] m_img.step = 3 * img_data.shape[2] m_img.encoding = 'rgb8' m_img.data = np.transpose(img_data,", "[setattr(m_imu.linear_acceleration, c, v_accel[i]) for i, c in enumerate('xyz')] [setattr(m_imu.angular_velocity, c, v_gyro[i]) for i,", "v_accel, v_gyro in zipped_log: m_imu = Imu() m_imu.header.stamp = rospy.Time.from_sec(time) [setattr(m_imu.linear_acceleration, c, v_accel[i])", "exit(1) file_name = sys.argv[1] log_file = h5py.File('../dataset/log/{}.h5'.format(file_name)) camera_file = h5py.File('../dataset/camera/{}.h5'.format(file_name)) zipped_log = izip(", "camera_file = h5py.File('../dataset/camera/{}.h5'.format(file_name)) zipped_log = izip( log_file['times'], log_file['fiber_accel'], log_file['fiber_gyro']) with rosbag.Bag('{}.bag'.format(file_name), 'w') as", "python from itertools import izip import numpy as np import h5py from 
progress.bar", "from itertools import izip import numpy as np import h5py from progress.bar import", "m_img = Image() m_img.header.stamp = rospy.Time.from_sec(0.01 * i) m_img.height = img_data.shape[1] m_img.width =", "= 3 * img_data.shape[2] m_img.encoding = 'rgb8' m_img.data = np.transpose(img_data, (1, 2, 0)).flatten().tolist()", "2, 0)).flatten().tolist() bag.write('/camera/image_raw', m_img, m_img.header.stamp) bar.next() bar.finish() bar = Bar('IMU', max=len(log_file['times'])) for time,", "for i, img_data in enumerate(camera_file['X']): m_img = Image() m_img.header.stamp = rospy.Time.from_sec(0.01 * i)", "* i) m_img.height = img_data.shape[1] m_img.width = img_data.shape[2] m_img.step = 3 * img_data.shape[2]", "0)).flatten().tolist() bag.write('/camera/image_raw', m_img, m_img.header.stamp) bar.next() bar.finish() bar = Bar('IMU', max=len(log_file['times'])) for time, v_accel,", "v_gyro[i]) for i, c in enumerate('xyz')] bag.write('/fiber_imu', m_imu, m_imu.header.stamp) bar.next() bar.finish() if __name__", "zipped_log: m_imu = Imu() m_imu.header.stamp = rospy.Time.from_sec(time) [setattr(m_imu.linear_acceleration, c, v_accel[i]) for i, c", "zipped_log = izip( log_file['times'], log_file['fiber_accel'], log_file['fiber_gyro']) with rosbag.Bag('{}.bag'.format(file_name), 'w') as bag: bar =", "from sensor_msgs.msg import Imu, Image def main(): if len(sys.argv) < 2: print(\"Usage: {}", "itertools import izip import numpy as np import h5py from progress.bar import Bar", "import rospy import rosbag from sensor_msgs.msg import Imu, Image def main(): if len(sys.argv)", "log_file = h5py.File('../dataset/log/{}.h5'.format(file_name)) camera_file = h5py.File('../dataset/camera/{}.h5'.format(file_name)) zipped_log = izip( log_file['times'], log_file['fiber_accel'], log_file['fiber_gyro']) with", "#!/usr/bin/env python from itertools import izip import numpy as np import h5py from", "c in enumerate('xyz')] [setattr(m_imu.angular_velocity, c, v_gyro[i]) for i, c in 
enumerate('xyz')] bag.write('/fiber_imu', m_imu,", "with rosbag.Bag('{}.bag'.format(file_name), 'w') as bag: bar = Bar('Camera', max=len(camera_file['X'])) for i, img_data in", "max=len(camera_file['X'])) for i, img_data in enumerate(camera_file['X']): m_img = Image() m_img.header.stamp = rospy.Time.from_sec(0.01 *", "max=len(log_file['times'])) for time, v_accel, v_gyro in zipped_log: m_imu = Imu() m_imu.header.stamp = rospy.Time.from_sec(time)", "rospy.Time.from_sec(time) [setattr(m_imu.linear_acceleration, c, v_accel[i]) for i, c in enumerate('xyz')] [setattr(m_imu.angular_velocity, c, v_gyro[i]) for", "from progress.bar import Bar import sys import rospy import rosbag from sensor_msgs.msg import", "import izip import numpy as np import h5py from progress.bar import Bar import", "v_accel[i]) for i, c in enumerate('xyz')] [setattr(m_imu.angular_velocity, c, v_gyro[i]) for i, c in", "= 'rgb8' m_img.data = np.transpose(img_data, (1, 2, 0)).flatten().tolist() bag.write('/camera/image_raw', m_img, m_img.header.stamp) bar.next() bar.finish()", "Bar import sys import rospy import rosbag from sensor_msgs.msg import Imu, Image def", "def main(): if len(sys.argv) < 2: print(\"Usage: {} dataset_name\".format(sys.argv[0])) exit(1) file_name = sys.argv[1]", "[setattr(m_imu.angular_velocity, c, v_gyro[i]) for i, c in enumerate('xyz')] bag.write('/fiber_imu', m_imu, m_imu.header.stamp) bar.next() bar.finish()", "bar = Bar('IMU', max=len(log_file['times'])) for time, v_accel, v_gyro in zipped_log: m_imu = Imu()", "file_name = sys.argv[1] log_file = h5py.File('../dataset/log/{}.h5'.format(file_name)) camera_file = h5py.File('../dataset/camera/{}.h5'.format(file_name)) zipped_log = izip( log_file['times'],", "log_file['times'], log_file['fiber_accel'], log_file['fiber_gyro']) with rosbag.Bag('{}.bag'.format(file_name), 'w') as bag: bar = Bar('Camera', max=len(camera_file['X'])) for", "3 * img_data.shape[2] m_img.encoding = 'rgb8' m_img.data = np.transpose(img_data, (1, 2, 
0)).flatten().tolist() bag.write('/camera/image_raw',", "Bar('Camera', max=len(camera_file['X'])) for i, img_data in enumerate(camera_file['X']): m_img = Image() m_img.header.stamp = rospy.Time.from_sec(0.01", "c, v_accel[i]) for i, c in enumerate('xyz')] [setattr(m_imu.angular_velocity, c, v_gyro[i]) for i, c", "import numpy as np import h5py from progress.bar import Bar import sys import", "i) m_img.height = img_data.shape[1] m_img.width = img_data.shape[2] m_img.step = 3 * img_data.shape[2] m_img.encoding", "m_imu = Imu() m_imu.header.stamp = rospy.Time.from_sec(time) [setattr(m_imu.linear_acceleration, c, v_accel[i]) for i, c in", "sys import rospy import rosbag from sensor_msgs.msg import Imu, Image def main(): if", "len(sys.argv) < 2: print(\"Usage: {} dataset_name\".format(sys.argv[0])) exit(1) file_name = sys.argv[1] log_file = h5py.File('../dataset/log/{}.h5'.format(file_name))", "import Imu, Image def main(): if len(sys.argv) < 2: print(\"Usage: {} dataset_name\".format(sys.argv[0])) exit(1)", "bag.write('/camera/image_raw', m_img, m_img.header.stamp) bar.next() bar.finish() bar = Bar('IMU', max=len(log_file['times'])) for time, v_accel, v_gyro", "= Bar('Camera', max=len(camera_file['X'])) for i, img_data in enumerate(camera_file['X']): m_img = Image() m_img.header.stamp =", "img_data in enumerate(camera_file['X']): m_img = Image() m_img.header.stamp = rospy.Time.from_sec(0.01 * i) m_img.height =", "if len(sys.argv) < 2: print(\"Usage: {} dataset_name\".format(sys.argv[0])) exit(1) file_name = sys.argv[1] log_file =", "= sys.argv[1] log_file = h5py.File('../dataset/log/{}.h5'.format(file_name)) camera_file = h5py.File('../dataset/camera/{}.h5'.format(file_name)) zipped_log = izip( log_file['times'], log_file['fiber_accel'],", "= Image() m_img.header.stamp = rospy.Time.from_sec(0.01 * i) m_img.height = img_data.shape[1] m_img.width = img_data.shape[2]", "enumerate(camera_file['X']): m_img = Image() m_img.header.stamp = rospy.Time.from_sec(0.01 * i) 
m_img.height = img_data.shape[1] m_img.width", "m_img.width = img_data.shape[2] m_img.step = 3 * img_data.shape[2] m_img.encoding = 'rgb8' m_img.data =", "m_img.height = img_data.shape[1] m_img.width = img_data.shape[2] m_img.step = 3 * img_data.shape[2] m_img.encoding =", "import h5py from progress.bar import Bar import sys import rospy import rosbag from", "{} dataset_name\".format(sys.argv[0])) exit(1) file_name = sys.argv[1] log_file = h5py.File('../dataset/log/{}.h5'.format(file_name)) camera_file = h5py.File('../dataset/camera/{}.h5'.format(file_name)) zipped_log", "c, v_gyro[i]) for i, c in enumerate('xyz')] bag.write('/fiber_imu', m_imu, m_imu.header.stamp) bar.next() bar.finish() if", "import rosbag from sensor_msgs.msg import Imu, Image def main(): if len(sys.argv) < 2:", "img_data.shape[2] m_img.encoding = 'rgb8' m_img.data = np.transpose(img_data, (1, 2, 0)).flatten().tolist() bag.write('/camera/image_raw', m_img, m_img.header.stamp)", "Imu() m_imu.header.stamp = rospy.Time.from_sec(time) [setattr(m_imu.linear_acceleration, c, v_accel[i]) for i, c in enumerate('xyz')] [setattr(m_imu.angular_velocity,", "2: print(\"Usage: {} dataset_name\".format(sys.argv[0])) exit(1) file_name = sys.argv[1] log_file = h5py.File('../dataset/log/{}.h5'.format(file_name)) camera_file =", "in enumerate(camera_file['X']): m_img = Image() m_img.header.stamp = rospy.Time.from_sec(0.01 * i) m_img.height = img_data.shape[1]", "rosbag from sensor_msgs.msg import Imu, Image def main(): if len(sys.argv) < 2: print(\"Usage:", "= Imu() m_imu.header.stamp = rospy.Time.from_sec(time) [setattr(m_imu.linear_acceleration, c, v_accel[i]) for i, c in enumerate('xyz')]", "(1, 2, 0)).flatten().tolist() bag.write('/camera/image_raw', m_img, m_img.header.stamp) bar.next() bar.finish() bar = Bar('IMU', max=len(log_file['times'])) for", "numpy as np import h5py from progress.bar import Bar import sys import rospy", "Bar('IMU', max=len(log_file['times'])) for time, v_accel, v_gyro in zipped_log: 
m_imu = Imu() m_imu.header.stamp =", "bar.next() bar.finish() bar = Bar('IMU', max=len(log_file['times'])) for time, v_accel, v_gyro in zipped_log: m_imu", "izip( log_file['times'], log_file['fiber_accel'], log_file['fiber_gyro']) with rosbag.Bag('{}.bag'.format(file_name), 'w') as bag: bar = Bar('Camera', max=len(camera_file['X']))", "log_file['fiber_gyro']) with rosbag.Bag('{}.bag'.format(file_name), 'w') as bag: bar = Bar('Camera', max=len(camera_file['X'])) for i, img_data", "i, img_data in enumerate(camera_file['X']): m_img = Image() m_img.header.stamp = rospy.Time.from_sec(0.01 * i) m_img.height", "as np import h5py from progress.bar import Bar import sys import rospy import", "m_img.header.stamp) bar.next() bar.finish() bar = Bar('IMU', max=len(log_file['times'])) for time, v_accel, v_gyro in zipped_log:", "h5py.File('../dataset/log/{}.h5'.format(file_name)) camera_file = h5py.File('../dataset/camera/{}.h5'.format(file_name)) zipped_log = izip( log_file['times'], log_file['fiber_accel'], log_file['fiber_gyro']) with rosbag.Bag('{}.bag'.format(file_name), 'w')", "= np.transpose(img_data, (1, 2, 0)).flatten().tolist() bag.write('/camera/image_raw', m_img, m_img.header.stamp) bar.next() bar.finish() bar = Bar('IMU',", "for i, c in enumerate('xyz')] [setattr(m_imu.angular_velocity, c, v_gyro[i]) for i, c in enumerate('xyz')]", "time, v_accel, v_gyro in zipped_log: m_imu = Imu() m_imu.header.stamp = rospy.Time.from_sec(time) [setattr(m_imu.linear_acceleration, c,", "m_img.encoding = 'rgb8' m_img.data = np.transpose(img_data, (1, 2, 0)).flatten().tolist() bag.write('/camera/image_raw', m_img, m_img.header.stamp) bar.next()", "import sys import rospy import rosbag from sensor_msgs.msg import Imu, Image def main():" ]
[ "models.FloatField(verbose_name=\"Lon\") appeared_at = models.DateTimeField(null=True, verbose_name=\"Appeared at\", blank=True) disappeared_at = models.DateTimeField(null=True, verbose_name=\"Disappeared at\", blank=True)", "blank=True, related_name=\"evolution\") def __str__(self): return self.title class PokemonEntity(models.Model): pokemon = models.ForeignKey(Pokemon, on_delete=models.CASCADE, related_name=\"entities\")", "self.title class PokemonEntity(models.Model): pokemon = models.ForeignKey(Pokemon, on_delete=models.CASCADE, related_name=\"entities\") lat = models.FloatField(verbose_name=\"Lat\") lon =", "models.ForeignKey(Pokemon, on_delete=models.CASCADE, related_name=\"entities\") lat = models.FloatField(verbose_name=\"Lat\") lon = models.FloatField(verbose_name=\"Lon\") appeared_at = models.DateTimeField(null=True, verbose_name=\"Appeared", "models.FloatField(verbose_name=\"Lat\") lon = models.FloatField(verbose_name=\"Lon\") appeared_at = models.DateTimeField(null=True, verbose_name=\"Appeared at\", blank=True) disappeared_at = models.DateTimeField(null=True,", "= models.DateTimeField(null=True, verbose_name=\"Disappeared at\", blank=True) level = models.IntegerField(null=True, verbose_name=\"Level\", blank=True) health = models.IntegerField(null=True,", "= models.ForeignKey(Pokemon, on_delete=models.CASCADE, related_name=\"entities\") lat = models.FloatField(verbose_name=\"Lat\") lon = models.FloatField(verbose_name=\"Lon\") appeared_at = models.DateTimeField(null=True,", "models.DateTimeField(null=True, verbose_name=\"Appeared at\", blank=True) disappeared_at = models.DateTimeField(null=True, verbose_name=\"Disappeared at\", blank=True) level = models.IntegerField(null=True,", "verbose_name=\"Level\", blank=True) health = models.IntegerField(null=True, verbose_name=\"Health\", blank=True) strength = models.IntegerField(null=True, verbose_name=\"Strength\", blank=True) defence", "level = models.IntegerField(null=True, verbose_name=\"Level\", 
blank=True) health = models.IntegerField(null=True, verbose_name=\"Health\", blank=True) strength = models.IntegerField(null=True,", "at\", blank=True) disappeared_at = models.DateTimeField(null=True, verbose_name=\"Disappeared at\", blank=True) level = models.IntegerField(null=True, verbose_name=\"Level\", blank=True)", "models.IntegerField(null=True, verbose_name=\"Strength\", blank=True) defence = models.IntegerField(null=True, verbose_name=\"Defence\", blank=True) stamina = models.IntegerField(null=True, verbose_name=\"Stamina\", blank=True)", "verbose_name=\"Из кого эволюционирует\", on_delete=models.SET_NULL, null=True, blank=True, related_name=\"evolution\") def __str__(self): return self.title class PokemonEntity(models.Model):", "blank=True) level = models.IntegerField(null=True, verbose_name=\"Level\", blank=True) health = models.IntegerField(null=True, verbose_name=\"Health\", blank=True) strength =", "on_delete=models.SET_NULL, null=True, blank=True, related_name=\"evolution\") def __str__(self): return self.title class PokemonEntity(models.Model): pokemon = models.ForeignKey(Pokemon,", "models.CharField(max_length=200, verbose_name=\"Русское название\") title_en = models.CharField(max_length=200, verbose_name=\"Английское название\", blank=True) title_jp = models.CharField(max_length=200, verbose_name=\"Японское", "previous_evolution = models.ForeignKey(\"self\", verbose_name=\"Из кого эволюционирует\", on_delete=models.SET_NULL, null=True, blank=True, related_name=\"evolution\") def __str__(self): return", "return self.title class PokemonEntity(models.Model): pokemon = models.ForeignKey(Pokemon, on_delete=models.CASCADE, related_name=\"entities\") lat = models.FloatField(verbose_name=\"Lat\") lon", "эволюционирует\", on_delete=models.SET_NULL, null=True, blank=True, related_name=\"evolution\") def __str__(self): return self.title class PokemonEntity(models.Model): pokemon =", "название\") title_en = models.CharField(max_length=200, 
verbose_name=\"Английское название\", blank=True) title_jp = models.CharField(max_length=200, verbose_name=\"Японское название\", blank=True)", "blank=True) defence = models.IntegerField(null=True, verbose_name=\"Defence\", blank=True) stamina = models.IntegerField(null=True, verbose_name=\"Stamina\", blank=True) def __str__(self):", "on_delete=models.CASCADE, related_name=\"entities\") lat = models.FloatField(verbose_name=\"Lat\") lon = models.FloatField(verbose_name=\"Lon\") appeared_at = models.DateTimeField(null=True, verbose_name=\"Appeared at\",", "= models.IntegerField(null=True, verbose_name=\"Strength\", blank=True) defence = models.IntegerField(null=True, verbose_name=\"Defence\", blank=True) stamina = models.IntegerField(null=True, verbose_name=\"Stamina\",", "models.CharField(max_length=200, verbose_name=\"Японское название\", blank=True) image = models.ImageField(verbose_name=\"Картинка покемона\", null=True, blank=True) description = models.TextField(verbose_name=\"Описание\",", "description = models.TextField(verbose_name=\"Описание\", blank=True) previous_evolution = models.ForeignKey(\"self\", verbose_name=\"Из кого эволюционирует\", on_delete=models.SET_NULL, null=True, blank=True,", "related_name=\"evolution\") def __str__(self): return self.title class PokemonEntity(models.Model): pokemon = models.ForeignKey(Pokemon, on_delete=models.CASCADE, related_name=\"entities\") lat", "strength = models.IntegerField(null=True, verbose_name=\"Strength\", blank=True) defence = models.IntegerField(null=True, verbose_name=\"Defence\", blank=True) stamina = models.IntegerField(null=True,", "models.IntegerField(null=True, verbose_name=\"Defence\", blank=True) stamina = models.IntegerField(null=True, verbose_name=\"Stamina\", blank=True) def __str__(self): return f'{self.pokemon}, lvl:", "defence = models.IntegerField(null=True, verbose_name=\"Defence\", blank=True) stamina = models.IntegerField(null=True, verbose_name=\"Stamina\", blank=True) def __str__(self): 
return", "= models.ImageField(verbose_name=\"Картинка покемона\", null=True, blank=True) description = models.TextField(verbose_name=\"Описание\", blank=True) previous_evolution = models.ForeignKey(\"self\", verbose_name=\"Из", "lat = models.FloatField(verbose_name=\"Lat\") lon = models.FloatField(verbose_name=\"Lon\") appeared_at = models.DateTimeField(null=True, verbose_name=\"Appeared at\", blank=True) disappeared_at", "models.ImageField(verbose_name=\"Картинка покемона\", null=True, blank=True) description = models.TextField(verbose_name=\"Описание\", blank=True) previous_evolution = models.ForeignKey(\"self\", verbose_name=\"Из кого", "verbose_name=\"Японское название\", blank=True) image = models.ImageField(verbose_name=\"Картинка покемона\", null=True, blank=True) description = models.TextField(verbose_name=\"Описание\", blank=True)", "title_jp = models.CharField(max_length=200, verbose_name=\"Японское название\", blank=True) image = models.ImageField(verbose_name=\"Картинка покемона\", null=True, blank=True) description", "PokemonEntity(models.Model): pokemon = models.ForeignKey(Pokemon, on_delete=models.CASCADE, related_name=\"entities\") lat = models.FloatField(verbose_name=\"Lat\") lon = models.FloatField(verbose_name=\"Lon\") appeared_at", "at\", blank=True) level = models.IntegerField(null=True, verbose_name=\"Level\", blank=True) health = models.IntegerField(null=True, verbose_name=\"Health\", blank=True) strength", "image = models.ImageField(verbose_name=\"Картинка покемона\", null=True, blank=True) description = models.TextField(verbose_name=\"Описание\", blank=True) previous_evolution = models.ForeignKey(\"self\",", "= models.CharField(max_length=200, verbose_name=\"Японское название\", blank=True) image = models.ImageField(verbose_name=\"Картинка покемона\", null=True, blank=True) description =", "import models class Pokemon(models.Model): title = models.CharField(max_length=200, verbose_name=\"Русское название\") title_en = 
models.CharField(max_length=200, verbose_name=\"Английское", "models.TextField(verbose_name=\"Описание\", blank=True) previous_evolution = models.ForeignKey(\"self\", verbose_name=\"Из кого эволюционирует\", on_delete=models.SET_NULL, null=True, blank=True, related_name=\"evolution\") def", "blank=True) strength = models.IntegerField(null=True, verbose_name=\"Strength\", blank=True) defence = models.IntegerField(null=True, verbose_name=\"Defence\", blank=True) stamina =", "= models.IntegerField(null=True, verbose_name=\"Health\", blank=True) strength = models.IntegerField(null=True, verbose_name=\"Strength\", blank=True) defence = models.IntegerField(null=True, verbose_name=\"Defence\",", "null=True, blank=True, related_name=\"evolution\") def __str__(self): return self.title class PokemonEntity(models.Model): pokemon = models.ForeignKey(Pokemon, on_delete=models.CASCADE,", "blank=True) description = models.TextField(verbose_name=\"Описание\", blank=True) previous_evolution = models.ForeignKey(\"self\", verbose_name=\"Из кого эволюционирует\", on_delete=models.SET_NULL, null=True,", "blank=True) image = models.ImageField(verbose_name=\"Картинка покемона\", null=True, blank=True) description = models.TextField(verbose_name=\"Описание\", blank=True) previous_evolution =", "models.DateTimeField(null=True, verbose_name=\"Disappeared at\", blank=True) level = models.IntegerField(null=True, verbose_name=\"Level\", blank=True) health = models.IntegerField(null=True, verbose_name=\"Health\",", "Pokemon(models.Model): title = models.CharField(max_length=200, verbose_name=\"Русское название\") title_en = models.CharField(max_length=200, verbose_name=\"Английское название\", blank=True) title_jp", "= models.ForeignKey(\"self\", verbose_name=\"Из кого эволюционирует\", on_delete=models.SET_NULL, null=True, blank=True, related_name=\"evolution\") def __str__(self): return self.title", "blank=True) disappeared_at = models.DateTimeField(null=True, verbose_name=\"Disappeared at\", 
blank=True) level = models.IntegerField(null=True, verbose_name=\"Level\", blank=True) health", "def __str__(self): return self.title class PokemonEntity(models.Model): pokemon = models.ForeignKey(Pokemon, on_delete=models.CASCADE, related_name=\"entities\") lat =", "verbose_name=\"Appeared at\", blank=True) disappeared_at = models.DateTimeField(null=True, verbose_name=\"Disappeared at\", blank=True) level = models.IntegerField(null=True, verbose_name=\"Level\",", "verbose_name=\"Defence\", blank=True) stamina = models.IntegerField(null=True, verbose_name=\"Stamina\", blank=True) def __str__(self): return f'{self.pokemon}, lvl: {self.level}'", "models class Pokemon(models.Model): title = models.CharField(max_length=200, verbose_name=\"Русское название\") title_en = models.CharField(max_length=200, verbose_name=\"Английское название\",", "blank=True) title_jp = models.CharField(max_length=200, verbose_name=\"Японское название\", blank=True) image = models.ImageField(verbose_name=\"Картинка покемона\", null=True, blank=True)", "verbose_name=\"Health\", blank=True) strength = models.IntegerField(null=True, verbose_name=\"Strength\", blank=True) defence = models.IntegerField(null=True, verbose_name=\"Defence\", blank=True) stamina", "models.ForeignKey(\"self\", verbose_name=\"Из кого эволюционирует\", on_delete=models.SET_NULL, null=True, blank=True, related_name=\"evolution\") def __str__(self): return self.title class", "health = models.IntegerField(null=True, verbose_name=\"Health\", blank=True) strength = models.IntegerField(null=True, verbose_name=\"Strength\", blank=True) defence = models.IntegerField(null=True,", "title_en = models.CharField(max_length=200, verbose_name=\"Английское название\", blank=True) title_jp = models.CharField(max_length=200, verbose_name=\"Японское название\", blank=True) image", "blank=True) health = models.IntegerField(null=True, verbose_name=\"Health\", blank=True) strength = models.IntegerField(null=True, verbose_name=\"Strength\", 
blank=True) defence =", "= models.CharField(max_length=200, verbose_name=\"Английское название\", blank=True) title_jp = models.CharField(max_length=200, verbose_name=\"Японское название\", blank=True) image =", "= models.FloatField(verbose_name=\"Lat\") lon = models.FloatField(verbose_name=\"Lon\") appeared_at = models.DateTimeField(null=True, verbose_name=\"Appeared at\", blank=True) disappeared_at =", "= models.FloatField(verbose_name=\"Lon\") appeared_at = models.DateTimeField(null=True, verbose_name=\"Appeared at\", blank=True) disappeared_at = models.DateTimeField(null=True, verbose_name=\"Disappeared at\",", "from django.db import models class Pokemon(models.Model): title = models.CharField(max_length=200, verbose_name=\"Русское название\") title_en =", "название\", blank=True) image = models.ImageField(verbose_name=\"Картинка покемона\", null=True, blank=True) description = models.TextField(verbose_name=\"Описание\", blank=True) previous_evolution", "verbose_name=\"Английское название\", blank=True) title_jp = models.CharField(max_length=200, verbose_name=\"Японское название\", blank=True) image = models.ImageField(verbose_name=\"Картинка покемона\",", "verbose_name=\"Strength\", blank=True) defence = models.IntegerField(null=True, verbose_name=\"Defence\", blank=True) stamina = models.IntegerField(null=True, verbose_name=\"Stamina\", blank=True) def", "= models.IntegerField(null=True, verbose_name=\"Defence\", blank=True) stamina = models.IntegerField(null=True, verbose_name=\"Stamina\", blank=True) def __str__(self): return f'{self.pokemon},", "disappeared_at = models.DateTimeField(null=True, verbose_name=\"Disappeared at\", blank=True) level = models.IntegerField(null=True, verbose_name=\"Level\", blank=True) health =", "pokemon = models.ForeignKey(Pokemon, on_delete=models.CASCADE, related_name=\"entities\") lat = models.FloatField(verbose_name=\"Lat\") lon = models.FloatField(verbose_name=\"Lon\") appeared_at =", "__str__(self): return self.title class 
PokemonEntity(models.Model): pokemon = models.ForeignKey(Pokemon, on_delete=models.CASCADE, related_name=\"entities\") lat = models.FloatField(verbose_name=\"Lat\")", "= models.CharField(max_length=200, verbose_name=\"Русское название\") title_en = models.CharField(max_length=200, verbose_name=\"Английское название\", blank=True) title_jp = models.CharField(max_length=200,", "покемона\", null=True, blank=True) description = models.TextField(verbose_name=\"Описание\", blank=True) previous_evolution = models.ForeignKey(\"self\", verbose_name=\"Из кого эволюционирует\",", "models.CharField(max_length=200, verbose_name=\"Английское название\", blank=True) title_jp = models.CharField(max_length=200, verbose_name=\"Японское название\", blank=True) image = models.ImageField(verbose_name=\"Картинка", "verbose_name=\"Русское название\") title_en = models.CharField(max_length=200, verbose_name=\"Английское название\", blank=True) title_jp = models.CharField(max_length=200, verbose_name=\"Японское название\",", "class Pokemon(models.Model): title = models.CharField(max_length=200, verbose_name=\"Русское название\") title_en = models.CharField(max_length=200, verbose_name=\"Английское название\", blank=True)", "= models.DateTimeField(null=True, verbose_name=\"Appeared at\", blank=True) disappeared_at = models.DateTimeField(null=True, verbose_name=\"Disappeared at\", blank=True) level =", "verbose_name=\"Disappeared at\", blank=True) level = models.IntegerField(null=True, verbose_name=\"Level\", blank=True) health = models.IntegerField(null=True, verbose_name=\"Health\", blank=True)", "blank=True) previous_evolution = models.ForeignKey(\"self\", verbose_name=\"Из кого эволюционирует\", on_delete=models.SET_NULL, null=True, blank=True, related_name=\"evolution\") def __str__(self):", "lon = models.FloatField(verbose_name=\"Lon\") appeared_at = models.DateTimeField(null=True, verbose_name=\"Appeared at\", blank=True) disappeared_at = models.DateTimeField(null=True, 
verbose_name=\"Disappeared", "class PokemonEntity(models.Model): pokemon = models.ForeignKey(Pokemon, on_delete=models.CASCADE, related_name=\"entities\") lat = models.FloatField(verbose_name=\"Lat\") lon = models.FloatField(verbose_name=\"Lon\")", "django.db import models class Pokemon(models.Model): title = models.CharField(max_length=200, verbose_name=\"Русское название\") title_en = models.CharField(max_length=200,", "appeared_at = models.DateTimeField(null=True, verbose_name=\"Appeared at\", blank=True) disappeared_at = models.DateTimeField(null=True, verbose_name=\"Disappeared at\", blank=True) level", "models.IntegerField(null=True, verbose_name=\"Level\", blank=True) health = models.IntegerField(null=True, verbose_name=\"Health\", blank=True) strength = models.IntegerField(null=True, verbose_name=\"Strength\", blank=True)", "название\", blank=True) title_jp = models.CharField(max_length=200, verbose_name=\"Японское название\", blank=True) image = models.ImageField(verbose_name=\"Картинка покемона\", null=True,", "= models.TextField(verbose_name=\"Описание\", blank=True) previous_evolution = models.ForeignKey(\"self\", verbose_name=\"Из кого эволюционирует\", on_delete=models.SET_NULL, null=True, blank=True, related_name=\"evolution\")", "кого эволюционирует\", on_delete=models.SET_NULL, null=True, blank=True, related_name=\"evolution\") def __str__(self): return self.title class PokemonEntity(models.Model): pokemon", "related_name=\"entities\") lat = models.FloatField(verbose_name=\"Lat\") lon = models.FloatField(verbose_name=\"Lon\") appeared_at = models.DateTimeField(null=True, verbose_name=\"Appeared at\", blank=True)", "null=True, blank=True) description = models.TextField(verbose_name=\"Описание\", blank=True) previous_evolution = models.ForeignKey(\"self\", verbose_name=\"Из кого эволюционирует\", on_delete=models.SET_NULL,", "= models.IntegerField(null=True, verbose_name=\"Level\", blank=True) health = models.IntegerField(null=True, 
verbose_name=\"Health\", blank=True) strength = models.IntegerField(null=True, verbose_name=\"Strength\",", "title = models.CharField(max_length=200, verbose_name=\"Русское название\") title_en = models.CharField(max_length=200, verbose_name=\"Английское название\", blank=True) title_jp =", "models.IntegerField(null=True, verbose_name=\"Health\", blank=True) strength = models.IntegerField(null=True, verbose_name=\"Strength\", blank=True) defence = models.IntegerField(null=True, verbose_name=\"Defence\", blank=True)" ]